diff --git a/.github/workflows/run-tests.yml b/.github/workflows/run-tests.yml
index 5c8c03bf1..c72649628 100644
--- a/.github/workflows/run-tests.yml
+++ b/.github/workflows/run-tests.yml
@@ -16,10 +16,7 @@ jobs:
       DATABASE_URL: postgis://postgres:postgres@localhost/smbackend
       ADDITIONAL_INSTALLED_APPS: smbackend_turku,ptv
       PTV_ID_OFFSET: 10000000
-      BIKE_SERVICE_STATIONS_IDS: service_node=500000,service=500000,units_offset=500000
-      GAS_FILLING_STATIONS_IDS: service_node=200000,service=200000,units_offset=200000
-      CHARGING_STATIONS_IDS: service_node=300000,service=300000,units_offset=300000
-      BICYCLE_STANDS_IDS: service_node=400000,service=400000,units_offset=400000
+      LAM_COUNTER_API_BASE_URL: https://tie.digitraffic.fi/api/tms/v1/history
     steps:
     - uses: actions/checkout@v2
diff --git a/config_dev.env.example b/config_dev.env.example
index c7b6241cb..296a49ad6 100644
--- a/config_dev.env.example
+++ b/config_dev.env.example
@@ -180,4 +180,8 @@ YIT_ROUTES_URL=https://api.autori.io/api/dailymaintenance-a3/route/
 YIT_VEHICLES_URL=https://api.autori.io/api/dailymaintenance-a3/route/types/vehicle/
 YIT_CONTRACTS_URL=https://api.autori.io/api/dailymaintenance-a3/contracts/
 YIT_TOKEN_URL=https://login.microsoftonline.com/86792d09-0d81-4899-8d66-95dfc96c8014/oauth2/v2.0/token?Scope=api://7f45c30e-cc67-4a93-85f1-0149b44c1cdf/.default
-KUNTEC_KEY=
\ No newline at end of file
+# API key for the Kuntec API
+KUNTEC_KEY=
+# Telraam API token, required when fetching Telraam data to CSV (import_telraam_to_csv.py)
+# https://telraam.helpspace-docs.io/article/27/you-wish-more-data-and-statistics-telraam-api
+TELRAAM_TOKEN=
\ No newline at end of file
diff --git a/eco_counter/README.md b/eco_counter/README.md
index a89da2dea..32bdcad68 100644
--- a/eco_counter/README.md
+++ b/eco_counter/README.md
@@ -2,8 +2,8 @@
 
 Imports/Processes data from: https://data.turku.fi/2yxpk2imqi2mzxpa6e6knq
 
-Imports both "Liikennelasketa-Ilmaisintiedot 15 min aikaväleillä"(Traffic Counter) and "Eco-Counter" (Eco Counter) datas. Imports/processes "LAM-Counter" (LAM Counter) data from https://www.digitraffic.fi/tieliikenne/lam/
-
+Imports both "Liikennelasketa-Ilmaisintiedot 15 min aikaväleillä" (Traffic Counter) and "Eco-Counter" (Eco Counter) data. Imports/processes "LAM-Counter" (LAM Counter) data from https://www.digitraffic.fi/tieliikenne/lam/ and
+Telraam data from https://telraam-api.net/.
 ## Installation:
 Add following lines to the .env:
@@ -16,6 +16,8 @@
 Note, The urls can change. Up-to-date urls can be found at:
 https://www.avoindata.fi/data/fi/dataset/turun-seudun-liikennemaaria
 and https://www.digitraffic.fi/tieliikenne/lam/
+Telraam API token, required when fetching Telraam data to CSV (import_telraam_to_csv.py): https://telraam.helpspace-docs.io/article/27/you-wish-more-data-and-statistics-telraam-api
+TELRAAM_TOKEN=
 
 ## Importing
 
@@ -23,14 +25,19 @@
 The initial import, this must be done before starting with the continous incremental imports:
 ./manage.py import_counter_data --init COUNTERS
 e.g. ./manage.py import_counter_data --init EC TC
-The counters are EC(Eco Counter), TC(Traffic Counter) and LC(Lam Counter).
+The counters are EC(Eco Counter), TC(Traffic Counter), LC(Lam Counter) and TR(Telraam Counter).
 
 ### Continous Import
 For continous (hourly) imports run:
 ./manage.py import_counter_data --counters COUNTERS
 e.g. ./manage.py import_counter_data --counters EC TC
-Counter names are: EC (Eco Counter), TC (Traffic Counter) and LC (Lam Counter).
-Note, Traffic Counter data is updated once a week.
+Counter names are: EC (Eco Counter), TC (Traffic Counter), LC (Lam Counter) and TR (Telraam Counter).
+Note, Traffic Counter data is updated once a week and Lam Counter data once a day.
+
+### Importing Telraam raw data
+Before Telraam data can be imported into the database, the raw data has to be fetched. The raw data is fetched with the _import_telraam_to_csv_ management command, e.g. ./manage.py import_telraam_to_csv
+The import should be scheduled to run once an hour (see: https://github.com/City-of-Turku/smbackend/wiki/Celery-Tasks#telraam-to-csv-eco_countertasksimport_telraam_to_csv )
+Telraam raw data is stored in PROJECT_ROOT/media/telraam_data/.
 
 ## Troubleshooting
 For reasons unknown, the amount of sensors can sometimes change in the source csv file, e.g. the amount of columns changes. If this happens, run the initial import: ./manage.py import_counter_data --init and after that it is safe to run the importer as normal.
diff --git a/eco_counter/api/serializers.py b/eco_counter/api/serializers.py
index e0cbc80c8..7b9f81713 100644
--- a/eco_counter/api/serializers.py
+++ b/eco_counter/api/serializers.py
@@ -35,7 +35,6 @@ class StationSerializer(serializers.ModelSerializer):
 
     class Meta:
         model = Station
-
         fields = [
             "id",
             "name",
@@ -43,7 +42,8 @@ class Meta:
             "name_sv",
             "name_en",
             "csv_data_source",
-            "geom",
+            "location",
+            "geometry",
             "x",
             "y",
             "lon",
@@ -52,18 +52,18 @@ class Meta:
         ]
 
     def get_y(self, obj):
-        return obj.geom.y
+        return obj.location.y
 
     def get_lat(self, obj):
-        obj.geom.transform(4326)
-        return obj.geom.y
+        obj.location.transform(4326)
+        return obj.location.y
 
     def get_x(self, obj):
-        return obj.geom.x
+        return obj.location.x
 
     def get_lon(self, obj):
-        obj.geom.transform(4326)
-        return obj.geom.x
+        obj.location.transform(4326)
+        return obj.location.x
 
     def get_sensor_types(self, obj):
         # Return the sensor types(car, bike etc) that has a total year value >0.
@@ -100,7 +100,6 @@ class Meta:
 
 
 class DaySerializer(serializers.ModelSerializer):
-
     station_name = serializers.PrimaryKeyRelatedField(
         many=False, source="station.name", read_only=True
     )
@@ -207,7 +206,6 @@ class Meta:
 
 
 class HourDataSerializer(serializers.ModelSerializer):
-
     day_info = DayInfoSerializer(source="day")
 
     class Meta:
@@ -229,7 +227,6 @@ class Meta:
 
 
 class DayDataSerializer(serializers.ModelSerializer):
-
     day_info = DayInfoSerializer(source="day")
 
     class Meta:
@@ -254,7 +251,6 @@ class Meta:
 
 
 class MonthDataSerializer(serializers.ModelSerializer):
-
     month_info = MonthInfoSerializer(source="month")
 
     class Meta:
@@ -267,7 +263,6 @@ class Meta:
 
 
 class YearDataSerializer(serializers.ModelSerializer):
-
     year_info = YearInfoSerializer(source="year")
 
     class Meta:
diff --git a/eco_counter/api/views.py b/eco_counter/api/views.py
index 279e9f5af..04d3ea3b1 100644
--- a/eco_counter/api/views.py
+++ b/eco_counter/api/views.py
@@ -69,7 +69,6 @@ def list(self, request):
 
 
 class HourDataViewSet(viewsets.ReadOnlyModelViewSet):
-
     queryset = HourData.objects.all()
     serializer_class = HourDataSerializer
 
@@ -80,7 +79,6 @@ def get_hour_data(self, request):
 
 
 class DayDataViewSet(viewsets.ReadOnlyModelViewSet):
-
     queryset = DayData.objects.all()
     serializer_class = DayDataSerializer
 
diff --git a/eco_counter/constants.py b/eco_counter/constants.py
new file mode 100644
index 000000000..7744b4229
--- /dev/null
+++ b/eco_counter/constants.py
@@ -0,0 +1,136 @@
+import platform
+import types
+
+import requests
+from django.conf import settings
+from requests.adapters import HTTPAdapter
+from requests.packages.urllib3.util.retry import Retry
+
+INDEX_COLUMN_NAME = "startTime"
+
+TRAFFIC_COUNTER_START_YEAR = 2015
+# Manually define the end year, as the source data comes from the page
+# defined in the env variable TRAFFIC_COUNTER_OBSERVATIONS_BASE_URL.
+# Change the end year when data for the next year is available.
+TRAFFIC_COUNTER_END_YEAR = 2023
+ECO_COUNTER_START_YEAR = 2020
+LAM_COUNTER_START_YEAR = 2010
+TELRAAM_COUNTER_START_YEAR = 2023
+
+
+TRAFFIC_COUNTER = "TC"
+ECO_COUNTER = "EC"
+LAM_COUNTER = "LC"
+TELRAAM_COUNTER = "TR"
+TELRAAM_CSV = "TV"
+
+COUNTERS = types.SimpleNamespace()
+COUNTERS.TRAFFIC_COUNTER = TRAFFIC_COUNTER
+COUNTERS.ECO_COUNTER = ECO_COUNTER
+COUNTERS.LAM_COUNTER = LAM_COUNTER
+COUNTERS.TELRAAM_COUNTER = TELRAAM_COUNTER
+
+CSV_DATA_SOURCES = (
+    (TRAFFIC_COUNTER, "TrafficCounter"),
+    (ECO_COUNTER, "EcoCounter"),
+    (LAM_COUNTER, "LamCounter"),
+    (TELRAAM_COUNTER, "TelraamCounter"),
+    (TELRAAM_CSV, "TelraamCSV"),
+)
+COUNTER_START_YEARS = {
+    ECO_COUNTER: ECO_COUNTER_START_YEAR,
+    TRAFFIC_COUNTER: TRAFFIC_COUNTER_START_YEAR,
+    LAM_COUNTER: LAM_COUNTER_START_YEAR,
+    TELRAAM_COUNTER: TELRAAM_COUNTER_START_YEAR,
+}
+
+TRAFFIC_COUNTER_METADATA_GEOJSON = "traffic_counter_metadata.geojson"
+LAM_STATIONS_API_FETCH_URL = (
+    settings.LAM_COUNTER_API_BASE_URL
+    + "?api=liikennemaara&tyyppi=h&pvm={start_date}&loppu={end_date}"
+    + "&lam_type=option1&piste={id}&luokka=kaikki&suunta={direction}&sisallytakaistat=0"
+)
+# LAM stations in the locations list are included.
+LAM_STATION_LOCATIONS = ["Turku", "Raisio", "Kaarina", "Lieto", "Hauninen", "Oriketo"]
+# Header that is added to the request that fetches the LAM data.
+LAM_STATION_USER_HEADER = {
+    "Digitraffic-User": f"{platform.uname()[1]}/Turun Palvelukartta"
+}
+# Mappings are derived from the 'suunta' and 'suuntaselite' columns in the source data.
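+# For example, at station vt8_Raisio the source rows with suunta=1 and
+# suuntaselite 'Vaasa' are mapped via the key '1_Vaasa' below to 'P'.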
+# (P)oispäin keskustasta or (K)eskustaan päin
+LAM_STATIONS_DIRECTION_MAPPINGS = {
+    # vt8_Raisio
+    "1_Vaasa": "P",
+    "2_Turku": "K",
+    # vt1_Kaarina_Kirismäki
+    "1_Turku": "K",
+    "2_Helsinki": "P",
+    # vt10_Lieto
+    "1_Hämeenlinna": "P",
+    # "2_Turku": "K", Duplicate
+    # vt1_Turku_Kupittaa
+    # "1_Turku" Duplicate
+    # "2_Helsinki" Duplicate
+    # vt1_Turku_Kurkela_länsi
+    # "1_Turku" Duplicate
+    # "2_Helsinki" Duplicate
+    # vt1_Kaarina_Kurkela_itä
+    # "1_Turku" Duplicate
+    # "2_Helsinki" Duplicate
+    # vt1_Kaarina
+    # "1_Turku" Duplicate
+    # "2_Helsinki" Duplicate
+    # vt1_Kaarina_Piikkiö
+    # "1_Turku" Duplicate
+    # "2_Helsinki" Duplicate
+    # yt1851_Turku_Härkämäki
+    "1_Suikkila": "K",
+    "2_Artukainen": "P",
+    # kt40_Hauninen
+    "1_Piikkiö": "K",
+    "2_Naantali": "P",
+    # kt40_Oriketo
+    # "1_Piikkiö": "K", duplicate
+    # "2_Naantali": "P", duplicate
+}
+keys = [k for k in range(TRAFFIC_COUNTER_START_YEAR, TRAFFIC_COUNTER_END_YEAR + 1)]
+# Create a dict where the years to be imported are keys and the values are the URLs of the CSV data.
+# e.g. {2015: "https://data.turku.fi/2yxpk2imqi2mzxpa6e6knq/2015_laskenta_juha.csv"}
+TRAFFIC_COUNTER_CSV_URLS = dict(
+    [
+        (k, f"{settings.TRAFFIC_COUNTER_OBSERVATIONS_BASE_URL}{k}_laskenta_juha.csv")
+        for k in keys
+    ]
+)
+TELRAAM_COUNTER_API_BASE_URL = "https://telraam-api.net"
+# Maximum 3 months at a time
+TELRAAM_COUNTER_TRAFFIC_URL = f"{TELRAAM_COUNTER_API_BASE_URL}/v1/reports/traffic"
+TELRAAM_COUNTER_CAMERAS_URL = TELRAAM_COUNTER_API_BASE_URL + "/v1/cameras/{mac_id}"
+
+TELRAAM_COUNTER_CAMERA_SEGMENTS_URL = (
+    TELRAAM_COUNTER_API_BASE_URL + "/v1/segments/id/{id}"
+)
+# The start month of the start year, as Telraam data is not available
+# from the beginning of the start year
+TELRAAM_COUNTER_START_MONTH = 5
+TELRAAM_COUNTER_API_TIME_FORMAT = "%Y-%m-%d %H:%M:%S"
+TELRAAM_COUNTER_CSV_FILE_PATH = f"{settings.MEDIA_ROOT}/telraam_data/"
+TELRAAM_COUNTER_CSV_FILE = (
+    TELRAAM_COUNTER_CSV_FILE_PATH + "telraam_data_{id}_{day}_{month}_{year}.csv"
+)
+TELRAAM_COUNTER_CAMERAS = {
+    # Mac id: direction flag (True = the rgt prefix maps to keskustaan päin)
+    350457790598039: False,  # Kristiinanankatu, to the left when looking towards the river
+    350457790600975: True,  # Kristiinanankatu, to the right when looking towards the river
+}
+# For 429 (Too Many Requests) responses the Telraam API needs a retry strategy
+retry_strategy = Retry(
+    total=10,
+    status_forcelist=[429],
+    method_whitelist=["GET", "POST"],
+    backoff_factor=30,  # 30, 60, 120, 240, ... seconds
+)
+adapter = HTTPAdapter(max_retries=retry_strategy)
+TELRAAM_HTTP = requests.Session()
+TELRAAM_HTTP.mount("https://", adapter)
+TELRAAM_HTTP.mount("http://", adapter)
diff --git a/eco_counter/management/commands/delete_all_counter_data.py b/eco_counter/management/commands/delete_all_counter_data.py
new file mode 100644
index 000000000..9633df83c
--- /dev/null
+++ b/eco_counter/management/commands/delete_all_counter_data.py
@@ -0,0 +1,17 @@
+import logging
+
+from django import db
+from django.core.management.base import BaseCommand
+
+from eco_counter.models import ImportState, Station
+
+logger = logging.getLogger("eco_counter")
+
+
+class Command(BaseCommand):
+    @db.transaction.atomic
+    def handle(self, *args, **options):
+        logger.info("Deleting all counter data...")
+        logger.info(f"{Station.objects.all().delete()}")
+        logger.info(f"{ImportState.objects.all().delete()}")
+        logger.info("Deleted all counter data.")
diff --git a/eco_counter/management/commands/import_counter_data.py b/eco_counter/management/commands/import_counter_data.py
index 9a25a5ec5..0a607e682 100644
--- a/eco_counter/management/commands/import_counter_data.py
+++ b/eco_counter/management/commands/import_counter_data.py
@@ -1,48 +1,13 @@
 """
+To run tests:
+pytest -m test_import_counter_data
 Usage: see README.md
-
-Brief explanation of the import alogithm:
-1. Import the stations.
-2. Read the csv file as a pandas DataFrame.
-3. Reads the year and month from the ImportState.
-4. Set the import to start from that year and month, the import always begins
-   from the first day and time 00:00:00 of the month in state, i.e. the longest
-   timespan that is imported is one month and the shortest is 15min, depending
-   on the import state.
-5. Delete tables(HourData, Day, DayData and Week) that will be repopulated. *
-6. Set the current state to state variables: current_years, currents_months,
-   current_weeks, these dictionaries holds references to the model instances.
-   Every station has its own state variables and the key is the name of the station.
-7. Iterate through all the rows
-   7.1 Read the time
-   7.2 Read the current year, month, week and day number.
-   7.3 If index % 4 == 0 save current hour to current_hours state, the input
-       data has a sample rateof 15min, and the precision stored while importing
-       is One hour.
-   7.4 If day number has changed save hourly and day data.
-       7.4.1 If Year, month or week number has changed. Save this data, create new tables
-             and update references to state variables.
-       7.4.2 Create new day tables using the current state variables(year, month week),
-             update day state variable. Create HourData tables and update current_hours
-             state variable. HourData tables are the only tables that contains data that are
-             in the state, thus they are updated every fourth iteration. (15min samples to 1h)
-8.6 Iterate through all the columns, except the first that holds the time.
-    8.6.1 Store the sampled data to current_hour state for every station,
-          every mode of transportaion and direction.
-9. Finally store all data in states that has not been saved.
-10. Save import state.
-
-* If executed with the --init-tables flag, the imports will start from the beginning
-of the .csv file, 1.1.2020. for the eco counter , 1.1.2015 for the traffic counter and
-1.1.2010 for the lam counter.
-
 """
 import gc
 import logging
-import re
-from datetime import datetime, timedelta
+from datetime import datetime
 
 import dateutil.parser
 import pandas as pd
@@ -50,18 +15,25 @@
 from django.conf import settings
 from django.core.management.base import BaseCommand, CommandError
 
-from eco_counter.models import (
+from eco_counter.constants import (
     COUNTER_START_YEARS,
+    COUNTERS,
+    ECO_COUNTER,
+    INDEX_COLUMN_NAME,
+    LAM_COUNTER,
+    TELRAAM_COUNTER,
+    TELRAAM_COUNTER_START_MONTH,
+    TRAFFIC_COUNTER,
+    TRAFFIC_COUNTER_START_YEAR,
+)
+from eco_counter.models import (
     Day,
     DayData,
-    ECO_COUNTER,
     HourData,
     ImportState,
-    LAM_COUNTER,
     Month,
     MonthData,
     Station,
-    TRAFFIC_COUNTER,
     Week,
     WeekData,
     Year,
@@ -72,12 +44,10 @@
     gen_eco_counter_test_csv,
     get_eco_counter_csv,
    get_lam_counter_csv,
+    get_telraam_counter_csv,
     get_test_dataframe,
     get_traffic_counter_csv,
-    save_eco_counter_stations,
-    save_lam_counter_stations,
-    save_traffic_counter_stations,
-    TIMESTAMP_COL_NAME,
+    save_stations,
 )
 
 logger = logging.getLogger("eco_counter")
@@ -97,9 +67,25 @@ class Command(BaseCommand):
     help = "Imports traffic counter data in the Turku region."
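+    # TR (Telraam) data is read from the local CSV files produced by the
+    # import_telraam_to_csv management command (see utils.get_telraam_counter_csv).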
-    COUNTERS = [ECO_COUNTER, TRAFFIC_COUNTER, LAM_COUNTER]
-    COUNTER_CHOICES_STR = f"{ECO_COUNTER}, {TRAFFIC_COUNTER} and {LAM_COUNTER}"
+    COUNTERS = [ECO_COUNTER, TRAFFIC_COUNTER, LAM_COUNTER, TELRAAM_COUNTER]
+    COUNTER_CHOICES_STR = (
+        f"{ECO_COUNTER}, {TRAFFIC_COUNTER}, {TELRAAM_COUNTER} and {LAM_COUNTER}"
+    )
     TIMEZONE = pytz.timezone("Europe/Helsinki")
+    """
+    Movement types:
+    (A)uto, car
+    (P)yörä, bicycle
+    (J)alankulkija, pedestrian
+    (B)ussi, bus
+    Direction types:
+    (K)eskustaan päin, towards the center
+    (P)oispäin keskustasta, away from the center
+    So, for example, the column with prefix "ap" contains data for cars moving away from the center.
+    The naming convention is derived from the eco-counter source data that was the
+    original data source.
+
+    """
     STATION_TYPES = [
         ("ak", "ap", "at"),
         ("pk", "pp", "pt"),
@@ -107,393 +93,265 @@ class Command(BaseCommand):
         ("bk", "bp", "bt"),
     ]
 
+    TYPE_DIRS = ["AK", "AP", "JK", "JP", "BK", "BP", "PK", "PP"]
+    ALL_TYPE_DIRS = TYPE_DIRS + ["AT", "JT", "BT", "PT"]
+    type_dirs_lower = [TD.lower() for TD in TYPE_DIRS]
+
     def delete_tables(
-        self, csv_data_sources=[ECO_COUNTER, TRAFFIC_COUNTER, LAM_COUNTER]
+        self,
+        csv_data_sources=[ECO_COUNTER, TRAFFIC_COUNTER, LAM_COUNTER, TELRAAM_COUNTER],
     ):
         for csv_data_source in csv_data_sources:
-            Station.objects.filter(csv_data_source=csv_data_source).delete()
+            for station in Station.objects.filter(csv_data_source=csv_data_source):
+                Year.objects.filter(station=station).delete()
             ImportState.objects.filter(csv_data_source=csv_data_source).delete()
 
-    def calc_and_save_cumulative_data(self, src_obj, dst_obj):
+    def save_values(self, values, dst_obj):
+        for station_types in self.STATION_TYPES:
+            setattr(dst_obj, f"value_{station_types[0]}", values[station_types[0]])
+            setattr(dst_obj, f"value_{station_types[1]}", values[station_types[1]])
+            setattr(
+                dst_obj,
+                f"value_{station_types[2]}",
+                values[station_types[0]] + values[station_types[1]],
+            )
+        dst_obj.save()
 
+    def add_values(self, values, dst_obj):
+        """
+        Populate values for all movement types and directions for a station.
+        """
         for station_types in self.STATION_TYPES:
-            setattr(dst_obj, f"value_{station_types[0]}", 0)
-            setattr(dst_obj, f"value_{station_types[1]}", 0)
-            setattr(dst_obj, f"value_{station_types[2]}", 0)
+            key = f"value_{station_types[0]}"
+            k_val = getattr(dst_obj, key, 0) + values[station_types[0]]
+            setattr(dst_obj, key, k_val)
+            key = f"value_{station_types[1]}"
+            p_val = getattr(dst_obj, key, 0) + values[station_types[1]]
+            setattr(dst_obj, key, p_val)
+            key = f"value_{station_types[2]}"
+            t_val = (
+                getattr(dst_obj, key, 0)
+                + values[station_types[0]]
+                + values[station_types[1]]
+            )
+            setattr(dst_obj, key, t_val)
+        dst_obj.save()
 
-        for src in src_obj:
-            for station_types in self.STATION_TYPES:
-                setattr(
-                    dst_obj,
-                    f"value_{station_types[0]}",
-                    getattr(dst_obj, f"value_{station_types[0]}")
-                    + getattr(src, f"value_{station_types[0]}"),
+    def get_values(self, sum_series, station_name):
+        """
+        Returns a dict containing the aggregated sum value for every movement type and direction.
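+        e.g., assuming a station named "Kirjastosilta" (hypothetical), the
+        sum_series entries "Kirjastosilta AK", "Kirjastosilta AP", ... are
+        collected into {"ak": <sum>, "ap": <sum>, ...}; missing keys default to 0.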
+ """ + values = {} + for type_dir in self.TYPE_DIRS: + key = f"{station_name} {type_dir}" + values[type_dir.lower()] = sum_series.get(key, 0) + return values + + def save_years(self, df, stations): + logger.info("Saving years...") + years = df.groupby(df.index.year) + for index, row in years: + logger.info(f"Saving year {index}") + sum_series = row.sum() + for station in stations: + year, _ = Year.objects.get_or_create(station=station, year_number=index) + values = self.get_values(sum_series, station.name) + year_data, _ = YearData.objects.get_or_create( + year=year, station=station ) - setattr( - dst_obj, - f"value_{station_types[1]}", - getattr(dst_obj, f"value_{station_types[1]}") - + getattr(src, f"value_{station_types[1]}"), + self.save_values(values, year_data) + + def save_months(self, df, stations): + logger.info("Saving months...") + months = df.groupby([df.index.year, df.index.month]) + for index, row in months: + year_number, month_number = index + logger.info(f"Saving month {month_number} of year {year_number}") + sum_series = row.sum() + for station in stations: + year, _ = Year.objects.get_or_create( + station=station, year_number=year_number ) - setattr( - dst_obj, - f"value_{station_types[2]}", - getattr(dst_obj, f"value_{station_types[2]}") - + getattr(src, f"value_{station_types[2]}"), + month, _ = Month.objects.get_or_create( + station=station, year=year, month_number=month_number ) - dst_obj.save() - - def create_and_save_year_data(self, stations, current_years): - for station in stations: - year = current_years[station] - year_data = YearData.objects.update_or_create( - year=year, station=stations[station] - )[0] - self.calc_and_save_cumulative_data(year.month_data.all(), year_data) + values = self.get_values(sum_series, station.name) + month_data, _ = MonthData.objects.get_or_create( + year=year, month=month, station=station + ) + self.save_values(values, month_data) - def create_and_save_month_data(self, stations, current_months, current_years): - for station in stations: - month = current_months[station] - month_data = MonthData.objects.update_or_create( - month=month, station=stations[station], year=current_years[station] - )[0] - day_data = DayData.objects.filter(day__month=month) - self.calc_and_save_cumulative_data(day_data, month_data) - - def create_and_save_week_data(self, stations, current_weeks): - for station in stations: - week = current_weeks[station] - week_data = WeekData.objects.update_or_create( - week=week, station=stations[station] - )[0] - day_data = DayData.objects.filter(day__week=week) - self.calc_and_save_cumulative_data(day_data, week_data) - - def create_and_save_day_data(self, stations, current_hours, current_days): + def save_current_year(self, stations, year_number, end_month_number): + logger.info(f"Saving current year {year_number}") for station in stations: - day_data = DayData.objects.create( - station=stations[station], day=current_days[station] + year, _ = Year.objects.get_or_create( + station=station, year_number=year_number ) - current_hour = current_hours[station] + year_data, _ = YearData.objects.get_or_create(station=station, year=year) for station_types in self.STATION_TYPES: - setattr( - day_data, - f"value_{station_types[0]}", - sum(getattr(current_hour, f"values_{station_types[0]}")), + setattr(year_data, f"value_{station_types[0]}", 0) + setattr(year_data, f"value_{station_types[1]}", 0) + setattr(year_data, f"value_{station_types[2]}", 0) + for month_number in range(1, end_month_number + 1): + month, _ = 
Month.objects.get_or_create( + station=station, year=year, month_number=month_number ) - setattr( - day_data, - f"value_{station_types[1]}", - sum(getattr(current_hour, f"values_{station_types[1]}")), + month_data, _ = MonthData.objects.get_or_create( + station=station, month=month, year=year ) - setattr( - day_data, - f"value_{station_types[2]}", - sum(getattr(current_hour, f"values_{station_types[2]}")), + for station_types in self.STATION_TYPES: + for i in range(3): + key = f"value_{station_types[i]}" + m_val = getattr(month_data, key, 0) + y_val = getattr(year_data, key, 0) + setattr(year_data, key, m_val + y_val) + year_data.save() + + def save_weeks(self, df, stations): + logger.info("Saving weeks...") + weeks = df.groupby([df.index.year, df.index.isocalendar().week]) + for index, row in weeks: + year_number, week_number = index + logger.info(f"Saving week number {week_number} of year {year_number}") + sum_series = row.sum() + for station in stations: + year = Year.objects.get(station=station, year_number=year_number) + week, _ = Week.objects.get_or_create( + station=station, + week_number=week_number, + years__year_number=year_number, ) - day_data.save() - - def save_hour_data(self, current_hour, current_hours): - for station in current_hour: - hour_data = current_hours[station] - - for station_type in self.STATION_TYPES: - # keskustaan päin - k_field = station_type[0] - k_value = 0 - # poispäin keskustasta - p_field = station_type[1] - p_value = 0 - # molempiin suuntiin k - t_field = station_type[2] - t_value = 0 - total_field = station_type[2] - if k_field.upper() in current_hour[station]: - k_value = current_hour[station][k_field.upper()] - getattr(hour_data, f"values_{k_field}").append(k_value) - - if p_field.upper() in current_hour[station]: - p_value = current_hour[station][p_field.upper()] - getattr(hour_data, f"values_{p_field}").append(p_value) - - if t_field.upper() in current_hour[station]: - t_value = current_hour[station][t_field.upper()] - getattr(hour_data, f"values_{total_field}").append(t_value) - else: - getattr(hour_data, f"values_{total_field}").append( - k_value + p_value - ) + if week.years.count() == 0: + week.years.add(year) - hour_data.save() - - def get_station_name_and_type(self, column): - # Station type is always: A|P|J|B + K|P - station_type = re.findall("[APJB][PKT]", column)[0] - station_name = column.replace(station_type, "").strip() - return station_name, station_type + values = self.get_values(sum_series, station.name) + week_data, _ = WeekData.objects.get_or_create( + station=station, week=week + ) + self.save_values(values, week_data) - def save_observations( - self, csv_data, start_time, column_names, csv_data_source=ECO_COUNTER - ): - errorneous_values = 0 - negative_values = 0 - - stations = {} - # Populate stations dict, used to lookup station relations - for station in Station.objects.filter(csv_data_source=csv_data_source): - stations[station.name] = station - # state variable for the current hour that is calucalted for every iteration(15min) - current_hour = {} - current_hours = {} - current_days = {} - current_weeks = {} - current_months = {} - current_years = {} - import_state = ImportState.objects.get(csv_data_source=csv_data_source) - current_year_number = import_state.current_year_number - current_month_number = import_state.current_month_number - current_weekday_number = None - - current_week_number = int(start_time.strftime("%-V")) - prev_weekday_number = start_time.weekday() - prev_year_number = current_year_number - 
prev_month_number = current_month_number - prev_week_number = current_week_number - current_time = None - prev_time = None - year_has_changed = False - changed_daylight_saving_to_summer = False - # All Hourly, daily and weekly data that are past the current_week_number - # are delete thus they are repopulated. HourData and DayData are deleted - # thus their on_delete is set to models.CASCADE. - Day.objects.filter( - month__month_number=current_month_number, - month__year__year_number=current_year_number, - station__csv_data_source=csv_data_source, - ).delete() - # If week number >= 52 then do not delete the week as it has been created - # in the previous year. - if current_week_number >= 52: - # Set to 0 as we want to delete the first week, as it is not the first week - # of the year if week number is >=52. - start_week_number = 0 - else: - # Add by one, i.e., do not delete the current_week. - start_week_number = current_week_number + 1 - - for week_number in range(start_week_number, start_week_number + 5): - Week.objects.filter( - week_number=week_number, - years__year_number=current_year_number, - station__csv_data_source=csv_data_source, - ).delete() - # Set the references to the current state. - for station in stations: - current_years[station] = Year.objects.get_or_create( - station=stations[station], year_number=current_year_number - )[0] - current_months[station] = Month.objects.get_or_create( - station=stations[station], - year=current_years[station], - month_number=current_month_number, - )[0] - current_weeks[station] = Week.objects.get_or_create( - station=stations[station], - week_number=current_week_number, - years__year_number=current_year_number, - )[0] - current_weeks[station].years.add(current_years[station]) - - for index, row in csv_data.iterrows(): - try: - timestamp = row.get(TIMESTAMP_COL_NAME, None) - if type(timestamp) == str: - current_time = dateutil.parser.parse(timestamp) - # Support also timestamps that are of Pandas Timestamp type. - elif type(timestamp) == pd.Timestamp: - current_time = dateutil.parser.parse(str(timestamp)) - # When the time is changed due to daylight savings - # Input data does not contain any timestamp for that hour, only data - # so the current_time is calculated - else: - current_time = prev_time + timedelta(minutes=15) - except dateutil.parser._parser.ParserError: - # If malformed time, calcultate new current_time. - current_time = prev_time + timedelta(minutes=15) - - current_time = self.TIMEZONE.localize(current_time) - if prev_time: - # Compare the utcoffset, if not equal the daylight saving has changed. 
-                if current_time.tzinfo.utcoffset(
-                    current_time
-                ) != prev_time.tzinfo.utcoffset(prev_time):
-                    # Take the daylight saving time (dst) hour from the utcoffset
-                    current_time_dst_hour = dateutil.parser.parse(
-                        str(current_time.tzinfo.utcoffset(current_time))
+    def save_days(self, df, stations):
+        logger.info("Saving days...")
+        days = df.groupby(
+            [df.index.year, df.index.month, df.index.isocalendar().week, df.index.day]
+        )
+        prev_week_number = None
+        for index, row in days:
+            year_number, month_number, week_number, day_number = index
+
+            date = datetime(year_number, month_number, day_number)
+            sum_series = row.sum()
+            for station in stations:
+                year = Year.objects.get(station=station, year_number=year_number)
+                month = Month.objects.get(
+                    station=station, year=year, month_number=month_number
+                )
+                week = Week.objects.get(
+                    station=station, years=year, week_number=week_number
+                )
+                day, _ = Day.objects.get_or_create(
+                    station=station,
+                    date=date,
+                    weekday_number=date.weekday(),
+                    year=year,
+                    month=month,
+                    week=week,
+                )
+                values = self.get_values(sum_series, station.name)
+                day_data, _ = DayData.objects.get_or_create(station=station, day=day)
+                self.save_values(values, day_data)
+            if not prev_week_number or prev_week_number != week_number:
+                prev_week_number = week_number
+                logger.info(f"Saved days for week {week_number} of year {year_number}")
+
+    def save_hours(self, df, stations):
+        logger.info("Saving hours...")
+        hours = df.groupby([df.index.year, df.index.month, df.index.day, df.index.hour])
+        for i_station, station in enumerate(stations):
+            prev_day_number = None
+            prev_month_number = None
+            values = {k: [] for k in self.ALL_TYPE_DIRS}
+            for index, row in hours:
+                sum_series = row.sum()
+                year_number, month_number, day_number, _ = index
+                if not prev_day_number:
+                    prev_day_number = day_number
+                if not prev_month_number:
+                    prev_month_number = month_number
+
+                if day_number != prev_day_number or month_number != prev_month_number:
+                    """
+                    If the day or month changed, save the hours for the day and clear the values dict.
+                    """
+                    if month_number != prev_month_number:
+                        prev_day_number = day_number
+                    day = Day.objects.get(
+                        date=datetime(year_number, month_number, prev_day_number),
+                        station=station,
                     )
-                    prev_time_dst_hour = dateutil.parser.parse(
-                        str(prev_time.tzinfo.utcoffset(prev_time))
+                    hour_data, _ = HourData.objects.get_or_create(
+                        station=station, day=day
                     )
-                    # If the prev_time_dst_hour is less than current_time_dst_hour,
-                    # then this is the hour clocks are changed backwards, i.e. wintertime
-                    if prev_time_dst_hour < current_time_dst_hour:
-                        # Add an hour where the values are 0, for the nonexistent hour 3:00-4:00
-                        # To keep the hour data consistent with 24 hours.
+                    for td in self.ALL_TYPE_DIRS:
+                        setattr(hour_data, f"values_{td.lower()}", values[td])
+                    hour_data.save()
+                    values = {k: [] for k in self.ALL_TYPE_DIRS}
+                    # Output the log message only when the last station is saved
+                    if i_station == len(stations) - 1:
                        logger.info(
                            f"Detected daylight savings time change to summer. 
DateTime: {current_time}" - ) - temp_hour = {} - for station in stations: - temp_hour[station] = {} - for column in column_names[1:]: - station_name, station_type = self.get_station_name_and_type( - column - ) - temp_hour[station_name][station_type] = 0 - self.save_hour_data(temp_hour, current_hours) - changed_daylight_saving_to_summer = True - - current_year_number = current_time.year - current_week_number = int(current_time.strftime("%-V")) - current_weekday_number = current_time.weekday() - current_month_number = datetime.date(current_time).month - - # Adds data for an hour every fourth iteration, sample rate is 15min. - if index % 4 == 0: - # If daylight has changed to summer we do not store the hourly data - if changed_daylight_saving_to_summer: - changed_daylight_saving_to_summer = False - else: - self.save_hour_data(current_hour, current_hours) - # Clear current_hour after storage, to get data for every hour. - current_hour = {} - - if prev_weekday_number != current_weekday_number or not current_hours: - # Store hour data if data exists. - if current_hours: - self.create_and_save_day_data(stations, current_hours, current_days) - current_hours = {} - - # Year, month, week tables are created before the day tables - # to ensure correct relations. - if prev_year_number != current_year_number or not current_years: - year_has_changed = True - # If year has changed, we must store the current month data before storing - # the year data, the year data is calculated from the month datas. - self.create_and_save_month_data( - stations, current_months, current_years - ) - self.create_and_save_year_data(stations, current_years) - - for station in stations: - year = Year.objects.create( - year_number=current_year_number, station=stations[station] - ) - current_years[station] = year - current_weeks[station].years.add(year) - prev_year_number = current_year_number - - if prev_month_number != current_month_number or not current_months: - if prev_month_number and not year_has_changed: - self.create_and_save_month_data( - stations, current_months, current_years - ) - for station in stations: - month = Month.objects.create( - station=stations[station], - year=current_years[station], - month_number=current_month_number, - ) - current_months[station] = month - prev_month_number = current_month_number - - if prev_week_number != current_week_number or not current_weeks: - if prev_week_number and not year_has_changed: - self.create_and_save_week_data(stations, current_weeks) - for station in stations: - week = Week.objects.create( - station=stations[station], week_number=current_week_number + f"Saved hour data for day {prev_day_number}, month {prev_month_number} year {year_number}" ) - week.years.add(current_years[station]) - current_weeks[station] = week - prev_week_number = current_week_number - if year_has_changed: - year_has_changed = False - for station in stations: - day = Day.objects.create( - station=stations[station], - date=current_time, - weekday_number=current_weekday_number, - week=current_weeks[station], - month=current_months[station], - year=current_years[station], - ) - current_days[station] = day - hour_data = HourData.objects.create( - station=stations[station], day=current_days[station] - ) - current_hours[station] = hour_data - prev_weekday_number = current_weekday_number - - """ - Build the current_hour dict by iterating all cols in row. - current_hour dict store the rows in a structured form. 
-        current_hour keys are the station names and every value contains a dict with the type as its key
-        The type is: A|P|J|B (Auto, Pyöräilijä, Jalankulkija, Bussi) + direction P|K , e.g. "JK"
-        current_hour[station][station_type] = value, e.g. current_hour["TeatteriSilta"]["PK"] = 6
-        Note the first col is the TIMESTAMP_COL_NAME and is discarded, the rest are observations
-        for every station.
-        """
-            for column in column_names[1:]:
-                station_name, station_type = self.get_station_name_and_type(column)
-                value = row[column]
-                if pd.isnull(value):
-                    value = int(0)
+                    prev_day_number = day_number
+                    prev_month_number = month_number
                 else:
-                    value = int(row[column])
-                if value > ERRORNEOUS_VALUE_THRESHOLD:
-                    logger.warning(
-                        (
-                            f"Found errorneous(>={ERRORNEOUS_VALUE_THRESHOLD}) value: {value}, "
-                            f"column: {column}, time: {current_time}, index: {index}"
-                        )
-                    )
-                    errorneous_values += 1
-                    value = 0
-                if value < 0:
-                    logger.warning(
-                        (
-                            f"Found negative value: {value}, "
-                            f"column: {column}, time: {current_time}, index: {index}"
-                        )
-                    )
-                    negative_values += 1
-                    value = 0
-                if station_name not in current_hour:
-                    current_hour[station_name] = {}
-                # if type exist in current_hour, we add the new value to get the hourly sample
-                if station_type in current_hour[station_name]:
-                    current_hour[station_name][station_type] = (
-                        int(current_hour[station_name][station_type]) + value
-                    )
-                else:
-                    current_hour[station_name][station_type] = value
-            prev_time = current_time
-        # Finally save hours, days, months etc. that are not fully populated.
-        self.save_hour_data(current_hour, current_hours)
-        self.create_and_save_day_data(stations, current_hours, current_days)
-        self.create_and_save_week_data(stations, current_weeks)
-        self.create_and_save_month_data(stations, current_months, current_years)
-        self.create_and_save_year_data(stations, current_years)
-
-        import_state.current_year_number = current_year_number
-        import_state.current_month_number = current_month_number
+                    # Add data to values dict for an hour
+                    for station_types in self.STATION_TYPES:
+                        for i in range(3):
+                            if i < 2:
+                                dir_key = f"{station.name} {station_types[i].upper()}"
+                                val = sum_series.get(dir_key, 0)
+                            else:
+                                k_key = f"{station.name} {station_types[0].upper()}"
+                                p_key = f"{station.name} {station_types[1].upper()}"
+                                val = sum_series.get(p_key, 0) + sum_series.get(
+                                    k_key, 0
+                                )
+                            values_key = station_types[i].upper()
+                            values[values_key].append(val)
+
+    def save_observations(self, csv_data, start_time, csv_data_source=ECO_COUNTER):
+        import_state = ImportState.objects.get(csv_data_source=csv_data_source)
+        # Populate the stations list; it is used to set/look up station relations.
+        stations = [
+            station
+            for station in Station.objects.filter(csv_data_source=csv_data_source)
+        ]
+        df = csv_data
+        df["Date"] = pd.to_datetime(df["startTime"], format="%Y-%m-%dT%H:%M")
+        df = df.drop("startTime", axis=1)
+        df = df.set_index("Date")
+        # Fill missing cells with the value 0
+        df = df.fillna(0)
+        # Set negative numbers to 0
+        df = df.clip(lower=0)
+        # Set values higher than ERRORNEOUS_VALUE_THRESHOLD to 0
+        df[df > ERRORNEOUS_VALUE_THRESHOLD] = 0
+        if not import_state.current_year_number:
+            # In the initial import, populate all years.
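+            # current_year_number is unset here only after an initial (--init)
+            # import, as handle() then recreates the ImportState row without it.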
+ self.save_years(df, stations) + self.save_months(df, stations) + if import_state.current_year_number: + end_month_number = df.index[-1].month + self.save_current_year(stations, start_time.year, end_month_number) + + self.save_weeks(df, stations) + self.save_days(df, stations) + self.save_hours(df, stations) + end_date = df.index[-1] + import_state.current_year_number = end_date.year + import_state.current_month_number = end_date.month import_state.save() - logger.info( - f"Found {errorneous_values} errorneous(>={ERRORNEOUS_VALUE_THRESHOLD}) values." - ) - logger.info(f"Found {negative_values} negative values.") - logger.info(f"Imported observations until:{str(current_time)}") + logger.info(f"Imported observations until:{str(end_date)}") def add_arguments(self, parser): parser.add_argument( @@ -540,38 +398,19 @@ def handle(self, *args, **options): logger.info(f"Deleting tables for: {initial_import_counters}") self.delete_tables(csv_data_sources=initial_import_counters) for counter in initial_import_counters: + ImportState.objects.filter(csv_data_source=counter).delete() import_state = ImportState.objects.create( csv_data_source=counter, - current_year_number=COUNTER_START_YEARS[counter], ) logger.info(f"Retrieving stations for {counter}.") - if counter == ECO_COUNTER: - save_eco_counter_stations() - elif counter == TRAFFIC_COUNTER: - save_traffic_counter_stations() - elif counter == LAM_COUNTER: - save_lam_counter_stations() + save_stations(counter) if options["test_counter"]: logger.info("Testing eco_counter importer.") counter = options["test_counter"][0] start_time = options["test_counter"][1] end_time = options["test_counter"][2] - import_state, created = ImportState.objects.get_or_create( - csv_data_source=counter - ) - if created: - import_state.current_year_number = start_time.year - import_state.current_month_number = start_time.month - import_state.save() - if counter == ECO_COUNTER: - save_eco_counter_stations() - elif counter == TRAFFIC_COUNTER: - save_traffic_counter_stations() - elif counter == LAM_COUNTER: - save_lam_counter_stations() - else: - raise CommandError("No valid counter argument given.") + import_state, _ = ImportState.objects.get_or_create(csv_data_source=counter) test_dataframe = get_test_dataframe(counter) csv_data = gen_eco_counter_test_csv( test_dataframe.keys(), start_time, end_time @@ -579,10 +418,9 @@ def handle(self, *args, **options): self.save_observations( csv_data, start_time, - test_dataframe.keys(), csv_data_source=counter, ) - # Import if counters arg or (initial import). + # Import if counters arg or initial import. 
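+        # Incremental runs resume from the ImportState: save_observations records
+        # the year and month of the last imported row, and the next run restarts
+        # from the first day of that month.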
         if options["counters"] or initial_import_counters:
             if not initial_import_counters:
                 # run with counters argument
@@ -596,32 +434,49 @@ def handle(self, *args, **options):
                 import_state = ImportState.objects.filter(
                     csv_data_source=counter
                 ).first()
-                if counter == LAM_COUNTER:
-                    start_time = f"{import_state.current_year_number}-{import_state.current_month_number}-01"
-                    csv_data = get_lam_counter_csv(start_time)
-                elif counter == ECO_COUNTER:
-                    csv_data = get_eco_counter_csv()
-                elif counter == TRAFFIC_COUNTER:
-                    csv_data = get_traffic_counter_csv(
-                        start_year=import_state.current_year_number
+
+                if (
+                    import_state.current_year_number
+                    and import_state.current_month_number
+                ):
+                    start_time = "{year}-{month}-1T00:00".format(
+                        year=import_state.current_year_number,
+                        month=import_state.current_month_number,
                     )
-                start_time = "{year}-{month}-1T00:00".format(
-                    year=import_state.current_year_number,
-                    month=import_state.current_month_number,
-                )
+                else:
+                    start_month = (
+                        TELRAAM_COUNTER_START_MONTH
+                        if counter == TELRAAM_COUNTER
+                        else "01"
+                    )
+                    start_time = f"{COUNTER_START_YEARS[counter]}-{start_month}-01"
+
+                start_time = dateutil.parser.parse(start_time)
                 start_time = self.TIMEZONE.localize(start_time)
                 # The timeformat for the input data is : 2020-03-01T00:00
                 # Convert starting time to input datas timeformat
                 start_time_string = start_time.strftime("%Y-%m-%dT%H:%M")
+                match counter:
+                    case COUNTERS.TELRAAM_COUNTER:
+                        csv_data = get_telraam_counter_csv(start_time.date())
+                    case COUNTERS.LAM_COUNTER:
+                        csv_data = get_lam_counter_csv(start_time.date())
+                    case COUNTERS.ECO_COUNTER:
+                        csv_data = get_eco_counter_csv()
+                    case COUNTERS.TRAFFIC_COUNTER:
+                        if import_state.current_year_number:
+                            start_year = import_state.current_year_number
+                        else:
+                            start_year = TRAFFIC_COUNTER_START_YEAR
+                        csv_data = get_traffic_counter_csv(start_year=start_year)
                 start_index = csv_data.index[
-                    csv_data[TIMESTAMP_COL_NAME] == start_time_string
+                    csv_data[INDEX_COLUMN_NAME] == start_time_string
                 ].values[0]
                 # As LAM data is fetched with a timespan, no index data is available, instead
                 # show time.
                 if counter == LAM_COUNTER:
                     logger.info(f"Starting saving observations at time:{start_time}")
-                else:
                     logger.info(f"Starting saving observations at index:{start_index}")
@@ -629,7 +484,6 @@ def handle(self, *args, **options):
                 self.save_observations(
                     csv_data,
                     start_time,
-                    csv_data.keys(),
                     csv_data_source=counter,
                 )
                 # Try to Free memory
diff --git a/eco_counter/management/commands/import_telraam_to_csv.py b/eco_counter/management/commands/import_telraam_to_csv.py
new file mode 100644
index 000000000..6ef2e0f5b
--- /dev/null
+++ b/eco_counter/management/commands/import_telraam_to_csv.py
@@ -0,0 +1,266 @@
+"""
+Imports hourly Telraam data for given cameras.
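+The import can be started from a given date with the --from-date YYYY-MM-DD argument;
+note that the Telraam API serves at most three months of history.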
+Saves a CSV file for every camera and every day to PROJECT_ROOT/media/telraam_data/
+
+"""
+
+import json
+import logging
+import os
+from datetime import date, datetime, timedelta
+
+import pandas as pd
+import pytz
+from django.conf import settings
+from django.core.management import BaseCommand
+
+from eco_counter.constants import (
+    INDEX_COLUMN_NAME,
+    TELRAAM_COUNTER_API_TIME_FORMAT,
+    TELRAAM_COUNTER_CAMERAS,
+    TELRAAM_COUNTER_CSV_FILE,
+    TELRAAM_COUNTER_CSV_FILE_PATH,
+    TELRAAM_COUNTER_START_MONTH,
+    TELRAAM_COUNTER_START_YEAR,
+    TELRAAM_COUNTER_TRAFFIC_URL,
+    TELRAAM_CSV,
+    TELRAAM_HTTP,
+)
+from eco_counter.management.commands.utils import get_telraam_cameras
+from eco_counter.models import ImportState
+
+TOKEN = settings.TELRAAM_TOKEN
+assert TOKEN
+logger = logging.getLogger("eco_counter")
+
+HEAVY = "heavy"
+VEHICLE_TYPES = {
+    "pedestrian": "J",
+    "bike": "P",
+    "car": "A",
+    HEAVY: "A",  # Added to the car column
+}
+LEFT = "lft"
+RIGHT = "rgt"
+TOTAL = ""  # Total fields have no postfix in names
+DIRECTIONS = [LEFT, RIGHT, TOTAL]
+
+
+def get_mappings(station_name: str, direction: bool = True) -> dict:
+    """
+    If direction is True, LEFT (lft) will be K (Keskustaan päin).
+    Returns the mappings, e.g.,
+    "pedestrian_lft": "station_name JK"
+    """
+    dir1, dir2, dir_tot = "K", "P", "T"
+    if not direction:
+        dir1, dir2 = dir2, dir1
+    dirs = {LEFT: dir1, RIGHT: dir2, TOTAL: dir_tot}
+    column_mappings = {}
+    for veh in VEHICLE_TYPES.items():
+        for dir in DIRECTIONS:
+            if dir == TOTAL:
+                key = f"{veh[0]}{dir}"
+            else:
+                key = f"{veh[0]}_{dir}"
+
+            value = f"{veh[1]}{dirs[dir]}"
+            column_mappings[key] = value
+
+    mappings = {}
+    for field in column_mappings.keys():
+        mappings[field] = f"{station_name} {column_mappings[field]}"
+    return mappings
+
+
+def fetch_traffic_report(from_date: str, end_date: str, camera_id: str):
+    headers = {
+        "X-Api-Key": TOKEN,
+        "Content-Type": "application/json",
+    }
+
+    data = {
+        "level": "instances",  # Statistics for individual cameras
+        "format": "per-hour",
+        "id": camera_id,
+        "time_start": from_date,
+        "time_end": end_date,
+    }
+
+    response = TELRAAM_HTTP.post(
+        TELRAAM_COUNTER_TRAFFIC_URL, headers=headers, data=json.dumps(data)
+    )
+    return response.json().get("report", [])
+
+
+def get_delta_hours(from_date: datetime, end_date: datetime) -> int:
+    delta = end_date - from_date
+    delta_hours = int(round(delta.total_seconds() / 3600))
+    return delta_hours
+
+
+def get_day_data(
+    day_date: date, camera_id: str, utc_offset: timedelta, check_delta_hours: bool = True
+) -> tuple[list, int]:
+    from_datetime = (
+        datetime(day_date.year, day_date.month, day_date.day, 0, 0, 0) - utc_offset
+    )
+    from_datetime_str = from_datetime.strftime(TELRAAM_COUNTER_API_TIME_FORMAT)
+    end_datetime = (
+        datetime(day_date.year, day_date.month, day_date.day)
+        + timedelta(hours=23)
+        + timedelta(minutes=59)
+    ) - utc_offset
+
+    end_datetime_str = end_datetime.strftime(TELRAAM_COUNTER_API_TIME_FORMAT)
+    report = fetch_traffic_report(from_datetime_str, end_datetime_str, camera_id)
+    delta_hours = len(report)
+    if not report:
+        logger.warning(
+            f"No report found for camera {camera_id}, populating with empty dicts"
+        )
+        report = [{} for a in range(delta_hours)]
+    else:
+        logger.info(
+            f"Imported report with {len(report)} elements for camera {camera_id}"
+        )
+    if check_delta_hours and delta_hours != 24:
+        dif = 24 - delta_hours
+        if day_date == date.today():
+            logger.warning(
+                f"Fetched report with delta_hours not equal to 24, appending missing {dif} empty dicts."
+            )
+            report += [{} for a in range(dif)]
+
+        else:
+            # Case when the camera gets turned on in the middle of the day.
+            logger.warning(
+                f"Fetched report with delta_hours not equal to 24, adding missing {dif} empty dicts to start of report."
+            )
+            report = [{} for a in range(dif)] + report
+        delta_hours = len(report)
+    res = []
+    start_date = from_datetime
+    for item in report:
+        d = {}
+        d["date"] = datetime.strftime(start_date, TELRAAM_COUNTER_API_TIME_FORMAT)
+        for veh in VEHICLE_TYPES.keys():
+            for dir in DIRECTIONS:
+                if dir == TOTAL:
+                    key = f"{veh}{dir}"
+                else:
+                    key = f"{veh}_{dir}"
+                val = int(round(item.get(key, 0)))
+                d[key] = val
+        res.append(d)
+        start_date += timedelta(hours=1)
+    return res, delta_hours
+
+
+def save_dataframe(from_date: date = None) -> date:
+    can_overwrite_csv_file = True if from_date else False
+    if not os.path.exists(TELRAAM_COUNTER_CSV_FILE_PATH):
+        os.makedirs(TELRAAM_COUNTER_CSV_FILE_PATH)
+        ImportState.objects.filter(csv_data_source=TELRAAM_CSV).delete()
+        import_state = ImportState.objects.create(
+            csv_data_source=TELRAAM_CSV,
+            current_year_number=TELRAAM_COUNTER_START_YEAR,
+            current_month_number=TELRAAM_COUNTER_START_MONTH,
+            current_day_number=1,
+        )
+    else:
+        import_state = ImportState.objects.filter(csv_data_source=TELRAAM_CSV).first()
+    if not from_date:
+        from_date = date(
+            import_state.current_year_number,
+            import_state.current_month_number,
+            import_state.current_day_number,
+        )
+    date_today = date.today()
+    # Source data datetimes are in UTC. Calculate the utc_offset
+    utc_offset = pytz.timezone("Europe/Helsinki").utcoffset(datetime.now())
+    logger.info(f"Fetching Telraam data from {str(from_date)} to {str(date_today)}")
+    cameras = get_telraam_cameras()
+    for camera in cameras:
+        start_date = from_date
+        while start_date <= date_today:
+            report, delta_hours = get_day_data(
+                start_date, camera["instance_id"], utc_offset
+            )
+            mappings = get_mappings(
+                camera["mac"], direction=TELRAAM_COUNTER_CAMERAS[camera["mac"]]
+            )
+            columns = {}
+            columns[INDEX_COLUMN_NAME] = []
+            for hour in range(delta_hours):
+                col_date = (
+                    datetime.strptime(
+                        report[hour]["date"], TELRAAM_COUNTER_API_TIME_FORMAT
+                    )
+                    + utc_offset
+                )
+                col_date_str = col_date.strftime(TELRAAM_COUNTER_API_TIME_FORMAT)
+                columns[INDEX_COLUMN_NAME].append(col_date_str)
+
+                for mapping in mappings.items():
+                    # key is the column name, e.g., "<station name> AK"
+                    key = mapping[1]
+                    value_key = mapping[0]
+                    values_list = columns.get(key, [])
+                    if HEAVY in value_key:
+                        # Add heavy values to the car column, as the mapping is the same.
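+                        # ("car" and "heavy" share the same mapping value, e.g. both
+                        # "car_lft" and "heavy_lft" map to "<station> AK"; as "car" is
+                        # iterated before "heavy", values_list[-1] is this hour's car count.)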
+                        values_list[-1] += report[hour][value_key]
+                    else:
+                        values_list.append(report[hour][value_key])
+                    columns[key] = values_list
+            df = pd.DataFrame(data=columns, index=columns[INDEX_COLUMN_NAME])
+            df = df.drop(columns=[INDEX_COLUMN_NAME], axis=1)
+            df.index.rename(INDEX_COLUMN_NAME, inplace=True)
+            df = df.fillna(0)
+            df = df.astype(int)
+
+            csv_file = TELRAAM_COUNTER_CSV_FILE.format(
+                id=camera["mac"],
+                day=start_date.day,
+                month=start_date.month,
+                year=start_date.year,
+            )
+            if start_date == date_today:
+                # Remove the latest CSV, as it might not be fully populated until the end of the day
+                if os.path.exists(csv_file):
+                    os.remove(csv_file)
+            if not os.path.exists(csv_file) or can_overwrite_csv_file:
+                df.to_csv(csv_file)
+            start_date += timedelta(days=1)
+
+    start_date -= timedelta(days=1)
+    import_state.current_year_number = start_date.year
+    import_state.current_month_number = start_date.month
+    import_state.current_day_number = start_date.day
+    import_state.save()
+    return start_date
+
+
+class Command(BaseCommand):
+    def add_arguments(self, parser):
+        help_msg = (
+            "The date from which the import begins in YYYY-MM-DD format. Note, the date cannot be more than "
+            + "three months in the past, which is the maximum length of history the Telraam API supports."
+        )
+        parser.add_argument("--from-date", type=str, help=help_msg)
+
+    def handle(self, *args, **options):
+        logger.info("Importing Telraam data...")
+        from_date_arg = options.get("from_date", None)
+        from_date = None
+        if from_date_arg:
+            try:
+                from_date = datetime.strptime(from_date_arg, "%Y-%m-%d").date()
+            except ValueError:
+                logger.error("Invalid date argument format. Use YYYY-MM-DD.")
+                return
+
+        until_date = save_dataframe(from_date)
+
+        logger.info(f"Telraam data imported until {str(until_date)}")
diff --git a/eco_counter/management/commands/utils.py b/eco_counter/management/commands/utils.py
index c47e9f19e..695eefb81 100644
--- a/eco_counter/management/commands/utils.py
+++ b/eco_counter/management/commands/utils.py
@@ -1,5 +1,4 @@
 import io
-import json
 import logging
 from datetime import date, timedelta
 
@@ -8,69 +7,124 @@
 import requests
 from django.conf import settings
 from django.contrib.gis.gdal import DataSource
-from django.contrib.gis.geos import GEOSGeometry, Point
+from django.contrib.gis.geos import GEOSGeometry, LineString, MultiLineString, Point
 
-from eco_counter.models import (
+from eco_counter.constants import (
+    COUNTERS,
     ECO_COUNTER,
+    INDEX_COLUMN_NAME,
     LAM_COUNTER,
-    Station,
+    LAM_STATION_LOCATIONS,
+    LAM_STATION_USER_HEADER,
+    LAM_STATIONS_API_FETCH_URL,
+    LAM_STATIONS_DIRECTION_MAPPINGS,
+    TELRAAM_COUNTER,
+    TELRAAM_COUNTER_API_TIME_FORMAT,
+    TELRAAM_COUNTER_CAMERA_SEGMENTS_URL,
+    TELRAAM_COUNTER_CAMERAS,
+    TELRAAM_COUNTER_CAMERAS_URL,
+    TELRAAM_COUNTER_CSV_FILE,
+    TELRAAM_CSV,
+    TELRAAM_HTTP,
     TRAFFIC_COUNTER,
-    TRAFFIC_COUNTER_END_YEAR,
-    TRAFFIC_COUNTER_START_YEAR,
+    TRAFFIC_COUNTER_CSV_URLS,
+    TRAFFIC_COUNTER_METADATA_GEOJSON,
 )
+from eco_counter.models import ImportState, Station
 from eco_counter.tests.test_import_counter_data import TEST_COLUMN_NAMES
 from mobility_data.importers.utils import get_root_dir
 
 logger = logging.getLogger("eco_counter")
 
-TIMESTAMP_COL_NAME = "startTime"
-TRAFFIC_COUNTER_METADATA_GEOJSON = "traffic_counter_metadata.geojson"
-# LAM stations located in the municipalities list are included.
-LAM_STATION_MUNICIPALITIES = ["Turku", "Raisio", "Kaarina", "Lieto"]
-
-LAM_STATIONS_API_FETCH_URL = (
-    settings.LAM_COUNTER_API_BASE_URL
-    + "?api=liikennemaara&tyyppi=h&pvm={start_date}&loppu={end_date}"
-    + "&lam_type=option1&piste={id}&luokka=kaikki&suunta={direction}&sisallytakaistat=0"
-)
-LAM_STATIONS_DIRECTION_MAPPINGS = {
-    "1_Piikkiö": "P",
-    "1_Naantali": "P",
-    "2_Naantali": "K",
-    "1_Turku": "K",
-    "2_Turku": "K",
-    "2_Helsinki": "P",
-    "1_Suikkila.": "K",
-    "2_Artukainen.": "P",
-    "1_Vaasa": "P",
-    "1_Kuusisto": "P",
-    "2_Kaarina": "K",
-    "1_Tampere": "P",
-    "1_Hämeenlinna": "P",
-}
-
-keys = [k for k in range(TRAFFIC_COUNTER_START_YEAR, TRAFFIC_COUNTER_END_YEAR + 1)]
-# Create a dict where the years to be importer are keys and the value is the url of the csv data.
-# e.g. {2015, "https://data.turku.fi/2yxpk2imqi2mzxpa6e6knq/2015_laskenta_juha.csv"}
-TRAFFIC_COUNTER_CSV_URLS = dict(
-    [
-        (k, f"{settings.TRAFFIC_COUNTER_OBSERVATIONS_BASE_URL}{k}_laskenta_juha.csv")
-        for k in keys
-    ]
-)
 
 
 class LAMStation:
     def __init__(self, feature):
-        self.lam_id = feature["tmsNumber"].as_int()
-        names = json.loads(feature["names"].as_string())
-        self.name = names["fi"]
-        self.name_sv = names["sv"]
-        self.name_en = names["en"]
+        self.station_id = feature["tmsNumber"].as_int()
+        self.name = self.name_sv = self.name_en = feature["name"].as_string()
         # The source data has a obsolete Z dimension with value 0, remove it.
         geom = feature.geom.clone()
         geom.coord_dim = 2
-        self.geom = GEOSGeometry(geom.wkt, srid=4326)
-        self.geom.transform(settings.DEFAULT_SRID)
+        self.location = GEOSGeometry(geom.wkt, srid=4326)
+        self.location.transform(settings.DEFAULT_SRID)
+
+
+class EcoCounterStation:
+    def __init__(self, feature):
+        self.name = feature["properties"]["Nimi"]
+        lon = feature["geometry"]["coordinates"][0]
+        lat = feature["geometry"]["coordinates"][1]
+        self.location = Point(lon, lat, srid=4326)
+        self.location.transform(settings.DEFAULT_SRID)
+
+
+class TrafficCounterStation:
+    def __init__(self, feature):
+        self.name = feature["Osoite_fi"].as_string()
+        self.name_sv = feature["Osoite_sv"].as_string()
+        self.name_en = feature["Osoite_en"].as_string()
+        geom = GEOSGeometry(feature.geom.wkt, srid=feature.geom.srid)
+        geom.transform(settings.DEFAULT_SRID)
+        self.location = geom
+
+
+class TelraamCounterStation:
+    # The Telraam API returns the coordinates in EPSG 4326
+    SOURCE_SRID = 4326
+    TARGET_SRID = settings.DEFAULT_SRID
+
+    def get_location_and_geometry(self, id):
+        url = TELRAAM_COUNTER_CAMERA_SEGMENTS_URL.format(id=id)
+        headers = {
+            "X-Api-Key": settings.TELRAAM_TOKEN,
+        }
+        response = TELRAAM_HTTP.get(url, headers=headers)
+        assert (
+            response.status_code == 200
+        ), "Could not fetch segment for camera {id}".format(id=id)
+        json_data = response.json()
+        coords = json_data["features"][0]["geometry"]["coordinates"]
+        lss = []
+        for coord in coords:
+            ls = LineString(coord, srid=self.SOURCE_SRID)
+            lss.append(ls)
+        geometry = MultiLineString(lss, srid=self.SOURCE_SRID)
+        geometry.transform(self.TARGET_SRID)
+        mid_line = round(len(coords) / 2)
+        mid_point = round(len(coords[mid_line]) / 2)
+        location = Point(coords[mid_line][mid_point], srid=self.SOURCE_SRID)
+        location.transform(self.TARGET_SRID)
+        return location, geometry
+
+    def __init__(self, feature):
+        self.name = feature["mac"]
+        self.name_sv = feature["mac"]
+        self.name_en = feature["mac"]
+        self.location, self.geometry = self.get_location_and_geometry(
+            feature["segment_id"]
+        )
+        self.station_id = feature["mac"]
+
+
+class
ObservationStation( + LAMStation, EcoCounterStation, TrafficCounterStation, TelraamCounterStation +): + def __init__(self, csv_data_source, feature): + self.csv_data_source = csv_data_source + self.name = None + self.name_sv = None + self.name_en = None + self.location = None + self.geometry = None + self.station_id = None + match csv_data_source: + case COUNTERS.TELRAAM_COUNTER: + TelraamCounterStation.__init__(self, feature) + case COUNTERS.LAM_COUNTER: + LAMStation.__init__(self, feature) + case COUNTERS.ECO_COUNTER: + EcoCounterStation.__init__(self, feature) + case COUNTERS.TRAFFIC_COUNTER: + TrafficCounterStation.__init__(self, feature) def get_traffic_counter_metadata_data_layer(): @@ -152,13 +206,13 @@ def get_traffic_counter_csv(start_year=2015): logger.info(df.info(verbose=False)) logger.info(f"{ids_not_found} IDs not found in metadata.") # Move column 'startTime to first (0) position. - df.insert(0, TIMESTAMP_COL_NAME, df.pop(TIMESTAMP_COL_NAME)) + df.insert(0, INDEX_COLUMN_NAME, df.pop(INDEX_COLUMN_NAME)) # df.to_csv("tc_out.csv") return df def get_lam_dataframe(csv_url): - response = requests.get(csv_url) + response = requests.get(csv_url, headers=LAM_STATION_USER_HEADER) string_data = response.content csv_data = pd.read_csv(io.StringIO(string_data.decode("utf-8")), delimiter=";") return csv_data @@ -192,7 +246,6 @@ def get_lam_counter_csv(start_date): 5. Shift the columns with the calculated shift_index, this must be done if there is no data for the station from the start_date. This ensures the data matches the timestamps. """ - drop_columns = [ "pistetunnus", "sijainti", @@ -211,12 +264,14 @@ def get_lam_counter_csv(start_date): num_15min_freq = dif_time.total_seconds() / 60 / 15 time_stamps = pd.date_range(start_time, freq="15T", periods=num_15min_freq) data_frame = pd.DataFrame() - data_frame[TIMESTAMP_COL_NAME] = time_stamps + data_frame[INDEX_COLUMN_NAME] = time_stamps for station in Station.objects.filter(csv_data_source=LAM_COUNTER): # In the source data the directions are 1 and 2. for direction in range(1, 3): - df = get_lam_station_dataframe(station.lam_id, direction, start_date, today) - # Read the direction + df = get_lam_station_dataframe( + station.station_id, direction, start_date, today + ) + # Read the direction, e.g., Vaasa direction_name = df["suuntaselite"].iloc[0] # From the mappings determine the 'keskustaan päin' or 'poispäin keskustasta' direction. try: @@ -230,7 +285,7 @@ def get_lam_counter_csv(start_date): # Calculate shift index, i.e., if data starts from different position that the start_date. # then shift the rows to the correct position using the calculated shift_index. shift_index = data_frame.index[ - getattr(data_frame, TIMESTAMP_COL_NAME) == str(start_time) + getattr(data_frame, INDEX_COLUMN_NAME) == str(start_time) ][0] column_name = f"{station.name} A{direction_value}" # Drop all unnecessary columns. 
@@ -277,52 +332,34 @@ def get_lam_counter_csv(start_date): return data_frame -def save_lam_counter_stations(): +def has_list_elements_in_string(elements, string): + for element in elements: + if element in string: + return True + return False + + +def get_lam_counter_stations(): + stations = [] data_layer = DataSource(settings.LAM_COUNTER_STATIONS_URL)[0] - saved = 0 for feature in data_layer: - if feature["municipality"].as_string() in LAM_STATION_MUNICIPALITIES: - station_obj = LAMStation(feature) - if Station.objects.filter(name=station_obj.name).exists(): - continue - station = Station() - station.lam_id = station_obj.lam_id - station.name = station_obj.name - station.name_sv = station_obj.name_sv - station.name_en = station_obj.name_en - station.csv_data_source = LAM_COUNTER - station.geom = station_obj.geom - station.save() - saved += 1 - logger.info(f"Saved {saved} LAM Counter stations.") - - -def save_traffic_counter_stations(): - """ - Saves the stations defined in the metadata to Station table. - """ - saved = 0 + if has_list_elements_in_string( + LAM_STATION_LOCATIONS, feature["name"].as_string() + ): + stations.append(ObservationStation(LAM_COUNTER, feature)) + return stations + + +def get_traffic_counter_stations(): + stations = [] data_layer = get_traffic_counter_metadata_data_layer() for feature in data_layer: - name = feature["Osoite_fi"].as_string() - name_sv = feature["Osoite_sv"].as_string() - name_en = feature["Osoite_en"].as_string() - if Station.objects.filter(name=name).exists(): - continue - station = Station() - station.name = name - station.name_sv = name_sv - station.name_en = name_en - station.csv_data_source = TRAFFIC_COUNTER - geom = GEOSGeometry(feature.geom.wkt, srid=feature.geom.srid) - geom.transform(settings.DEFAULT_SRID) - station.geom = geom - station.save() - saved += 1 - logger.info(f"Saved {saved} Traffic Counter stations.") + stations.append(ObservationStation(TRAFFIC_COUNTER, feature)) + return stations -def save_eco_counter_stations(): +def get_eco_counter_stations(): + stations = [] response = requests.get(settings.ECO_COUNTER_STATIONS_URL) assert ( response.status_code == 200 @@ -331,21 +368,128 @@ def save_eco_counter_stations(): ) response_json = response.json() features = response_json["features"] - saved = 0 for feature in features: - station = Station() - name = feature["properties"]["Nimi"] - if not Station.objects.filter(name=name).exists(): - station.name = name - station.csv_data_source = ECO_COUNTER - lon = feature["geometry"]["coordinates"][0] - lat = feature["geometry"]["coordinates"][1] - point = Point(lon, lat, srid=4326) - point.transform(settings.DEFAULT_SRID) - station.geom = point - station.save() - saved += 1 - logger.info(f"Saved {saved} Eco Counter stations.") + stations.append(ObservationStation(ECO_COUNTER, feature)) + return stations + + +def fetch_telraam_camera(mac_id): + headers = { + "X-Api-Key": settings.TELRAAM_TOKEN, + } + url = TELRAAM_COUNTER_CAMERAS_URL.format(mac_id=mac_id) + response = TELRAAM_HTTP.get(url, headers=headers) + cameras = response.json().get("camera", None) + if cameras: + # Return first camera, as currently only one camera is + # returned in Turku by mac_id + return cameras[0] + else: + return None + + +def get_telraam_cameras(): + cameras = [] + for camera in TELRAAM_COUNTER_CAMERAS.items(): + fetched_camera = fetch_telraam_camera(camera[0]) + if fetched_camera: + cameras.append(fetched_camera) + else: + logger.warning(f"Could not fetch camera {camera[0]}") + return cameras + + +def 
get_telraam_counter_stations():
+    stations = []
+    cameras = get_telraam_cameras()
+    for feature in cameras:
+        stations.append(ObservationStation(TELRAAM_COUNTER, feature))
+    return stations
+
+
+def get_telraam_counter_csv(from_date):
+    df = pd.DataFrame()
+    try:
+        import_state = ImportState.objects.get(csv_data_source=TELRAAM_CSV)
+    except ImportState.DoesNotExist:
+        return None
+    end_date = date(
+        import_state.current_year_number,
+        import_state.current_month_number,
+        import_state.current_day_number,
+    )
+    for camera in get_telraam_cameras():
+        df_cam = pd.DataFrame()
+        start_date = from_date
+
+        while start_date <= end_date:
+            csv_file = TELRAAM_COUNTER_CSV_FILE.format(
+                id=camera["mac"],
+                day=start_date.day,
+                month=start_date.month,
+                year=start_date.year,
+            )
+            try:
+                df_tmp = pd.read_csv(csv_file, index_col=False)
+            except FileNotFoundError:
+                logger.warning(
+                    f"File {csv_file} not found, skipping day {str(start_date)} for camera {camera}"
+                )
+            else:
+                df_cam = pd.concat([df_cam, df_tmp])
+            finally:
+                start_date += timedelta(days=1)
+
+        if df.empty:
+            df = df_cam
+        else:
+            df = pd.merge(df, df_cam, on=INDEX_COLUMN_NAME)
+
+    df[INDEX_COLUMN_NAME] = pd.to_datetime(
+        df[INDEX_COLUMN_NAME], format=TELRAAM_COUNTER_API_TIME_FORMAT
+    )
+    return df
+
+
+def save_stations(csv_data_source):
+    stations = []
+    num_created = 0
+    match csv_data_source:
+        case COUNTERS.TELRAAM_COUNTER:
+            stations = get_telraam_counter_stations()
+        case COUNTERS.LAM_COUNTER:
+            stations = get_lam_counter_stations()
+        case COUNTERS.ECO_COUNTER:
+            stations = get_eco_counter_stations()
+        case COUNTERS.TRAFFIC_COUNTER:
+            stations = get_traffic_counter_stations()
+    object_ids = list(
+        Station.objects.filter(csv_data_source=csv_data_source).values_list(
+            "id", flat=True
+        )
+    )
+    for station in stations:
+        obj, created = Station.objects.get_or_create(
+            name=station.name,
+            name_sv=station.name_sv,
+            name_en=station.name_en,
+            location=station.location,
+            geometry=station.geometry,
+            station_id=station.station_id,
+            csv_data_source=csv_data_source,
+        )
+        if obj.id in object_ids:
+            object_ids.remove(obj.id)
+        if created:
+            num_created += 1
+    Station.objects.filter(id__in=object_ids).delete()
+    logger.info(
+        f"Deleted {len(object_ids)} obsolete Stations for counter {csv_data_source}"
+    )
+    num_stations = Station.objects.filter(csv_data_source=csv_data_source).count()
+    logger.info(
+        f"Created {num_created} Stations, {num_stations} Stations in total for counter {csv_data_source}."
+    )
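save_stations above synchronizes rather than just inserts: every fetched station is get_or_create'd on its full natural key, matched ids are struck off the list, and whatever remains is deleted as obsolete. The same pattern in isolation (a generic sketch, names not from this codebase):

    # Generic sketch of the sync pattern used by save_stations above.
    def sync_objects(model, incoming_attrs, **scope):
        stale_ids = set(model.objects.filter(**scope).values_list("id", flat=True))
        num_created = 0
        for attrs in incoming_attrs:
            obj, created = model.objects.get_or_create(**attrs, **scope)
            stale_ids.discard(obj.id)  # still present in the source, keep it
            num_created += created
        model.objects.filter(id__in=stale_ids).delete()  # vanished from the source
        return num_created, len(stale_ids)

One consequence worth noting: a change to any field in the get_or_create key (for example a renamed station) appears as one creation plus one deletion, not as an update.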
""" - df = pd.DataFrame(columns=keys) - df.keys = keys - cur_time = start_time - c = 0 - while cur_time <= end_time: - # Add value to all keys(sensor stations) - vals = [1 for x in range(len(keys) - 1)] - vals.insert(0, str(cur_time)) - df.loc[c] = vals - cur_time = cur_time + timedelta(minutes=15) - c += 1 + df = pd.DataFrame() + timestamps = pd.date_range(start=start_time, end=end_time, freq="15min") + for col in columns: + vals = [1 for i in range(len(timestamps))] + df.insert(0, col, vals) + df.insert(0, time_stamp_column, timestamps) return df diff --git a/eco_counter/migrations/0001_initial.py b/eco_counter/migrations/0001_initial.py index 03c92b877..2f05015ef 100644 --- a/eco_counter/migrations/0001_initial.py +++ b/eco_counter/migrations/0001_initial.py @@ -9,7 +9,6 @@ class Migration(migrations.Migration): - initial = True dependencies = [] diff --git a/eco_counter/migrations/0002_auto_20211013_1018.py b/eco_counter/migrations/0002_auto_20211013_1018.py index e8be909c9..3c1a2d709 100644 --- a/eco_counter/migrations/0002_auto_20211013_1018.py +++ b/eco_counter/migrations/0002_auto_20211013_1018.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("eco_counter", "0001_initial"), ] diff --git a/eco_counter/migrations/0003_alter_importstate_current_year_number_and_more.py b/eco_counter/migrations/0003_alter_importstate_current_year_number_and_more.py index 5b76531de..8e1c98c0d 100644 --- a/eco_counter/migrations/0003_alter_importstate_current_year_number_and_more.py +++ b/eco_counter/migrations/0003_alter_importstate_current_year_number_and_more.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("eco_counter", "0002_auto_20211013_1018"), ] diff --git a/eco_counter/migrations/0004_add_name_en_name_fi_station_name_sv_to_station.py b/eco_counter/migrations/0004_add_name_en_name_fi_station_name_sv_to_station.py index 89468829d..48b5e67cf 100644 --- a/eco_counter/migrations/0004_add_name_en_name_fi_station_name_sv_to_station.py +++ b/eco_counter/migrations/0004_add_name_en_name_fi_station_name_sv_to_station.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("eco_counter", "0003_alter_importstate_current_year_number_and_more"), ] diff --git a/eco_counter/migrations/0005_add_csv_data_source_to_importstate_and_station.py b/eco_counter/migrations/0005_add_csv_data_source_to_importstate_and_station.py index 34048ec00..223b5e567 100644 --- a/eco_counter/migrations/0005_add_csv_data_source_to_importstate_and_station.py +++ b/eco_counter/migrations/0005_add_csv_data_source_to_importstate_and_station.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("eco_counter", "0004_add_name_en_name_fi_station_name_sv_to_station"), ] diff --git a/eco_counter/migrations/0006_alter_importstate_csv_data_source_and_current_year_number.py b/eco_counter/migrations/0006_alter_importstate_csv_data_source_and_current_year_number.py index f4d38b4c8..2c409a5f5 100644 --- a/eco_counter/migrations/0006_alter_importstate_csv_data_source_and_current_year_number.py +++ b/eco_counter/migrations/0006_alter_importstate_csv_data_source_and_current_year_number.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("eco_counter", "0005_add_csv_data_source_to_importstate_and_station"), ] diff --git a/eco_counter/migrations/0007_add_fields_for_bus_data.py b/eco_counter/migrations/0007_add_fields_for_bus_data.py index 0d78f427a..3ea588818 100644 --- a/eco_counter/migrations/0007_add_fields_for_bus_data.py 
diff --git a/eco_counter/migrations/0001_initial.py b/eco_counter/migrations/0001_initial.py
index 03c92b877..2f05015ef 100644
--- a/eco_counter/migrations/0001_initial.py
+++ b/eco_counter/migrations/0001_initial.py
@@ -9,7 +9,6 @@

 class Migration(migrations.Migration):
-
     initial = True

     dependencies = []
diff --git a/eco_counter/migrations/0002_auto_20211013_1018.py b/eco_counter/migrations/0002_auto_20211013_1018.py
index e8be909c9..3c1a2d709 100644
--- a/eco_counter/migrations/0002_auto_20211013_1018.py
+++ b/eco_counter/migrations/0002_auto_20211013_1018.py
@@ -4,7 +4,6 @@

 class Migration(migrations.Migration):
-
     dependencies = [
         ("eco_counter", "0001_initial"),
     ]
diff --git a/eco_counter/migrations/0003_alter_importstate_current_year_number_and_more.py b/eco_counter/migrations/0003_alter_importstate_current_year_number_and_more.py
index 5b76531de..8e1c98c0d 100644
--- a/eco_counter/migrations/0003_alter_importstate_current_year_number_and_more.py
+++ b/eco_counter/migrations/0003_alter_importstate_current_year_number_and_more.py
@@ -4,7 +4,6 @@

 class Migration(migrations.Migration):
-
     dependencies = [
         ("eco_counter", "0002_auto_20211013_1018"),
     ]
diff --git a/eco_counter/migrations/0004_add_name_en_name_fi_station_name_sv_to_station.py b/eco_counter/migrations/0004_add_name_en_name_fi_station_name_sv_to_station.py
index 89468829d..48b5e67cf 100644
--- a/eco_counter/migrations/0004_add_name_en_name_fi_station_name_sv_to_station.py
+++ b/eco_counter/migrations/0004_add_name_en_name_fi_station_name_sv_to_station.py
@@ -4,7 +4,6 @@

 class Migration(migrations.Migration):
-
     dependencies = [
         ("eco_counter", "0003_alter_importstate_current_year_number_and_more"),
     ]
diff --git a/eco_counter/migrations/0005_add_csv_data_source_to_importstate_and_station.py b/eco_counter/migrations/0005_add_csv_data_source_to_importstate_and_station.py
index 34048ec00..223b5e567 100644
--- a/eco_counter/migrations/0005_add_csv_data_source_to_importstate_and_station.py
+++ b/eco_counter/migrations/0005_add_csv_data_source_to_importstate_and_station.py
@@ -4,7 +4,6 @@

 class Migration(migrations.Migration):
-
     dependencies = [
         ("eco_counter", "0004_add_name_en_name_fi_station_name_sv_to_station"),
     ]
diff --git a/eco_counter/migrations/0006_alter_importstate_csv_data_source_and_current_year_number.py b/eco_counter/migrations/0006_alter_importstate_csv_data_source_and_current_year_number.py
index f4d38b4c8..2c409a5f5 100644
--- a/eco_counter/migrations/0006_alter_importstate_csv_data_source_and_current_year_number.py
+++ b/eco_counter/migrations/0006_alter_importstate_csv_data_source_and_current_year_number.py
@@ -4,7 +4,6 @@

 class Migration(migrations.Migration):
-
     dependencies = [
         ("eco_counter", "0005_add_csv_data_source_to_importstate_and_station"),
     ]
diff --git a/eco_counter/migrations/0007_add_fields_for_bus_data.py b/eco_counter/migrations/0007_add_fields_for_bus_data.py
index 0d78f427a..3ea588818 100644
--- a/eco_counter/migrations/0007_add_fields_for_bus_data.py
+++ b/eco_counter/migrations/0007_add_fields_for_bus_data.py
@@ -5,7 +5,6 @@

 class Migration(migrations.Migration):
-
     dependencies = [
         (
             "eco_counter",
diff --git a/eco_counter/migrations/0008_remove_importstate_rows_imported.py b/eco_counter/migrations/0008_remove_importstate_rows_imported.py
index bbdb102bf..fa429c761 100644
--- a/eco_counter/migrations/0008_remove_importstate_rows_imported.py
+++ b/eco_counter/migrations/0008_remove_importstate_rows_imported.py
@@ -4,7 +4,6 @@

 class Migration(migrations.Migration):
-
     dependencies = [
         ("eco_counter", "0007_add_fields_for_bus_data"),
     ]
diff --git a/eco_counter/migrations/0009_add_lam_counter_csv_data_source_to_station_and_importstate.py b/eco_counter/migrations/0009_add_lam_counter_csv_data_source_to_station_and_importstate.py
index d8e4588ee..a326bf056 100644
--- a/eco_counter/migrations/0009_add_lam_counter_csv_data_source_to_station_and_importstate.py
+++ b/eco_counter/migrations/0009_add_lam_counter_csv_data_source_to_station_and_importstate.py
@@ -4,7 +4,6 @@

 class Migration(migrations.Migration):
-
     dependencies = [
         ("eco_counter", "0008_remove_importstate_rows_imported"),
     ]
diff --git a/eco_counter/migrations/0010_alter_max_length_of_station_name_to_64.py b/eco_counter/migrations/0010_alter_max_length_of_station_name_to_64.py
index 8268724e0..a6828ee2e 100644
--- a/eco_counter/migrations/0010_alter_max_length_of_station_name_to_64.py
+++ b/eco_counter/migrations/0010_alter_max_length_of_station_name_to_64.py
@@ -4,7 +4,6 @@

 class Migration(migrations.Migration):
-
     dependencies = [
         (
             "eco_counter",
diff --git a/eco_counter/migrations/0011_add_lam_id_to_station.py b/eco_counter/migrations/0011_add_lam_id_to_station.py
index bb89e01ce..93366d3d1 100644
--- a/eco_counter/migrations/0011_add_lam_id_to_station.py
+++ b/eco_counter/migrations/0011_add_lam_id_to_station.py
@@ -4,7 +4,6 @@

 class Migration(migrations.Migration):
-
     dependencies = [
         ("eco_counter", "0010_alter_max_length_of_station_name_to_64"),
     ]
diff --git a/eco_counter/migrations/0012_set_static_default_year_year_number.py b/eco_counter/migrations/0012_set_static_default_year_year_number.py
index 75e1b30c4..59d1676e9 100644
--- a/eco_counter/migrations/0012_set_static_default_year_year_number.py
+++ b/eco_counter/migrations/0012_set_static_default_year_year_number.py
@@ -4,7 +4,6 @@

 class Migration(migrations.Migration):
-
     dependencies = [
         ("eco_counter", "0011_add_lam_id_to_station"),
     ]
diff --git a/eco_counter/migrations/0013_alter_importstate_current_month_and_year_to_nullables.py b/eco_counter/migrations/0013_alter_importstate_current_month_and_year_to_nullables.py
new file mode 100644
index 000000000..763f680c0
--- /dev/null
+++ b/eco_counter/migrations/0013_alter_importstate_current_month_and_year_to_nullables.py
@@ -0,0 +1,29 @@
+# Generated by Django 4.1.2 on 2023-02-15 14:12
+
+import django.core.validators
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+    dependencies = [
+        ("eco_counter", "0012_set_static_default_year_year_number"),
+    ]
+
+    operations = [
+        migrations.AlterField(
+            model_name="importstate",
+            name="current_month_number",
+            field=models.PositiveSmallIntegerField(
+                null=True,
+                validators=[
+                    django.core.validators.MinValueValidator(1),
+                    django.core.validators.MaxValueValidator(12),
+                ],
+            ),
+        ),
+        migrations.AlterField(
+            model_name="importstate",
+            name="current_year_number",
+            field=models.PositiveSmallIntegerField(null=True),
+        ),
+    ]
diff --git
a/eco_counter/migrations/0014_remove_station_lam_id_station_station_id.py b/eco_counter/migrations/0014_remove_station_lam_id_station_station_id.py new file mode 100644 index 000000000..ccd432beb --- /dev/null +++ b/eco_counter/migrations/0014_remove_station_lam_id_station_station_id.py @@ -0,0 +1,21 @@ +# Generated by Django 4.2 on 2023-05-15 12:50 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + dependencies = [ + ("eco_counter", "0013_alter_importstate_current_month_and_year_to_nullables"), + ] + + operations = [ + migrations.RemoveField( + model_name="station", + name="lam_id", + ), + migrations.AddField( + model_name="station", + name="station_id", + field=models.CharField(max_length=16, null=True), + ), + ] diff --git a/eco_counter/migrations/0015_add_telraam_counter_and_traffic_csv_choice_to_importstate.py b/eco_counter/migrations/0015_add_telraam_counter_and_traffic_csv_choice_to_importstate.py new file mode 100644 index 000000000..4e5200a15 --- /dev/null +++ b/eco_counter/migrations/0015_add_telraam_counter_and_traffic_csv_choice_to_importstate.py @@ -0,0 +1,42 @@ +# Generated by Django 4.2 on 2023-05-22 09:35 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + dependencies = [ + ("eco_counter", "0014_remove_station_lam_id_station_station_id"), + ] + + operations = [ + migrations.AlterField( + model_name="importstate", + name="csv_data_source", + field=models.CharField( + choices=[ + ("TC", "TrafficCounter"), + ("EC", "EcoCounter"), + ("LC", "LamCounter"), + ("TR", "TelraamCounter"), + ("TV", "TelraamCSV"), + ], + default="EC", + max_length=2, + ), + ), + migrations.AlterField( + model_name="station", + name="csv_data_source", + field=models.CharField( + choices=[ + ("TC", "TrafficCounter"), + ("EC", "EcoCounter"), + ("LC", "LamCounter"), + ("TR", "TelraamCounter"), + ("TV", "TelraamCSV"), + ], + default="EC", + max_length=2, + ), + ), + ] diff --git a/eco_counter/migrations/0016_add_importstate_current_day_number.py b/eco_counter/migrations/0016_add_importstate_current_day_number.py new file mode 100644 index 000000000..a6e6256bf --- /dev/null +++ b/eco_counter/migrations/0016_add_importstate_current_day_number.py @@ -0,0 +1,28 @@ +# Generated by Django 4.2 on 2023-05-22 09:39 + +import django.core.validators +from django.db import migrations, models + + +class Migration(migrations.Migration): + dependencies = [ + ( + "eco_counter", + "0015_add_telraam_counter_and_traffic_csv_choice_to_importstate", + ), + ] + + operations = [ + migrations.AddField( + model_name="importstate", + name="current_day_number", + field=models.PositiveSmallIntegerField( + blank=True, + null=True, + validators=[ + django.core.validators.MinValueValidator(1), + django.core.validators.MaxValueValidator(31), + ], + ), + ), + ] diff --git a/eco_counter/migrations/0017_rename_geom_station_location.py b/eco_counter/migrations/0017_rename_geom_station_location.py new file mode 100644 index 000000000..54cc3f846 --- /dev/null +++ b/eco_counter/migrations/0017_rename_geom_station_location.py @@ -0,0 +1,17 @@ +# Generated by Django 4.2 on 2023-05-24 06:07 + +from django.db import migrations + + +class Migration(migrations.Migration): + dependencies = [ + ("eco_counter", "0016_add_importstate_current_day_number"), + ] + + operations = [ + migrations.RenameField( + model_name="station", + old_name="geom", + new_name="location", + ), + ] diff --git a/eco_counter/migrations/0018_add_geometry_to_station.py 
b/eco_counter/migrations/0018_add_geometry_to_station.py
new file mode 100644
index 000000000..ad08d6fd6
--- /dev/null
+++ b/eco_counter/migrations/0018_add_geometry_to_station.py
@@ -0,0 +1,20 @@
+# Generated by Django 4.2 on 2023-05-24 06:55
+
+import django.contrib.gis.db.models.fields
+from django.db import migrations
+
+
+class Migration(migrations.Migration):
+    dependencies = [
+        ("eco_counter", "0017_rename_geom_station_location"),
+    ]
+
+    operations = [
+        migrations.AddField(
+            model_name="station",
+            name="geometry",
+            field=django.contrib.gis.db.models.fields.GeometryField(
+                null=True, srid=3067
+            ),
+        ),
+    ]
diff --git a/eco_counter/models.py b/eco_counter/models.py
index 0818e76fe..a58565184 100644
--- a/eco_counter/models.py
+++ b/eco_counter/models.py
@@ -4,36 +4,17 @@
 from django.core.validators import MaxValueValidator, MinValueValidator
 from django.utils.timezone import now

-TRAFFIC_COUNTER_START_YEAR = 2015
-# Manually define the end year, as the source data comes from the page
-# defined in env variable TRAFFIC_COUNTER_OBSERVATIONS_BASE_URL.
-# Change end year when data for the next year is available.
-TRAFFIC_COUNTER_END_YEAR = 2022
-ECO_COUNTER_START_YEAR = 2020
-LAM_COUNTER_START_YEAR = 2010
-
-
-TRAFFIC_COUNTER = "TC"
-ECO_COUNTER = "EC"
-LAM_COUNTER = "LC"
-CSV_DATA_SOURCES = (
-    (TRAFFIC_COUNTER, "TrafficCounter"),
-    (ECO_COUNTER, "EcoCounter"),
-    (LAM_COUNTER, "LamCounter"),
-)
-COUNTER_START_YEARS = {
-    ECO_COUNTER: ECO_COUNTER_START_YEAR,
-    TRAFFIC_COUNTER: TRAFFIC_COUNTER_START_YEAR,
-    LAM_COUNTER: LAM_COUNTER_START_YEAR,
-}
+from eco_counter.constants import CSV_DATA_SOURCES, ECO_COUNTER


 class ImportState(models.Model):
-    current_year_number = models.PositiveSmallIntegerField(
-        default=ECO_COUNTER_START_YEAR
-    )
+    current_year_number = models.PositiveSmallIntegerField(null=True)
     current_month_number = models.PositiveSmallIntegerField(
-        validators=[MinValueValidator(1), MaxValueValidator(12)], default=1
+        validators=[MinValueValidator(1), MaxValueValidator(12)],
+        null=True,
+    )
+    current_day_number = models.PositiveSmallIntegerField(
+        null=True, blank=True, validators=[MinValueValidator(1), MaxValueValidator(31)]
     )
     csv_data_source = models.CharField(
         max_length=2,
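With all three ImportState progress fields now nullable (and day-level tracking added), code that resumes an import needs a guard before building a date from them, as get_telraam_counter_csv does earlier in this diff. An illustrative helper, not part of the PR:

    from datetime import date

    # Hypothetical helper: resolve the date an import should resume from,
    # falling back to a counter-specific start date when no state exists yet.
    def resume_date(import_state, fallback):
        if import_state is None or import_state.current_year_number is None:
            return fallback
        return date(
            import_state.current_year_number,
            import_state.current_month_number or 1,
            import_state.current_day_number or 1,
        )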
@@ -43,20 +24,20 @@ class Station(models.Model):
-
     name = models.CharField(max_length=64)
-    geom = models.PointField(srid=settings.DEFAULT_SRID)
+    location = models.PointField(srid=settings.DEFAULT_SRID)
+    geometry = models.GeometryField(srid=settings.DEFAULT_SRID, null=True)
     csv_data_source = models.CharField(
         max_length=2,
         choices=CSV_DATA_SOURCES,
         default=ECO_COUNTER,
     )
-    # For lam stations store the LAM station ID, this is
-    # required when fetching data from the API using the ID.
-    lam_id = models.PositiveSmallIntegerField(null=True)
+    # Optional id of the station, used when fetching LAM
+    # and Telraam station data
+    station_id = models.CharField(max_length=16, null=True)

     def __str__(self):
-        return "%s %s" % (self.name, self.geom)
+        return "%s %s" % (self.name, self.location)

     class Meta:
         ordering = ["id"]
@@ -88,7 +69,7 @@ class Year(models.Model):
     @property
     def num_days(self):
-        return self.days.all().count()
+        return self.days.count()

     def __str__(self):
         return "%s" % (self.year_number)
@@ -108,7 +89,7 @@ class Month(models.Model):
     @property
     def num_days(self):
-        return self.days.all().count()
+        return self.days.count()

     def __str__(self):
         return "%s" % (self.month_number)
@@ -128,7 +109,7 @@ class Week(models.Model):
     @property
     def num_days(self):
-        return self.days.all().count()
+        return self.days.count()

     def __str__(self):
         return "%s" % (self.week_number)
diff --git a/eco_counter/specification.swagger2.0.yaml b/eco_counter/specification.swagger2.0.yaml
index 767f05d8a..9505177f7 100755
--- a/eco_counter/specification.swagger2.0.yaml
+++ b/eco_counter/specification.swagger2.0.yaml
@@ -29,8 +29,11 @@ definitions:
       type: integer
     name:
       type: string
-    geom:
+    location:
       type: string
+    geometry:
+      type: string
+      description: Additional geometry, e.g., Telraam counters have a MultiLineString for their segment.
     lat:
       type: number
     lon:
@@ -245,7 +248,7 @@ paths:
       summary: "Returns a list of stations."
       parameters:
         - in: query
-          description: "The type of the counter EC(Eco Counter), TC(Traffic Counter), LC(LAM Counter)"
+          description: "The type of the counter EC(Eco Counter), TC(Traffic Counter), LC(LAM Counter), TR(Telraam Counter)"
           name: counter_type
           type: string
       responses:
diff --git a/eco_counter/tasks.py b/eco_counter/tasks.py
index 9b7fdb714..9ddefd6f4 100644
--- a/eco_counter/tasks.py
+++ b/eco_counter/tasks.py
@@ -11,3 +11,16 @@ def import_counter_data(args, name="import_counter_data"):
 @shared_task_email
 def initial_import_counter_data(args, name="initial_import_counter_data"):
     management.call_command("import_counter_data", "--init", args)
+
+
+@shared_task_email
+def delete_all_counter_data(name="delete_all_counter_data"):
+    management.call_command("delete_all_counter_data")
+
+
+@shared_task_email
+def import_telraam_to_csv(*args, name="import_telraam_to_csv"):
+    if args:
+        management.call_command("import_telraam_to_csv", args)
+    else:
+        management.call_command("import_telraam_to_csv")
diff --git a/eco_counter/tests/__init__.py b/eco_counter/tests/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/eco_counter/tests/conftest.py b/eco_counter/tests/conftest.py
index bb731ed14..e5af61b79 100644
--- a/eco_counter/tests/conftest.py
+++ b/eco_counter/tests/conftest.py
@@ -5,6 +5,7 @@
 from dateutil.relativedelta import relativedelta
 from rest_framework.test import APIClient

+from eco_counter.constants import ECO_COUNTER, LAM_COUNTER, TRAFFIC_COUNTER
 from eco_counter.models import (
     Day,
     DayData,
@@ -18,9 +19,9 @@
     YearData,
 )

-TEST_TIMESTAMP = dateutil.parser.parse("2020-01-01 00:00:00")
+from .constants import TEST_EC_STATION_NAME, TEST_LC_STATION_NAME, TEST_TC_STATION_NAME

-TEST_STATION_NAME = "Auransilta"
+TEST_TIMESTAMP = dateutil.parser.parse("2020-01-01 00:00:00")


 @pytest.fixture
@@ -28,12 +29,6 @@ def test_timestamp():
     return TEST_TIMESTAMP.date()


-@pytest.mark.django_db
-@pytest.fixture
-def station_id():
-    return Station.objects.get(name=TEST_STATION_NAME).id
-
-
 @pytest.fixture
 def api_client():
     return APIClient()
@@ -41,32 +36,60 @@ def
api_client(): @pytest.mark.django_db @pytest.fixture -def station(): - station = Station.objects.create( - name=TEST_STATION_NAME, geom="POINT(60.4487578455581 22.269454227550053)" +def stations(): + stations = [] + stations.append( + Station.objects.create( + name=TEST_EC_STATION_NAME, + location="POINT(60.4487578455581 22.269454227550053)", + csv_data_source=ECO_COUNTER, + ) + ) + stations.append( + Station.objects.create( + name=TEST_TC_STATION_NAME, + location="POINT(60.4487578455581 22.269454227550053)", + csv_data_source=TRAFFIC_COUNTER, + ) ) - return station + stations.append( + Station.objects.create( + name=TEST_LC_STATION_NAME, + location="POINT(60.4487578455581 22.269454227550053)", + csv_data_source=LAM_COUNTER, + ) + ) + + return stations @pytest.mark.django_db @pytest.fixture -def years(station): +def station_id(): + return Station.objects.get(name=TEST_EC_STATION_NAME).id + + +@pytest.mark.django_db +@pytest.fixture +def years(stations): years = [] for i in range(2): - year = Year.objects.create(station=station, year_number=TEST_TIMESTAMP.year + i) + year = Year.objects.create( + station=stations[0], year_number=TEST_TIMESTAMP.year + i + ) years.append(year) return years @pytest.mark.django_db @pytest.fixture -def months(station, years): +def months(stations, years): months = [] for i in range(4): timestamp = TEST_TIMESTAMP + relativedelta(months=i) month_number = int(timestamp.month) month = Month.objects.create( - station=station, month_number=month_number, year=years[0] + station=stations[0], month_number=month_number, year=years[0] ) months.append(month) return months @@ -74,12 +97,12 @@ def months(station, years): @pytest.mark.django_db @pytest.fixture -def weeks(station, years): +def weeks(stations, years): weeks = [] for i in range(4): timestamp = TEST_TIMESTAMP + timedelta(weeks=i) week_number = int(timestamp.strftime("%-V")) - week = Week.objects.create(station=station, week_number=week_number) + week = Week.objects.create(station=stations[0], week_number=week_number) week.years.add(years[0]) weeks.append(week) return weeks @@ -87,12 +110,12 @@ def weeks(station, years): @pytest.mark.django_db @pytest.fixture -def days(station, years, months, weeks): +def days(stations, years, months, weeks): days = [] for i in range(7): timestamp = TEST_TIMESTAMP + timedelta(days=i) day = Day.objects.create( - station=station, + station=stations[0], date=timestamp, weekday_number=timestamp.weekday(), week=weeks[0], @@ -105,9 +128,9 @@ def days(station, years, months, weeks): @pytest.mark.django_db @pytest.fixture -def hour_data(station, days): +def hour_data(stations, days): hour_data = HourData.objects.create( - station=station, + station=stations[0], day=days[0], ) hour_data.values_ak = [ @@ -168,10 +191,10 @@ def hour_data(station, days): @pytest.mark.django_db @pytest.fixture -def day_datas(station, days): +def day_datas(stations, days): day_datas = [] for i in range(7): - day_data = DayData.objects.create(station=station, day=days[i]) + day_data = DayData.objects.create(station=stations[0], day=days[i]) day_data.value_ak = 5 + i day_data.value_ap = 6 + i day_data.save() @@ -181,10 +204,10 @@ def day_datas(station, days): @pytest.mark.django_db @pytest.fixture -def week_datas(station, weeks): +def week_datas(stations, weeks): week_datas = [] for i in range(4): - week_data = WeekData.objects.create(station=station, week=weeks[i]) + week_data = WeekData.objects.create(station=stations[0], week=weeks[i]) week_data.value_ak = 10 + i week_data.value_ap = 20 + i week_data.save() @@ 
-194,10 +217,10 @@ def week_datas(station, weeks): @pytest.mark.django_db @pytest.fixture -def month_datas(station, months): +def month_datas(stations, months): month_datas = [] for i in range(4): - month_data = MonthData.objects.create(station=station, month=months[i]) + month_data = MonthData.objects.create(station=stations[0], month=months[i]) month_data.value_ak = 10 + i month_data.value_ap = 20 + i month_data.save() @@ -207,10 +230,10 @@ def month_datas(station, months): @pytest.mark.django_db @pytest.fixture -def year_datas(station, years): +def year_datas(stations, years): year_datas = [] for i in range(2): - year_data = YearData.objects.create(station=station, year=years[i]) + year_data = YearData.objects.create(station=stations[0], year=years[i]) year_data.value_ak = 42 + i year_data.value_ap = 43 + i year_data.value_at = year_data.value_ak + year_data.value_ap diff --git a/eco_counter/tests/constants.py b/eco_counter/tests/constants.py new file mode 100644 index 000000000..2935a3cc3 --- /dev/null +++ b/eco_counter/tests/constants.py @@ -0,0 +1,39 @@ +TEST_EC_STATION_NAME = "Auransilta" +TEST_TC_STATION_NAME = "Myllysilta" +TEST_LC_STATION_NAME = "Tie 8 Raisio" + +ECO_COUNTER_TEST_COLUMN_NAMES = [ + "Auransilta AK", + "Auransilta AP", + "Auransilta JK", + "Auransilta JP", + "Auransilta PK", + "Auransilta PP", + "Auransilta BK", + "Auransilta BP", +] + +TRAFFIC_COUNTER_TEST_COLUMN_NAMES = [ + "Myllysilta AK", + "Myllysilta AP", + "Myllysilta PK", + "Myllysilta PP", + "Myllysilta JK", + "Myllysilta JP", + "Myllysilta BK", + "Myllysilta BP", + "Kalevantie 65 BK", + "Kalevantie 65 BP", + "Hämeentie 18 PK", +] + +LAM_COUNTER_TEST_COLUMN_NAMES = [ + "Tie 8 Raisio AP", + "Tie 8 Raisio AK", + "Tie 8 Raisio PP", + "Tie 8 Raisio PK", + "Tie 8 Raisio JP", + "Tie 8 Raisio JK", + "Tie 8 Raisio BP", + "Tie 8 Raisio BK", +] diff --git a/eco_counter/tests/test_api.py b/eco_counter/tests/test_api.py index 4c10654fb..f10ee7f9c 100644 --- a/eco_counter/tests/test_api.py +++ b/eco_counter/tests/test_api.py @@ -3,6 +3,8 @@ import pytest from rest_framework.reverse import reverse +from .constants import TEST_EC_STATION_NAME + @pytest.mark.django_db def test__hour_data(api_client, hour_data): @@ -263,9 +265,9 @@ def test__months_multiple_years(api_client, years, test_timestamp): @pytest.mark.django_db -def test__station(api_client, station, year_datas): +def test__station(api_client, stations, year_datas): url = reverse("eco_counter:stations-list") response = api_client.get(url) assert response.status_code == 200 - assert response.json()["results"][0]["name"] == station.name + assert response.json()["results"][0]["name"] == TEST_EC_STATION_NAME assert response.json()["results"][0]["sensor_types"] == ["at"] diff --git a/eco_counter/tests/test_import_counter_data.py b/eco_counter/tests/test_import_counter_data.py index c08374409..0a1856e80 100644 --- a/eco_counter/tests/test_import_counter_data.py +++ b/eco_counter/tests/test_import_counter_data.py @@ -13,62 +13,29 @@ import pytest from django.core.management import call_command +from eco_counter.constants import ECO_COUNTER, LAM_COUNTER, TRAFFIC_COUNTER from eco_counter.models import ( Day, DayData, - ECO_COUNTER, HourData, ImportState, - LAM_COUNTER, Month, MonthData, Station, - TRAFFIC_COUNTER, Week, WeekData, Year, YearData, ) -TEST_EC_STATION_NAME = "Auransilta" -TEST_TC_STATION_NAME = "Myllysilta" -TEST_LC_STATION_NAME = "Tie 8 Raisio" -ECO_COUNTER_TEST_COLUMN_NAMES = [ - "startTime", - "Auransilta AK", - "Auransilta AP", - "Auransilta JK", - 
"Auransilta JP", - "Auransilta PK", - "Auransilta PP", -] - -TRAFFIC_COUNTER_TEST_COLUMN_NAMES = [ - "startTime", - "Myllysilta AK", - "Myllysilta AP", - "Myllysilta PK", - "Myllysilta PP", - "Myllysilta JK", - "Myllysilta JP", - "Myllysilta BK", - "Myllysilta BP", - "Kalevantie 65 BK", - "Kalevantie 65 BP", - "Hämeentie 18 PK", -] - -LAM_COUNTER_TEST_COLUMN_NAMES = [ - "startTime", - "Tie 8 Raisio AP", - "Tie 8 Raisio AK", - "Tie 8 Raisio PP", - "Tie 8 Raisio PK", - "Tie 8 Raisio JP", - "Tie 8 Raisio JK", - "Tie 8 Raisio BP", - "Tie 8 Raisio BK", -] +from .constants import ( + ECO_COUNTER_TEST_COLUMN_NAMES, + LAM_COUNTER_TEST_COLUMN_NAMES, + TEST_EC_STATION_NAME, + TEST_LC_STATION_NAME, + TEST_TC_STATION_NAME, + TRAFFIC_COUNTER_TEST_COLUMN_NAMES, +) TEST_COLUMN_NAMES = { ECO_COUNTER: ECO_COUNTER_TEST_COLUMN_NAMES, @@ -91,14 +58,14 @@ def import_command(*args, **kwargs): @pytest.mark.test_import_counter_data @pytest.mark.django_db -def test_import_counter_data(): +def test_import_eco_counter_data(stations): """ In test data, for every 15min the value 1 is set, so the sum for an hour is 4. For a day the sum is 96(24*4) and for a week 682(96*7). The month sum depends on how many days the month has,~3000 - 1.1.2020 is used as the starting point thus it is the same - starting point as in the real data. + 1.1.2020 is used as the starting point. """ + start_time = dateutil.parser.parse("2020-01-01T00:00") end_time = dateutil.parser.parse("2020-02-29T23:45") import_command(test_counter=(ECO_COUNTER, start_time, end_time)) @@ -137,7 +104,6 @@ def test_import_counter_data(): assert day.weekday_number == 0 # First day in week 2 in 2020 is monday # Test week data - week_data = WeekData.objects.filter( week__week_number=1, station__name=TEST_EC_STATION_NAME )[0] @@ -159,7 +125,7 @@ def test_import_counter_data(): month = Month.objects.get( month_number=1, year__year_number=2020, station__name=TEST_EC_STATION_NAME ) - num_month_days = month.days.all().count() + num_month_days = month.days.count() jan_month_days = calendar.monthrange(month.year.year_number, month.month_number)[1] assert num_month_days == jan_month_days month_data = MonthData.objects.get(month=month) @@ -169,7 +135,7 @@ def test_import_counter_data(): month = Month.objects.get( month_number=2, year__year_number=2020, station__name=TEST_EC_STATION_NAME ) - num_month_days = month.days.all().count() + num_month_days = month.days.count() feb_month_days = calendar.monthrange(month.year.year_number, month.month_number)[1] assert num_month_days == feb_month_days month_data = MonthData.objects.get(month=month) @@ -191,7 +157,7 @@ def test_import_counter_data(): assert state.current_month_number == 2 assert state.current_year_number == 2020 week = Week.objects.filter(week_number=5)[0] - assert week.days.all().count() == num_ec_stations + assert week.days.count() == 7 # test incremental importing start_time = dateutil.parser.parse("2020-02-01T00:00") end_time = dateutil.parser.parse("2020-03-31T23:45") @@ -202,9 +168,9 @@ def test_import_counter_data(): assert state.current_year_number == 2020 # test that number of days in weeks remains intact week = Week.objects.filter(week_number=5)[0] - assert week.days.all().count() == 7 + assert week.days.count() == 7 week = Week.objects.filter(week_number=6)[0] - assert week.days.all().count() == 7 + assert week.days.count() == 7 # Test that we do not get multiple weeks assert Week.objects.filter(week_number=6).count() == num_ec_stations assert WeekData.objects.filter(week__week_number=6).count() == 
@@ -217,7 +183,7 @@ def test_import_counter_data():
         week__week_number=8, station__name=TEST_EC_STATION_NAME
     )
     week = Week.objects.get(week_number=8, station__name=TEST_EC_STATION_NAME)
-    assert week.days.all().count() == 7
+    assert week.days.count() == 7
     assert week_data.value_jp == 672
     # Test starting month
     assert num_month_days == feb_month_days
@@ -227,7 +193,7 @@
     month = Month.objects.get(
         month_number=3, year__year_number=2020, station__name=TEST_EC_STATION_NAME
     )
-    num_month_days = month.days.all().count()
+    num_month_days = month.days.count()
     mar_month_days = calendar.monthrange(month.year.year_number, month.month_number)[1]
     assert num_month_days == mar_month_days
     month_data = MonthData.objects.get(month=month)
@@ -246,9 +212,6 @@
     assert year_data.value_pp == (
         jan_month_days * 96 + feb_month_days * 96 + mar_month_days * 96
     )
-    # Test the day has 24hours stored even though in reality it has 23hours.
-    assert len(HourData.objects.get(day_id=day.id).values_ak) == 24
-
     # Test new year and daylight saving change to "winter time".
     start_time = dateutil.parser.parse("2021-10-01T00:00")
     end_time = dateutil.parser.parse("2021-10-31T23:45")
@@ -278,7 +241,7 @@
     month = Month.objects.get(
         month_number=10, year__year_number=2021, station__name=TEST_EC_STATION_NAME
     )
-    num_month_days = month.days.all().count()
+    num_month_days = month.days.count()
     oct_month_days = calendar.monthrange(month.year.year_number, month.month_number)[1]
     assert num_month_days == oct_month_days
     month_data = MonthData.objects.get(month=month)
@@ -310,18 +273,28 @@
     state.current_year_number = 2020
     state.save()
     start_time = dateutil.parser.parse("2020-12-26T00:00")
-    end_time = dateutil.parser.parse("2021-01-11T23:45")
+    end_time = dateutil.parser.parse("2021-01-17T23:45")
     import_command(test_counter=(ECO_COUNTER, start_time, end_time))
-
     weeks = Week.objects.filter(week_number=53, years__year_number=2020)
     assert len(weeks) == num_ec_stations
-    assert weeks[0].days.all().count() == 7
+    # 4 days in 2020
+    assert weeks[0].days.count() == 4
     weeks = Week.objects.filter(week_number=53, years__year_number=2021)
     assert len(weeks) == num_ec_stations
-    assert weeks[0].days.all().count() == 7
+    assert weeks[0].days.count() == 3
     weeks = Week.objects.filter(week_number=1, years__year_number=2021)
-    assert len(weeks), num_ec_stations
-    assert weeks[0].days.all().count() == 7
+    assert len(weeks) == num_ec_stations
+    assert weeks[0].days.count() == 7
+    weeks = Week.objects.filter(week_number=2, years__year_number=2021)
+    assert len(weeks) == num_ec_stations
+    assert weeks[0].days.count() == 7
+    # Test that exactly one year object is created for every station in 2020
+    assert Year.objects.filter(year_number=2020).count() == num_ec_stations
+
+
+@pytest.mark.test_import_counter_data
+@pytest.mark.django_db
+def test_import_traffic_counter_data(stations):
     # Test importing of Traffic Counter
     start_time = dateutil.parser.parse("2020-01-01T00:00")
     end_time = dateutil.parser.parse("2020-02-29T23:45")
@@ -330,8 +303,6 @@
     state = ImportState.objects.get(csv_data_source=TRAFFIC_COUNTER)
     assert state.current_year_number == 2020
     assert state.current_month_number == 2
-    test_station = Station.objects.get(name=TEST_TC_STATION_NAME)
-    assert test_station
     hour_data = HourData.objects.get(
         station__name=TEST_TC_STATION_NAME, day__date=start_time
     )
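The new week 53 counts above follow directly from the ISO calendar: week 53 of 2020 runs Monday 2020-12-28 through Sunday 2021-01-03, so 4 of its days fall in 2020 and 3 in 2021. This can be verified in isolation:

    from datetime import date, timedelta

    # ISO week 53 of 2020: Mon 2020-12-28 .. Sun 2021-01-03.
    days = [date(2020, 12, 28) + timedelta(days=i) for i in range(7)]
    assert all(d.isocalendar()[1] == 53 for d in days)
    assert sum(d.year == 2020 for d in days) == 4  # Dec 28-31
    assert sum(d.year == 2021 for d in days) == 3  # Jan 1-3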
@@ -375,23 +346,37 @@ def test_import_counter_data():
     assert week_data.value_bk == 672  # 96*7
     assert week_data.value_bt == 672 * 2
     # Test traffic counter month data
-    month = Month.objects.get(
+    feb_month = Month.objects.get(
         station__name=TEST_TC_STATION_NAME, month_number=2, year__year_number=2020
     )
-    num_month_days = month.days.all().count()
-    feb_month_days = calendar.monthrange(month.year.year_number, month.month_number)[1]
+    num_month_days = feb_month.days.count()
+    feb_month_days = calendar.monthrange(
+        feb_month.year.year_number, feb_month.month_number
+    )[1]
     assert num_month_days == feb_month_days
-    month_data = MonthData.objects.get(month=month)
+    month_data = MonthData.objects.get(month=feb_month)
     assert month_data.value_pp == feb_month_days * 96
     assert month_data.value_pk == feb_month_days * 96
     assert month_data.value_pt == feb_month_days * 96 * 2
     # Test traffic counter year data
+    jan_month = Month.objects.get(
+        station__name=TEST_TC_STATION_NAME, month_number=1, year__year_number=2020
+    )
+    jan_month_days = calendar.monthrange(
+        jan_month.year.year_number, jan_month.month_number
+    )[1]
     year_data = YearData.objects.get(
         station__name=TEST_TC_STATION_NAME, year__year_number=2020
     )
     assert year_data.value_bk == (jan_month_days + feb_month_days) * 24 * 4
     assert year_data.value_bp == (jan_month_days + feb_month_days) * 24 * 4
     assert year_data.value_bt == (jan_month_days + feb_month_days) * 24 * 4 * 2
+    assert Year.objects.filter(year_number=2020).count() == num_tc_stations
+
+
+@pytest.mark.test_import_counter_data
+@pytest.mark.django_db
+def test_import_lam_counter_data(stations):
     # Test lam counter data and year change
     start_time = dateutil.parser.parse("2019-12-01T00:00")
     end_time = dateutil.parser.parse("2020-01-31T23:45")
@@ -419,8 +404,8 @@
     assert hour_data.values_bk == res
     assert hour_data.values_bp == res
     assert hour_data.values_bt == res_tot
-    # 2019 December 2019 has 5 weeks and January 2020 has 5 week = 10 weeks
-    assert Week.objects.filter(station__name=TEST_LC_STATION_NAME).count() == 10
+    # December 2019 has 6 weeks (48, 49, 50, 51, 52 and 1) and January 2020 has 5 weeks = 11 weeks
+    assert Week.objects.filter(station__name=TEST_LC_STATION_NAME).count() == 11
     # 5 days of week 5 in 2020 is imported, e.g.
4*24*5 = 480 assert ( WeekData.objects.filter(station__name=TEST_LC_STATION_NAME, week__week_number=5) @@ -441,8 +426,8 @@ def test_import_counter_data(): dec_month_days = calendar.monthrange( dec_month.year.year_number, dec_month.month_number )[1] - assert dec_month_days == dec_month.days.all().count() - assert jan_month_days == jan_month.days.all().count() + assert dec_month_days == dec_month.days.count() + assert jan_month_days == jan_month.days.count() month_data = MonthData.objects.get(month=dec_month) assert month_data.value_pp == dec_month_days * 96 assert month_data.value_pk == dec_month_days * 96 @@ -478,9 +463,4 @@ def test_import_counter_data(): ).count() == 1 ) - - # Test that exacly one year object is created for every station in 2020 - assert ( - Year.objects.filter(year_number=2020).count() - == num_ec_stations + num_tc_stations + num_lc_stations - ) + assert Year.objects.filter(year_number=2020).count() == num_lc_stations diff --git a/mobility_data/api/serializers/mobile_unit.py b/mobility_data/api/serializers/mobile_unit.py index b73a72b25..8986bc240 100644 --- a/mobility_data/api/serializers/mobile_unit.py +++ b/mobility_data/api/serializers/mobile_unit.py @@ -17,7 +17,6 @@ class GeometrySerializer(serializers.Serializer): - x = serializers.FloatField() y = serializers.FloatField() @@ -32,7 +31,6 @@ class Meta: class MobileUnitGroupBasicInfoSerializer(serializers.ModelSerializer): - group_type = GrouptTypeBasicInfoSerializer(many=False, read_only=True) class Meta: @@ -41,7 +39,6 @@ class Meta: class MobileUnitSerializer(serializers.ModelSerializer): - content_types = ContentTypeSerializer(many=True, read_only=True) mobile_unit_group = MobileUnitGroupBasicInfoSerializer(many=False, read_only=True) geometry_coords = serializers.SerializerMethodField(read_only=True) diff --git a/mobility_data/api/serializers/mobile_unit_group.py b/mobility_data/api/serializers/mobile_unit_group.py index dc0f5d653..08bca9470 100644 --- a/mobility_data/api/serializers/mobile_unit_group.py +++ b/mobility_data/api/serializers/mobile_unit_group.py @@ -18,7 +18,6 @@ class MobileUnitGroupSerializer(serializers.ModelSerializer): - group_type = GroupTypeSerializer(many=False, read_only=True) class Meta: @@ -27,7 +26,6 @@ class Meta: class MobileUnitGroupUnitsSerializer(serializers.ModelSerializer): - group_type = GroupTypeSerializer(many=False, read_only=True) mobile_units = serializers.SerializerMethodField() diff --git a/mobility_data/api/views.py b/mobility_data/api/views.py index 4a83f79fc..4e1ff1285 100644 --- a/mobility_data/api/views.py +++ b/mobility_data/api/views.py @@ -121,7 +121,6 @@ def list(self, request): class MobileUnitViewSet(viewsets.ReadOnlyModelViewSet): - queryset = MobileUnit.objects.filter(is_active=True) serializer_class = MobileUnitSerializer diff --git a/mobility_data/importers/bicycle_stands.py b/mobility_data/importers/bicycle_stands.py index 5b82c0949..53ea9bc68 100644 --- a/mobility_data/importers/bicycle_stands.py +++ b/mobility_data/importers/bicycle_stands.py @@ -50,7 +50,6 @@ class BicyleStand(MobileUnitDataBase): - WFS_HULL_LOCKABLE_STR = "runkolukitusmahdollisuus" GEOJSON_HULL_LOCKABLE_STR = "runkolukittava" COVERED_IN_STR = "katettu" diff --git a/mobility_data/importers/culture_routes.py b/mobility_data/importers/culture_routes.py index 36c156028..2b8eee4d9 100644 --- a/mobility_data/importers/culture_routes.py +++ b/mobility_data/importers/culture_routes.py @@ -26,6 +26,11 @@ SOURCE_DATA_SRID = 4326 # Routes are from https://citynomadi.com/route/?keywords=turku 
URLS = { + "Sotiemme Turku": { + "fi": "https://citynomadi.com/api/route/fb656ce4fc31868f4b90168ecc3fabdb/kml?lang=fi", + "sv": "https://citynomadi.com/api/route/fb656ce4fc31868f4b90168ecc3fabdb/kml?lang=sv", + "en": "https://citynomadi.com/api/route/fb656ce4fc31868f4b90168ecc3fabdb/kml?lang=fi", + }, "Stepping it up": { "fi": "https://citynomadi.com/api/route/9edfeee48c655d64abfef65fc5081e26/kml?lang=fi", "sv": "https://citynomadi.com/api/route/9edfeee48c655d64abfef65fc5081e26/kml?lang=sv_SE", diff --git a/mobility_data/importers/loading_unloading_places.py b/mobility_data/importers/loading_unloading_places.py index 9932d9eae..6a5e03f00 100644 --- a/mobility_data/importers/loading_unloading_places.py +++ b/mobility_data/importers/loading_unloading_places.py @@ -22,7 +22,6 @@ class LoadingPlace(MobileUnitDataBase): - extra_field_mappings = { "Saavutettavuus": { "type": FieldTypes.MULTILANG_STRING, diff --git a/mobility_data/importers/parking_machines.py b/mobility_data/importers/parking_machines.py index 3c5529bb6..9004d0cd9 100644 --- a/mobility_data/importers/parking_machines.py +++ b/mobility_data/importers/parking_machines.py @@ -15,7 +15,6 @@ class ParkingMachine(MobileUnitDataBase): - extra_field_mappings = { "Sijainti": { "type": FieldTypes.MULTILANG_STRING, diff --git a/mobility_data/importers/utils.py b/mobility_data/importers/utils.py index f0ad070eb..2a0a88ab5 100644 --- a/mobility_data/importers/utils.py +++ b/mobility_data/importers/utils.py @@ -321,7 +321,7 @@ def log_imported_message(logger, content_type, num_created, num_deleted): @db.transaction.atomic -def save_to_database(objects, content_types, logger=logger): +def save_to_database(objects, content_types, logger=logger, group_type=None): if type(content_types) != list: content_types = [content_types] diff --git a/mobility_data/management/commands/import_culture_routes.py b/mobility_data/management/commands/import_culture_routes.py index 241143c96..b59bab509 100644 --- a/mobility_data/management/commands/import_culture_routes.py +++ b/mobility_data/management/commands/import_culture_routes.py @@ -2,7 +2,12 @@ from django.core.management import BaseCommand -from mobility_data.importers.culture_routes import get_routes, save_to_database +from mobility_data.importers.culture_routes import ( + get_routes, + GROUP_CONTENT_TYPE_NAME, + save_to_database, +) +from mobility_data.models import MobileUnitGroup logger = logging.getLogger("mobility_data") @@ -18,8 +23,12 @@ def add_arguments(self, parser): def handle(self, *args, **options): logger.info("Importing culture routes...") - routes = get_routes() delete_tables = options.get("delete", False) + if delete_tables: + MobileUnitGroup.objects.filter( + group_type__type_name=GROUP_CONTENT_TYPE_NAME + ).delete() + routes = get_routes() routes_saved, routes_deleted, units_saved, units_deleted = save_to_database( routes, delete_tables=delete_tables ) diff --git a/mobility_data/management/commands/import_wfs.py b/mobility_data/management/commands/import_wfs.py index f7c111835..68d033fdd 100644 --- a/mobility_data/management/commands/import_wfs.py +++ b/mobility_data/management/commands/import_wfs.py @@ -27,7 +27,6 @@ class Command(BaseCommand): choices = get_configured_cotent_type_names(config) def add_arguments(self, parser): - parser.add_argument( "--data-file", nargs="?", diff --git a/mobility_data/migrations/0001_initial.py b/mobility_data/migrations/0001_initial.py index 1e0886446..2a676da34 100644 --- a/mobility_data/migrations/0001_initial.py +++ 
b/mobility_data/migrations/0001_initial.py @@ -8,7 +8,6 @@ class Migration(migrations.Migration): - initial = True dependencies = [] diff --git a/mobility_data/migrations/0002_mobileunit_extra.py b/mobility_data/migrations/0002_mobileunit_extra.py index e4d270784..c111b8b2a 100644 --- a/mobility_data/migrations/0002_mobileunit_extra.py +++ b/mobility_data/migrations/0002_mobileunit_extra.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("mobility_data", "0001_initial"), ] diff --git a/mobility_data/migrations/0003_auto_20211101_1501.py b/mobility_data/migrations/0003_auto_20211101_1501.py index c979c933b..03d9b3a63 100644 --- a/mobility_data/migrations/0003_auto_20211101_1501.py +++ b/mobility_data/migrations/0003_auto_20211101_1501.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("mobility_data", "0002_mobileunit_extra"), ] diff --git a/mobility_data/migrations/0004_auto_20211102_0806.py b/mobility_data/migrations/0004_auto_20211102_0806.py index 3c6c02c42..8bd818265 100644 --- a/mobility_data/migrations/0004_auto_20211102_0806.py +++ b/mobility_data/migrations/0004_auto_20211102_0806.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("mobility_data", "0003_auto_20211101_1501"), ] diff --git a/mobility_data/migrations/0005_alter_mobileunit_unit_id.py b/mobility_data/migrations/0005_alter_mobileunit_unit_id.py index a2d14764f..8e39c9e5b 100644 --- a/mobility_data/migrations/0005_alter_mobileunit_unit_id.py +++ b/mobility_data/migrations/0005_alter_mobileunit_unit_id.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("mobility_data", "0004_auto_20211102_0806"), ] diff --git a/mobility_data/migrations/0006_auto_20211112_1312.py b/mobility_data/migrations/0006_auto_20211112_1312.py index 74906ae73..a515a53b4 100644 --- a/mobility_data/migrations/0006_auto_20211112_1312.py +++ b/mobility_data/migrations/0006_auto_20211112_1312.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("mobility_data", "0005_alter_mobileunit_unit_id"), ] diff --git a/mobility_data/migrations/0007_auto_20211112_1320.py b/mobility_data/migrations/0007_auto_20211112_1320.py index dad0db099..c85b58c35 100644 --- a/mobility_data/migrations/0007_auto_20211112_1320.py +++ b/mobility_data/migrations/0007_auto_20211112_1320.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("mobility_data", "0006_auto_20211112_1312"), ] diff --git a/mobility_data/migrations/0008_auto_20211118_1256.py b/mobility_data/migrations/0008_auto_20211118_1256.py index 00b5ce879..bb2ef763b 100644 --- a/mobility_data/migrations/0008_auto_20211118_1256.py +++ b/mobility_data/migrations/0008_auto_20211118_1256.py @@ -5,7 +5,6 @@ class Migration(migrations.Migration): - dependencies = [ ("mobility_data", "0007_auto_20211112_1320"), ] diff --git a/mobility_data/migrations/0009_alter_contenttype_type_name.py b/mobility_data/migrations/0009_alter_contenttype_type_name.py index 2f10396f2..adebf9344 100644 --- a/mobility_data/migrations/0009_alter_contenttype_type_name.py +++ b/mobility_data/migrations/0009_alter_contenttype_type_name.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("mobility_data", "0008_auto_20211118_1256"), ] diff --git a/mobility_data/migrations/0010_alter_mobileunit_unit_id.py b/mobility_data/migrations/0010_alter_mobileunit_unit_id.py index 32ed7e492..b496cf815 100644 --- a/mobility_data/migrations/0010_alter_mobileunit_unit_id.py +++ 
b/mobility_data/migrations/0010_alter_mobileunit_unit_id.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("mobility_data", "0009_alter_contenttype_type_name"), ] diff --git a/mobility_data/migrations/0011_alter_contenttype_type_name.py b/mobility_data/migrations/0011_alter_contenttype_type_name.py index 4ececacd5..d03b6de37 100644 --- a/mobility_data/migrations/0011_alter_contenttype_type_name.py +++ b/mobility_data/migrations/0011_alter_contenttype_type_name.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("mobility_data", "0010_alter_mobileunit_unit_id"), ] diff --git a/mobility_data/migrations/0012_add_table_datasource.py b/mobility_data/migrations/0012_add_table_datasource.py index e2009f3f4..286c70f85 100644 --- a/mobility_data/migrations/0012_add_table_datasource.py +++ b/mobility_data/migrations/0012_add_table_datasource.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("mobility_data", "0011_alter_contenttype_type_name"), ] diff --git a/mobility_data/migrations/0013_add_contenttype_bike_service_station.py b/mobility_data/migrations/0013_add_contenttype_bike_service_station.py index d780db5eb..111576112 100644 --- a/mobility_data/migrations/0013_add_contenttype_bike_service_station.py +++ b/mobility_data/migrations/0013_add_contenttype_bike_service_station.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("mobility_data", "0012_add_table_datasource"), ] diff --git a/mobility_data/migrations/0014_add_contenttype_share_car_parking_place.py b/mobility_data/migrations/0014_add_contenttype_share_car_parking_place.py index 6f777b3a9..28dc128bc 100644 --- a/mobility_data/migrations/0014_add_contenttype_share_car_parking_place.py +++ b/mobility_data/migrations/0014_add_contenttype_share_car_parking_place.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("mobility_data", "0013_add_contenttype_bike_service_station"), ] diff --git a/mobility_data/migrations/0015_add_contenttype_brush_salted_and_sanded_bicycle_network.py b/mobility_data/migrations/0015_add_contenttype_brush_salted_and_sanded_bicycle_network.py index 7e825917a..7bbaf1fb2 100644 --- a/mobility_data/migrations/0015_add_contenttype_brush_salted_and_sanded_bicycle_network.py +++ b/mobility_data/migrations/0015_add_contenttype_brush_salted_and_sanded_bicycle_network.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("mobility_data", "0014_add_contenttype_share_car_parking_place"), ] diff --git a/mobility_data/migrations/0016_add_content_type_marina_and_guest_marina_and_boat_parking.py b/mobility_data/migrations/0016_add_content_type_marina_and_guest_marina_and_boat_parking.py index 4db804176..b6a88ee43 100644 --- a/mobility_data/migrations/0016_add_content_type_marina_and_guest_marina_and_boat_parking.py +++ b/mobility_data/migrations/0016_add_content_type_marina_and_guest_marina_and_boat_parking.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ( "mobility_data", diff --git a/mobility_data/migrations/0017_add_content_type_no_staff_parking.py b/mobility_data/migrations/0017_add_content_type_no_staff_parking.py index a8d9a34d8..91a677ca7 100644 --- a/mobility_data/migrations/0017_add_content_type_no_staff_parking.py +++ b/mobility_data/migrations/0017_add_content_type_no_staff_parking.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ( "mobility_data", diff --git 
a/mobility_data/migrations/0018_add_address_zip_and_municipality_to_mobileunit.py b/mobility_data/migrations/0018_add_address_zip_and_municipality_to_mobileunit.py index b0eaba8e2..731947c25 100644 --- a/mobility_data/migrations/0018_add_address_zip_and_municipality_to_mobileunit.py +++ b/mobility_data/migrations/0018_add_address_zip_and_municipality_to_mobileunit.py @@ -5,7 +5,6 @@ class Migration(migrations.Migration): - dependencies = [ ("munigeo", "0013_add_naturalsort_function"), ("mobility_data", "0017_add_content_type_no_staff_parking"), diff --git a/mobility_data/migrations/0019_add_content_type_berth.py b/mobility_data/migrations/0019_add_content_type_berth.py index a7bb004db..f2ae5ba0c 100644 --- a/mobility_data/migrations/0019_add_content_type_berth.py +++ b/mobility_data/migrations/0019_add_content_type_berth.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("mobility_data", "0018_add_address_zip_and_municipality_to_mobileunit"), ] diff --git a/mobility_data/migrations/0020_add_content_type_disabled_parking.py b/mobility_data/migrations/0020_add_content_type_disabled_parking.py index c9420a06b..6d14ce912 100644 --- a/mobility_data/migrations/0020_add_content_type_disabled_parking.py +++ b/mobility_data/migrations/0020_add_content_type_disabled_parking.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("mobility_data", "0019_add_content_type_berth"), ] diff --git a/mobility_data/migrations/0021_add_content_type_loading_unloading_place.py b/mobility_data/migrations/0021_add_content_type_loading_unloading_place.py index fcc5f8966..d53861082 100644 --- a/mobility_data/migrations/0021_add_content_type_loading_unloading_place.py +++ b/mobility_data/migrations/0021_add_content_type_loading_unloading_place.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("mobility_data", "0020_add_content_type_disabled_parking"), ] diff --git a/mobility_data/migrations/0022_add_content_type_marina_southwest_finland_and_slipway.py b/mobility_data/migrations/0022_add_content_type_marina_southwest_finland_and_slipway.py index 0f04c423f..35d377c75 100644 --- a/mobility_data/migrations/0022_add_content_type_marina_southwest_finland_and_slipway.py +++ b/mobility_data/migrations/0022_add_content_type_marina_southwest_finland_and_slipway.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("mobility_data", "0021_add_content_type_loading_unloading_place"), ] diff --git a/mobility_data/migrations/0023_add_content_type_recreational_route.py b/mobility_data/migrations/0023_add_content_type_recreational_route.py index fffa9a2e3..a9aefde55 100644 --- a/mobility_data/migrations/0023_add_content_type_recreational_route.py +++ b/mobility_data/migrations/0023_add_content_type_recreational_route.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("mobility_data", "0022_add_content_type_marina_southwest_finland_and_slipway"), ] diff --git a/mobility_data/migrations/0024_add_content_type_ferry_route_and_fishing_spot.py b/mobility_data/migrations/0024_add_content_type_ferry_route_and_fishing_spot.py index 1be872497..abbd4078c 100644 --- a/mobility_data/migrations/0024_add_content_type_ferry_route_and_fishing_spot.py +++ b/mobility_data/migrations/0024_add_content_type_ferry_route_and_fishing_spot.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - dependencies = [ ("mobility_data", "0023_add_content_type_recreational_route"), ] diff --git 
index e59deb461..c6d5c1632 100644
--- a/mobility_data/migrations/0025_add_content_type_paavonpolku.py
+++ b/mobility_data/migrations/0025_add_content_type_paavonpolku.py
@@ -4,7 +4,6 @@


 class Migration(migrations.Migration):
-
     dependencies = [
         ("mobility_data", "0024_add_content_type_ferry_route_and_fishing_spot"),
     ]
diff --git a/mobility_data/migrations/0026_add_content_type_fitness_trail.py b/mobility_data/migrations/0026_add_content_type_fitness_trail.py
index 6b363e46d..3864afa9a 100644
--- a/mobility_data/migrations/0026_add_content_type_fitness_trail.py
+++ b/mobility_data/migrations/0026_add_content_type_fitness_trail.py
@@ -4,7 +4,6 @@


 class Migration(migrations.Migration):
-
     dependencies = [
         ("mobility_data", "0025_add_content_type_paavonpolku"),
     ]
diff --git a/mobility_data/migrations/0027_add_content_type_nature_trail.py b/mobility_data/migrations/0027_add_content_type_nature_trail.py
index eb591c0b6..85b0597e3 100644
--- a/mobility_data/migrations/0027_add_content_type_nature_trail.py
+++ b/mobility_data/migrations/0027_add_content_type_nature_trail.py
@@ -4,7 +4,6 @@


 class Migration(migrations.Migration):
-
     dependencies = [
         ("mobility_data", "0026_add_content_type_fitness_trail"),
     ]
diff --git a/mobility_data/migrations/0028_add_content_type_hiking_trail.py b/mobility_data/migrations/0028_add_content_type_hiking_trail.py
index 0be27c5db..fcbd9f6f1 100644
--- a/mobility_data/migrations/0028_add_content_type_hiking_trail.py
+++ b/mobility_data/migrations/0028_add_content_type_hiking_trail.py
@@ -4,7 +4,6 @@


 class Migration(migrations.Migration):
-
     dependencies = [
         ("mobility_data", "0027_add_content_type_nature_trail"),
     ]
diff --git a/mobility_data/migrations/0029_add_content_type_paddling_trail.py b/mobility_data/migrations/0029_add_content_type_paddling_trail.py
index 23bc0b1a0..230c041d1 100644
--- a/mobility_data/migrations/0029_add_content_type_paddling_trail.py
+++ b/mobility_data/migrations/0029_add_content_type_paddling_trail.py
@@ -4,7 +4,6 @@


 class Migration(migrations.Migration):
-
     dependencies = [
         ("mobility_data", "0028_add_content_type_hiking_trail"),
     ]
diff --git a/mobility_data/migrations/0030_add_unique_constraint_to_content_and_group_type_name.py b/mobility_data/migrations/0030_add_unique_constraint_to_content_and_group_type_name.py
index 5d772991d..200332733 100644
--- a/mobility_data/migrations/0030_add_unique_constraint_to_content_and_group_type_name.py
+++ b/mobility_data/migrations/0030_add_unique_constraint_to_content_and_group_type_name.py
@@ -4,7 +4,6 @@


 class Migration(migrations.Migration):
-
     dependencies = [
         ("mobility_data", "0029_add_content_type_paddling_trail"),
     ]
diff --git a/mobility_data/migrations/0031_increase_datasource_type_name_size.py b/mobility_data/migrations/0031_increase_datasource_type_name_size.py
index b34c88518..23dd3c673 100644
--- a/mobility_data/migrations/0031_increase_datasource_type_name_size.py
+++ b/mobility_data/migrations/0031_increase_datasource_type_name_size.py
@@ -4,7 +4,6 @@


 class Migration(migrations.Migration):
-
     dependencies = [
         ("mobility_data", "0030_add_unique_constraint_to_content_and_group_type_name"),
     ]
diff --git a/mobility_data/migrations/0032_remove_choices_from_type_name.py b/mobility_data/migrations/0032_remove_choices_from_type_name.py
index 37bbf6fa2..3cf5be960 100644
--- a/mobility_data/migrations/0032_remove_choices_from_type_name.py
+++ b/mobility_data/migrations/0032_remove_choices_from_type_name.py
@@ -4,7 +4,6 @@


 class Migration(migrations.Migration):
-
     dependencies = [
         ("mobility_data", "0031_increase_datasource_type_name_size"),
     ]
diff --git a/mobility_data/migrations/0033_remove_contenttype_type_name.py b/mobility_data/migrations/0033_remove_contenttype_type_name.py
index abf0125e1..63fd075d5 100644
--- a/mobility_data/migrations/0033_remove_contenttype_type_name.py
+++ b/mobility_data/migrations/0033_remove_contenttype_type_name.py
@@ -4,7 +4,6 @@


 class Migration(migrations.Migration):
-
     dependencies = [
         ("mobility_data", "0032_remove_choices_from_type_name"),
     ]
diff --git a/mobility_data/migrations/0034_remove_grouptype_type_name.py b/mobility_data/migrations/0034_remove_grouptype_type_name.py
index f56790277..2ae108676 100644
--- a/mobility_data/migrations/0034_remove_grouptype_type_name.py
+++ b/mobility_data/migrations/0034_remove_grouptype_type_name.py
@@ -4,7 +4,6 @@


 class Migration(migrations.Migration):
-
     dependencies = [
         ("mobility_data", "0033_remove_contenttype_type_name"),
     ]
diff --git a/mobility_data/migrations/0035_add_many_to_many_field_content_types_to_mobile_unit.py b/mobility_data/migrations/0035_add_many_to_many_field_content_types_to_mobile_unit.py
index 029d6b23f..7e4acbace 100644
--- a/mobility_data/migrations/0035_add_many_to_many_field_content_types_to_mobile_unit.py
+++ b/mobility_data/migrations/0035_add_many_to_many_field_content_types_to_mobile_unit.py
@@ -4,7 +4,6 @@


 class Migration(migrations.Migration):
-
     dependencies = [
         ("mobility_data", "0034_remove_grouptype_type_name"),
     ]
diff --git a/mobility_data/migrations/0036_populate_mobile_type_content_types.py b/mobility_data/migrations/0036_populate_mobile_type_content_types.py
index cafd7c797..35f488c9f 100644
--- a/mobility_data/migrations/0036_populate_mobile_type_content_types.py
+++ b/mobility_data/migrations/0036_populate_mobile_type_content_types.py
@@ -11,7 +11,6 @@ def make_many_to_many_content_types(apps, schema_editor):


 class Migration(migrations.Migration):
-
     dependencies = [
         ("mobility_data", "0035_add_many_to_many_field_content_types_to_mobile_unit"),
     ]
diff --git a/mobility_data/migrations/0037_remove_mobileunit_content_type.py b/mobility_data/migrations/0037_remove_mobileunit_content_type.py
index 8d0626c0a..7288e80fd 100644
--- a/mobility_data/migrations/0037_remove_mobileunit_content_type.py
+++ b/mobility_data/migrations/0037_remove_mobileunit_content_type.py
@@ -4,7 +4,6 @@


 class Migration(migrations.Migration):
-
     dependencies = [
         ("mobility_data", "0036_populate_mobile_type_content_types"),
     ]
diff --git a/mobility_data/migrations/0038_alter_contenttype_and_grouptype_ordering_to_field_name.py b/mobility_data/migrations/0038_alter_contenttype_and_grouptype_ordering_to_field_name.py
index 2d450fe7f..c1c951134 100644
--- a/mobility_data/migrations/0038_alter_contenttype_and_grouptype_ordering_to_field_name.py
+++ b/mobility_data/migrations/0038_alter_contenttype_and_grouptype_ordering_to_field_name.py
@@ -4,7 +4,6 @@


 class Migration(migrations.Migration):
-
     dependencies = [
         ("mobility_data", "0037_remove_mobileunit_content_type"),
     ]
diff --git a/mobility_data/migrations/0039_contentype_and_grouptype_rename_name_to_type_name.py b/mobility_data/migrations/0039_contentype_and_grouptype_rename_name_to_type_name.py
index aa0b6a828..5e0d35302 100644
--- a/mobility_data/migrations/0039_contentype_and_grouptype_rename_name_to_type_name.py
+++ b/mobility_data/migrations/0039_contentype_and_grouptype_rename_name_to_type_name.py
@@ -4,7 +4,6 @@


 class Migration(migrations.Migration):
-
     dependencies = [
         (
             "mobility_data",
diff --git a/mobility_data/migrations/0040_contenttype_name_grouptype_name.py b/mobility_data/migrations/0040_contenttype_name_grouptype_name.py
index 6303c45d3..ed30f8320 100644
--- a/mobility_data/migrations/0040_contenttype_name_grouptype_name.py
+++ b/mobility_data/migrations/0040_contenttype_name_grouptype_name.py
@@ -4,7 +4,6 @@


 class Migration(migrations.Migration):
-
     dependencies = [
         ("mobility_data", "0039_contentype_and_grouptype_rename_name_to_type_name"),
     ]
diff --git a/mobility_data/migrations/0041_contenttype_grouptype_make_name_and_description_multilingual.py b/mobility_data/migrations/0041_contenttype_grouptype_make_name_and_description_multilingual.py
index c6d5e252d..6cd4f97df 100644
--- a/mobility_data/migrations/0041_contenttype_grouptype_make_name_and_description_multilingual.py
+++ b/mobility_data/migrations/0041_contenttype_grouptype_make_name_and_description_multilingual.py
@@ -4,7 +4,6 @@


 class Migration(migrations.Migration):
-
     dependencies = [
         ("mobility_data", "0040_contenttype_name_grouptype_name"),
     ]
diff --git a/mobility_data/models/content_type.py b/mobility_data/models/content_type.py
index 2acf722f9..a2f09b140 100644
--- a/mobility_data/models/content_type.py
+++ b/mobility_data/models/content_type.py
@@ -22,10 +22,8 @@ def __str__(self):


 class ContentType(BaseType):
-
     pass


 class GroupType(BaseType):
-
     pass
diff --git a/mobility_data/tests/test_import_gas_filling_stations.py b/mobility_data/tests/test_import_gas_filling_stations.py
index 11dd72c4f..5bd913c35 100644
--- a/mobility_data/tests/test_import_gas_filling_stations.py
+++ b/mobility_data/tests/test_import_gas_filling_stations.py
@@ -11,7 +11,6 @@

 @pytest.mark.django_db
 def test_importer(municipalities):
-
     from mobility_data.importers.gas_filling_station import (
         CONTENT_TYPE_NAME,
         get_filtered_gas_filling_station_objects,
diff --git a/requirements.in b/requirements.in
index 85f1db3f1..6cdaf4f58 100644
--- a/requirements.in
+++ b/requirements.in
@@ -29,7 +29,7 @@ pep8-naming
 jedi
 parso
 whitenoise
-pandas>1.4
+pandas>=2.0.0
 pykml
 shapely
 celery
diff --git a/requirements.txt b/requirements.txt
index c5816cae9..9cbc1857e 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -1,6 +1,6 @@
 #
-# This file is autogenerated by pip-compile with python 3.10
-# To update, run:
+# This file is autogenerated by pip-compile with Python 3.10
+# by the following command:
 #
 #    pip-compile requirements.in
 #
@@ -138,7 +138,7 @@ numpy==1.23.0
     # pandas
 packaging==21.0
     # via pytest
-pandas==1.4.3
+pandas==2.0.1
     # via -r requirements.in
 parso==0.8.2
     # via
@@ -233,11 +233,15 @@ toml==0.10.2
     # pytest
     # pytest-cov
 tomli==1.2.1
-    # via pep517
+    # via
+    #   black
+    #   pep517
 tqdm==4.62.3
     # via -r requirements.in
 tzdata==2022.1
-    # via django-celery-beat
+    # via
+    #   django-celery-beat
+    #   pandas
 uritemplate==4.1.1
     # via drf-spectacular
 url-normalize==1.4.3
diff --git a/smbackend/settings.py b/smbackend/settings.py
index 0e786c728..91306fdb0 100644
--- a/smbackend/settings.py
+++ b/smbackend/settings.py
@@ -62,6 +62,7 @@
     EMAIL_HOST_USER=(str, None),
     EMAIL_PORT=(int, None),
     EMAIL_USE_TLS=(bool, None),
+    TELRAAM_TOKEN=(str, None),
 )

 # Build paths inside the project like this: os.path.join(BASE_DIR, ...)
@@ -313,7 +314,7 @@ def gettext(s):
 def preprocessing_filter_spec(endpoints):
     filtered = []
     for endpoint in DOC_ENDPOINTS:
-        for (path, path_regex, method, callback) in endpoints:
+        for path, path_regex, method, callback in endpoints:
             if path.startswith(endpoint):
                 filtered.append((path, path_regex, method, callback))
     return filtered
@@ -433,3 +434,4 @@ def preprocessing_filter_spec(endpoints):
 YIT_CONTRACTS_URL = env("YIT_CONTRACTS_URL")
 YIT_TOKEN_URL = env("YIT_TOKEN_URL")
 KUNTEC_KEY = env("KUNTEC_KEY")
+TELRAAM_TOKEN = env("TELRAAM_TOKEN")