diff --git a/.github/workflows/run-tests.yml b/.github/workflows/run-tests.yml index c72649628..feeb729fd 100644 --- a/.github/workflows/run-tests.yml +++ b/.github/workflows/run-tests.yml @@ -19,9 +19,9 @@ jobs: LAM_COUNTER_API_BASE_URL: https://tie.digitraffic.fi/api/tms/v1/history steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v3 - name: Set up Python 3.10.0 - uses: actions/setup-python@v2 + uses: actions/setup-python@v3 with: python-version: 3.10.0 - name: Install required Ubuntu packages @@ -50,7 +50,7 @@ jobs: pip install coverage coverage report -m - name: Upload Coverage to Codecov - uses: codecov/codecov-action@v2 + uses: codecov/codecov-action@v3 # Majority of the tests require database services: # Label used to access the service container diff --git a/README.md b/README.md index e86ba9c9c..91f269449 100644 --- a/README.md +++ b/README.md @@ -141,8 +141,12 @@ For Turku specific imports see smbackend_turku/README.md. ./manage.py geo_import helsinki --divisions ./manage.py index_search_columns ``` - +Import exclude rules fixtures used by the search: +``` +./manage.py loaddata services/fixtures/exclusion_rules.json +``` 7. Redis + Redis is used for caching and as a message broker for Celery. Install Redis. Ubuntu: `sudo apt-get install redis-server` @@ -198,3 +202,6 @@ psql template1 -c 'CREATE EXTENSION IF NOT EXISTS pg_trgm;' Mobility platform ----------------- The mobility data platform of the service map is being developed as part of European Union Horizon 2020 programme funded SCALE-UP project (grant agreement no. 955332). 
+ +For more information see: mobility_data/README.mk + diff --git a/bicycle_network/api/views.py b/bicycle_network/api/views.py index d7bb73423..f72fb002e 100644 --- a/bicycle_network/api/views.py +++ b/bicycle_network/api/views.py @@ -1,5 +1,3 @@ -from distutils.util import strtobool - from django.contrib.gis.db.models.functions import Distance from django.contrib.gis.gdal import SpatialReference from django.contrib.gis.geos import Point @@ -10,6 +8,7 @@ from rest_framework.exceptions import ParseError from services.api_pagination import Pagination +from services.utils import strtobool from ..models import BicycleNetwork, BicycleNetworkPart from .serializers import ( diff --git a/config_dev.env.example b/config_dev.env.example index 296a49ad6..dc66fbe57 100644 --- a/config_dev.env.example +++ b/config_dev.env.example @@ -145,6 +145,23 @@ EMAIL_HOST_USER=example@example.com EMAIL_PORT=25 EMAIL_USE_TLS=True +# Django project log level, default INFO +DJANGO_LOG_LEVEL= +# Turku services importers log level, default DEBUG +TURKU_SERVICES_IMPORT_LOG_LEVEL= +# Search log level, default INFO +SEARCH_LOG_LEVEL= +# IoT APP, default INFO +IOT_LOG_LEVEL= +# Eco counter, default INFO +ECO_COUNTER_LOG_LEVEL= +# Mobility data (includes importers), default INFO +MOBILITY_DATA_LOG_LEVEL= +# Bicycle networks APP, default INFO +BICYCLE_NETWORK_LOG_LEVEL= +# Street maintenance, default INFO +STREET_MAINTENANCE_LOG_LEVEL= + # Settings needed for enabling Turku area: #ADDITIONAL_INSTALLED_APPS=smbackend_turku,ptv #TURKU_API_KEY=secret @@ -184,4 +201,4 @@ YIT_TOKEN_URL=https://login.microsoftonline.com/86792d09-0d81-4899-8d66-95dfc96c KUNTEC_KEY= # Telraam API token, required when fetching Telraam data to csv (import_telraam_to_csv.py) # https://telraam.helpspace-docs.io/article/27/you-wish-more-data-and-statistics-telraam-api -TELRAAM_TOKEN= \ No newline at end of file +TELRAAM_TOKEN= diff --git a/eco_counter/api/serializers.py b/eco_counter/api/serializers.py index 
7b9f81713..ea095df18 100644 --- a/eco_counter/api/serializers.py +++ b/eco_counter/api/serializers.py @@ -1,3 +1,4 @@ +from django.db.models import Q from rest_framework import serializers from ..models import ( @@ -23,6 +24,9 @@ "value_jk", "value_jp", "value_jt", + "value_bk", + "value_bp", + "value_bt", ] @@ -32,6 +36,7 @@ class StationSerializer(serializers.ModelSerializer): lon = serializers.SerializerMethodField() lat = serializers.SerializerMethodField() sensor_types = serializers.SerializerMethodField() + data_from_year = serializers.SerializerMethodField() class Meta: model = Station @@ -49,6 +54,7 @@ class Meta: "lon", "lat", "sensor_types", + "data_from_year", ] def get_y(self, obj): @@ -76,6 +82,19 @@ def get_sensor_types(self, obj): result.append(type) return result + def get_data_from_year(self, obj): + q_exp = ( + Q(value_at__gt=0) + | Q(value_pt__gt=0) + | Q(value_jt__gt=0) + | Q(value_bt__gt=0) + ) + qs = YearData.objects.filter(q_exp, station=obj).order_by("year__year_number") + if qs.count() > 0: + return qs[0].year.year_number + else: + return None + class YearSerializer(serializers.ModelSerializer): station_name = serializers.PrimaryKeyRelatedField( diff --git a/eco_counter/api/views.py b/eco_counter/api/views.py index 04d3ea3b1..695e6f0ec 100644 --- a/eco_counter/api/views.py +++ b/eco_counter/api/views.py @@ -62,7 +62,24 @@ def list(self, request): if counter_type in str(CSV_DATA_SOURCES): queryset = Station.objects.filter(csv_data_source=counter_type) else: - raise ParseError("Valid 'counter_type' choices are: 'EC','TC' or 'LC'.") + raise ParseError( + "Valid 'counter_type' choices are: 'EC', 'TC', 'TR' or 'LC'." 
+ ) + if "data_type" in filters: + data_type = filters["data_type"].lower() + data_types = ["a", "j", "b", "p"] + if data_type not in data_types: + raise ParseError( + f"Valid 'data_type' choices are: {', '.join(data_types)}" + ) + ids = [] + data_type = data_type + "t" + for station in Station.objects.all(): + filter = {"station": station, f"value_{data_type}__gt": 0} + if YearData.objects.filter(**filter).count() > 0: + ids.append(station.id) + queryset = Station.objects.filter(id__in=ids) + page = self.paginate_queryset(queryset) serializer = StationSerializer(page, many=True) return self.get_paginated_response(serializer.data) diff --git a/eco_counter/constants.py b/eco_counter/constants.py index 7744b4229..94d4ab1a8 100644 --- a/eco_counter/constants.py +++ b/eco_counter/constants.py @@ -114,6 +114,8 @@ # from the beginning of the start tear TELRAAM_COUNTER_START_MONTH = 5 TELRAAM_COUNTER_API_TIME_FORMAT = "%Y-%m-%d %H:%M:%S" +TELRAAM_COUNTER_DATA_TIME_FORMAT = "%Y-%m-%dT%H:%M:%SZ" + TELRAAM_COUNTER_CSV_FILE_PATH = f"{settings.MEDIA_ROOT}/telraam_data/" TELRAAM_COUNTER_CSV_FILE = ( TELRAAM_COUNTER_CSV_FILE_PATH + "telraam_data_{id}_{day}_{month}_{year}.csv" @@ -127,7 +129,7 @@ retry_strategy = Retry( total=10, status_forcelist=[429], - method_whitelist=["GET", "POST"], + allowed_methods=["GET", "POST"], backoff_factor=30, # 30, 60, 120 , 240, ..seconds ) adapter = HTTPAdapter(max_retries=retry_strategy) diff --git a/eco_counter/data/traffic_counter_metadata.geojson b/eco_counter/data/traffic_counter_metadata.geojson index 6a1996103..22e9ed52e 100644 --- a/eco_counter/data/traffic_counter_metadata.geojson +++ b/eco_counter/data/traffic_counter_metadata.geojson @@ -108,7 +108,20 @@ { "type": "Feature", "properties": { "fid": 235, "ID": "9", "Osoite_fi": "Uudenmaantie 5", "Osoite_sv": "Nylandsvägen 5", "Osoite_en": "Uudenmaantie 5", "Liittymänumero": "609", "Ilmaisimet": "208L", "Mittauspisteiden_ID": "5537", "Tyyppi": "B", "Suunta": "K" }, "geometry": { "type": 
"Point", "coordinates": [ 23460746.775335978716612, 6703683.553299261257052 ] } }, { "type": "Feature", "properties": { "fid": 80, "ID": "15", "Osoite_fi": "Ispoisten puistotie 2", "Osoite_sv": "Ispois parväg 2", "Osoite_en": "Ispoisten puistotie 2", "Liittymänumero": "727", "Ilmaisimet": "206L", "Mittauspisteiden_ID": "8746", "Tyyppi": "B", "Suunta": "K" }, "geometry": { "type": "Point", "coordinates": [ 23459625.991249438375235, 6702405.184565890580416 ] } }, { "type": "Feature", "properties": { "fid": 128, "ID": "5", "Osoite_fi": "Skarppakullantie 37", "Osoite_sv": "Skarppakullav 37", "Osoite_en": "Skarppakullantie 37", "Liittymänumero": "652", "Ilmaisimet": "108L", "Mittauspisteiden_ID": "8388", "Tyyppi": "B", "Suunta": "P" }, "geometry": { "type": "Point", "coordinates": [ 23462435.035431213676929, 6702584.968190263025463 ] } }, - { "type": "Feature", "properties": { "fid": 140, "ID": "9", "Osoite_fi": "Skarppakullantie/Skanssinkatu", "Osoite_sv": "Skarppakullavägen/Skansgatan", "Osoite_en": "Skarppakullantie/Skanssinkatu", "Liittymänumero": "653", "Ilmaisimet": "208L", "Mittauspisteiden_ID": "8461", "Tyyppi": "B", "Suunta": "K" }, "geometry": { "type": "Point", "coordinates": [ 23462437.22350587323308, 6702359.768247644416988 ] } } + { "type": "Feature", "properties": { "fid": 140, "ID": "9", "Osoite_fi": "Skarppakullantie/Skanssinkatu", "Osoite_sv": "Skarppakullavägen/Skansgatan", "Osoite_en": "Skarppakullantie/Skanssinkatu", "Liittymänumero": "653", "Ilmaisimet": "208L", "Mittauspisteiden_ID": "8461", "Tyyppi": "B", "Suunta": "K" }, "geometry": { "type": "Point", "coordinates": [ 23462437.22350587323308, 6702359.768247644416988 ] } }, + { "type": "Feature", "properties": { "fid": 3, "ID": null, "Osoite_fi": "Itäinen rantakatu 8", "Osoite_sv": "Östra strandgatan 8", "Osoite_en": "Itäinen rantakatu 8", "Liittymänumero": "701", "Ilmaisimet": "X3", "Mittauspisteiden_ID": "903", "Tyyppi": "P", "Suunta": "T" }, "geometry": { "type": "Point", "coordinates": [ 
23459837.50562446564436, 6704244.290738201700151 ] } }, + { "type": "Feature", "properties": { "fid": 4, "ID": null, "Osoite_fi": "Itäinen rantakatu 43", "Osoite_sv": "Östra strandgatan 43", "Osoite_en": "Itäinen rantakatu 43", "Liittymänumero": "710", "Ilmaisimet": "pp1", "Mittauspisteiden_ID": "2273", "Tyyppi": "P", "Suunta": "T" }, "geometry": { "type": "Point", "coordinates": [ 23459072.372346129268408, 6703798.638464853167534 ] } }, + { "type": "Feature", "properties": { "fid": 5, "ID": null, "Osoite_fi": "Itäinen rantakatu 43", "Osoite_sv": "Östra strandgatan 43", "Osoite_en": "Itäinen rantakatu 43", "Liittymänumero": "710", "Ilmaisimet": "pp2", "Mittauspisteiden_ID": "2274", "Tyyppi": "P", "Suunta": "T" }, "geometry": { "type": "Point", "coordinates": [ 23459073.968076642602682, 6703799.561500884592533 ] } }, + { "type": "Feature", "properties": { "fid": 6, "ID": null, "Osoite_fi": "Itäinen rantakatu 43", "Osoite_sv": "Östra strandgatan 43", "Osoite_en": "Itäinen rantakatu 43", "Liittymänumero": "710", "Ilmaisimet": "pp3", "Mittauspisteiden_ID": "2275", "Tyyppi": "P", "Suunta": "T" }, "geometry": { "type": "Point", "coordinates": [ 23459076.346651934087276, 6703801.192924783565104 ] } }, + { "type": "Feature", "properties": { "fid": 7, "ID": null, "Osoite_fi": "Itäinen rantakatu 43", "Osoite_sv": "Östra strandgatan 43", "Osoite_en": "Itäinen rantakatu 43", "Liittymänumero": "710", "Ilmaisimet": "pp4", "Mittauspisteiden_ID": "2276", "Tyyppi": "P", "Suunta": "T" }, "geometry": { "type": "Point", "coordinates": [ 23459077.927361972630024, 6703802.362830685451627 ] } }, + { "type": "Feature", "properties": { "fid": 8, "ID": null, "Osoite_fi": "Myllysilta 1", "Osoite_sv": "Kvarnbron 1", "Osoite_en": "Myllysilta 1", "Liittymänumero": "108", "Ilmaisimet": "2PP", "Mittauspisteiden_ID": "1921", "Tyyppi": "P", "Suunta": "T" }, "geometry": { "type": "Point", "coordinates": [ 23459138.651998423039913, 6703933.748812982812524 ] } }, + { "type": "Feature", "properties": { 
"fid": 9, "ID": "", "Osoite_fi": "Myllysilta 2", "Osoite_sv": "Kvarnbron 2", "Osoite_en": "Myllysilta 2", "Liittymänumero": "108", "Ilmaisimet": "1PP", "Mittauspisteiden_ID": "1920", "Tyyppi": "P", "Suunta": "T" }, "geometry": { "type": "Point", "coordinates": [ 23459121.444739304482937, 6703958.63655239995569 ] } }, + { "type": "Feature", "properties": { "fid": 10, "ID": null, "Osoite_fi": "Martinsilta", "Osoite_sv": "Martinsbron", "Osoite_en": "Martinsilta", "Liittymänumero": "101", "Ilmaisimet": "1PP", "Mittauspisteiden_ID": "1570", "Tyyppi": "P", "Suunta": "T" }, "geometry": { "type": "Point", "coordinates": [ 23458979.291542522609234, 6703870.531346249394119 ] } }, + { "type": "Feature", "properties": { "fid": 11, "ID": null, "Osoite_fi": "Kalevantie 17", "Osoite_sv": "Kalevavägen 17", "Osoite_en": "Kalevantie 17", "Liittymänumero": "518", "Ilmaisimet": "ZELT_pp", "Mittauspisteiden_ID": "6816", "Tyyppi": "P", "Suunta": "T" }, "geometry": { "type": "Point", "coordinates": [ 23461917.734735164791346, 6704427.855136526748538 ] } }, + { "type": "Feature", "properties": { "fid": 12, "ID": null, "Osoite_fi": "Lemminkäisenkatu 35", "Osoite_sv": "Lemminkäinengatan 35", "Osoite_en": "Lemminkäisenkatu 35", "Liittymänumero": "642", "Ilmaisimet": "PP2", "Mittauspisteiden_ID": "7172", "Tyyppi": "P", "Suunta": "T" }, "geometry": { "type": "Point", "coordinates": [ 23461714.611965991556644, 6703777.939228449948132 ] } }, + { "type": "Feature", "properties": { "fid": 13, "ID": "", "Osoite_fi": "Lemminkäisenkatu 36", "Osoite_sv": "Lemminkäinengatan 36", "Osoite_en": "Lemminkäisenkatu 36", "Liittymänumero": "642", "Ilmaisimet": "ZELT_pp", "Mittauspisteiden_ID": "7171", "Tyyppi": "P", "Suunta": "T" }, "geometry": { "type": "Point", "coordinates": [ 23461637.359737996011972, 6703819.079618069343269 ] } }, + { "type": "Feature", "properties": { "fid": 35, "ID": null, "Osoite_fi": "Helsinginkatu 7", "Osoite_sv": "Helsingforsgatan 7", "Osoite_en": "Helsinginkatu 7", 
"Liittymänumero": "619", "Ilmaisimet": "X4", "Mittauspisteiden_ID": "4083", "Tyyppi": "P", "Suunta": "T" }, "geometry": { "type": "Point", "coordinates": [ 23460447.87659065425396, 6705474.187256957404315 ] } }, + { "type": "Feature", "properties": { "fid": 36, "ID": null, "Osoite_fi": "Helsinginkatu 7", "Osoite_sv": "Helsingforsgatan 7", "Osoite_en": "Helsinginkatu 7", "Liittymänumero": "619", "Ilmaisimet": "X3", "Mittauspisteiden_ID": "4080", "Tyyppi": "P", "Suunta": "T" }, "geometry": { "type": "Point", "coordinates": [ 23460433.286612428724766, 6705502.207770394161344 ] } } ] } \ No newline at end of file diff --git a/eco_counter/management/commands/import_telraam_to_csv.py b/eco_counter/management/commands/import_telraam_to_csv.py index 6ef2e0f5b..a6143a79a 100644 --- a/eco_counter/management/commands/import_telraam_to_csv.py +++ b/eco_counter/management/commands/import_telraam_to_csv.py @@ -7,6 +7,7 @@ import json import logging import os +import re from datetime import date, datetime, timedelta import pandas as pd @@ -20,6 +21,7 @@ TELRAAM_COUNTER_CAMERAS, TELRAAM_COUNTER_CSV_FILE, TELRAAM_COUNTER_CSV_FILE_PATH, + TELRAAM_COUNTER_DATA_TIME_FORMAT, TELRAAM_COUNTER_START_MONTH, TELRAAM_COUNTER_START_YEAR, TELRAAM_COUNTER_TRAFFIC_URL, @@ -101,61 +103,78 @@ def get_delta_hours(from_date: datetime, end_date: datetime) -> datetime: def get_day_data( - day_date: date, camera_id: str, utf_offset: datetime, check_delta_hours: bool = True -) -> tuple[list, int]: + day_date: date, camera_id: str, utc_offset: datetime, check_delta_hours: bool = True +) -> list: from_datetime = ( - datetime(day_date.year, day_date.month, day_date.day, 0, 0, 0) - utf_offset - ) + datetime(day_date.year, day_date.month, day_date.day, 0, 0, 0) + ) - utc_offset from_datetime_str = from_datetime.strftime(TELRAAM_COUNTER_API_TIME_FORMAT) end_datetime = ( datetime(day_date.year, day_date.month, day_date.day) + timedelta(hours=23) + timedelta(minutes=59) - ) - utf_offset + ) - utc_offset 
end_datetime_str = end_datetime.strftime(TELRAAM_COUNTER_API_TIME_FORMAT) report = fetch_traffic_report(from_datetime_str, end_datetime_str, camera_id) - delta_hours = len(report) - if not report: - logger.warning( - f"No report found for camera {camera_id}, populating with empty dicts" - ) - report = [{} for a in range(delta_hours)] - else: - logger.info( - f"Imorted report with {len(report)} elements for camera {camera_id}" - ) - if check_delta_hours and delta_hours != 24: - dif = 24 - delta_hours - if day_date == date.today(): - logger.warning( - f"Fetched report with delta_hours not equal to 24, appending missing {dif} empty dicts." - ) - report += [{} for a in range(dif)] - - else: - # Case when camera gets turned on in the middle of day. - logger.warning( - f"Fetched report with delta_hours not equal to 24, adding missing {dif} empty dicts to start of report." - ) - report = [{} for a in range(dif)] + report - delta_hours = len(report) + logger.info( + f"Imorted report with {len(report)} elements for camera {camera_id}, for date {str(day_date)}" + ) res = [] - start_date = from_datetime - for item in report: + start_datetime = from_datetime + utc_offset + # As fetched data migth not include data for every hour, use report_index variable to index + report_index = 0 + # Add value for every hour + while start_datetime <= end_datetime + utc_offset: d = {} - d["date"] = datetime.strftime(start_date, TELRAAM_COUNTER_API_TIME_FORMAT) + d["date"] = datetime.strftime(start_datetime, TELRAAM_COUNTER_API_TIME_FORMAT) + item_datetime = None + report_item = None + if report_index < len(report): + report_item = report[report_index] + item_datetime = report_item["date"].replace(".000", "") + item_datetime = ( + datetime.strptime(item_datetime, TELRAAM_COUNTER_DATA_TIME_FORMAT) + + utc_offset + ) + # If datetimes are equal, the fetched report contains data for given start_datetime + if item_datetime == start_datetime: + # In next ireration read the next element in report + 
report_index += 1 + else: + report_item = None + for veh in VEHICLE_TYPES.keys(): for dir in DIRECTIONS: if dir == TOTAL: key = f"{veh}{dir}" else: key = f"{veh}_{dir}" - val = int(round(item.get(key, 0))) + if report_item: + val = int(round(report_item.get(key, 0))) + else: + val = 0 d[key] = val res.append(d) - start_date += timedelta(hours=1) - return res, delta_hours + start_datetime += timedelta(hours=1) + return res + + +def get_last_saved_date() -> date: + # Try to find the import from CSV file names + start_date = date.today() + pattern = r"^0" + # Go back 90 days, as three months is the maximum length that data is store in the telraam API + c = 90 + while c >= 0: + date_str = start_date.strftime("%d_%m_%Y").replace("_0", "_") + date_str = re.sub(pattern, "", date_str) + for filename in os.listdir(TELRAAM_COUNTER_CSV_FILE_PATH): + if filename.endswith(date_str + ".csv"): + return start_date + start_date -= timedelta(days=1) + c -= 1 + return None def save_dataframe(from_date: date = True) -> datetime: @@ -171,6 +190,27 @@ def save_dataframe(from_date: date = True) -> datetime: ) else: import_state = ImportState.objects.filter(csv_data_source=TELRAAM_CSV).first() + # In case that a import state is not found, try to create a state + # by finding the last date a CSV file is saved. 
+ if not import_state: + last_saved_date = get_last_saved_date() + if last_saved_date: + import_state = ImportState.objects.create( + csv_data_source=TELRAAM_CSV, + current_year_number=last_saved_date.year, + current_month_number=last_saved_date.month, + current_day_number=last_saved_date.day, + ) + else: + # As no date found set it to current date + date_today = date.today() + import_state = ImportState.objects.create( + csv_data_source=TELRAAM_CSV, + current_year_number=date_today.year, + current_month_number=date_today.month, + current_day_number=date_today.day, + ) + if not from_date: from_date = date( import_state.current_year_number, @@ -185,20 +225,15 @@ def save_dataframe(from_date: date = True) -> datetime: for camera in cameras: start_date = from_date while start_date <= date_today: - report, delta_hours = get_day_data( - start_date, camera["instance_id"], utc_offset - ) + report = get_day_data(start_date, camera["instance_id"], utc_offset) mappings = get_mappings( camera["mac"], direction=TELRAAM_COUNTER_CAMERAS[camera["mac"]] ) columns = {} columns[INDEX_COLUMN_NAME] = [] - for hour in range(delta_hours): - col_date = ( - datetime.strptime( - report[hour]["date"], TELRAAM_COUNTER_API_TIME_FORMAT - ) - + utc_offset + for hour in range(len(report)): + col_date = datetime.strptime( + report[hour]["date"], TELRAAM_COUNTER_API_TIME_FORMAT ) col_date_str = col_date.strftime(TELRAAM_COUNTER_API_TIME_FORMAT) columns[INDEX_COLUMN_NAME].append(col_date_str) @@ -214,6 +249,7 @@ def save_dataframe(from_date: date = True) -> datetime: else: values_list.append(report[hour][value_key]) columns[key] = values_list + df = pd.DataFrame(data=columns, index=columns[INDEX_COLUMN_NAME]) df = df.drop(columns=[INDEX_COLUMN_NAME], axis=1) df.index.rename(INDEX_COLUMN_NAME, inplace=True) @@ -262,5 +298,4 @@ def handle(self, *args, **options): return until_date = save_dataframe(from_date) - logger.info(f"Telraam data imported until {str(until_date)}") diff --git 
a/eco_counter/specification.swagger2.0.yaml b/eco_counter/specification.swagger2.0.yaml index 9505177f7..8302a6194 100755 --- a/eco_counter/specification.swagger2.0.yaml +++ b/eco_counter/specification.swagger2.0.yaml @@ -251,6 +251,10 @@ paths: description: "The type of the counter EC(Eco Counter), TC(Traffic Counter), LC(LAM Counter), TR(Telraam Counter)" name: counter_type type: string + - in: query + description: "The data type of the counter: A(car), B(bus), J(pedestrian) or P(bicycle). Returns stations containing data of the specified type." + name: data_type + type: string responses: 200: description: "List of stations." diff --git a/eco_counter/tests/test_api.py b/eco_counter/tests/test_api.py index f10ee7f9c..9e7fc2ce7 100644 --- a/eco_counter/tests/test_api.py +++ b/eco_counter/tests/test_api.py @@ -3,6 +3,7 @@ import pytest from rest_framework.reverse import reverse +from .conftest import TEST_TIMESTAMP from .constants import TEST_EC_STATION_NAME @@ -271,3 +272,11 @@ def test__station(api_client, stations, year_datas): assert response.status_code == 200 assert response.json()["results"][0]["name"] == TEST_EC_STATION_NAME assert response.json()["results"][0]["sensor_types"] == ["at"] + assert response.json()["results"][0]["data_from_year"] == TEST_TIMESTAMP.year + # Test retrieving station by data type + url = reverse("eco_counter:stations-list") + "?data_type=a" + response = api_client.get(url) + assert response.json()["count"] == 1 + url = reverse("eco_counter:stations-list") + "?data_type=p" + response = api_client.get(url) + assert response.json()["count"] == 0 diff --git a/mobility_data/api/views.py b/mobility_data/api/views.py index 4e1ff1285..59660ed98 100644 --- a/mobility_data/api/views.py +++ b/mobility_data/api/views.py @@ -1,6 +1,5 @@ import logging import types -from distutils.util import strtobool from django.contrib.gis.gdal import SpatialReference from django.core.exceptions import ValidationError @@ -12,6 +11,7 @@ from 
rest_framework.response import Response from services.models import Unit +from services.utils import strtobool from ..models import ContentType, GroupType, MobileUnit, MobileUnitGroup from .serializers import ( diff --git a/mobility_data/importers/berths.py b/mobility_data/importers/berths.py index 9c0d40af3..d45338503 100644 --- a/mobility_data/importers/berths.py +++ b/mobility_data/importers/berths.py @@ -1,9 +1,10 @@ import csv import os -from distutils.util import strtobool from django.contrib.gis.geos import Point +from services.utils import strtobool + from .utils import FieldTypes, get_file_name_from_data_source, get_root_dir # Default name of the file, if not added to DataSource. diff --git a/mobility_data/importers/bicycle_stands.py b/mobility_data/importers/bicycle_stands.py index 53ea9bc68..a1b3b4ec4 100644 --- a/mobility_data/importers/bicycle_stands.py +++ b/mobility_data/importers/bicycle_stands.py @@ -8,11 +8,7 @@ from django.conf import settings from django.contrib.gis.gdal import DataSource from django.contrib.gis.geos import GEOSGeometry -from munigeo.models import ( - AdministrativeDivision, - AdministrativeDivisionGeometry, - Municipality, -) +from munigeo.models import Municipality from services.models import Unit from smbackend_turku.importers.utils import get_external_source_config @@ -22,7 +18,6 @@ get_municipality_name, get_root_dir, get_street_name_translations, - locates_in_turku, MobileUnitDataBase, ) @@ -43,10 +38,6 @@ GEOJSON_SOURCE_DATA_SRID = 4326 GEOJSON_FILENAME = "bicycle_stands_for_units.geojson" logger = logging.getLogger("mobility_data") -division_turku = AdministrativeDivision.objects.get(name="Turku") -turku_boundary = AdministrativeDivisionGeometry.objects.get( - division=division_turku -).boundary class BicyleStand(MobileUnitDataBase): @@ -198,24 +189,24 @@ def set_gml_feature(self, feature): self.prefix_name = {k: f"{NAME_PREFIX[k]} {v}" for k, v in self.name.items()} -def get_bicycle_stand_objects(data_source=None): +def 
get_data_sources(): + data_sources = [] + # Add the WFS datasource that is in GML format + ds = DataSource(BICYCLE_STANDS_URL) + data_sources.append(("gml", ds)) + # Add the GEOJSON datasource which is a file + data_path = os.path.join(get_root_dir(), "mobility_data/data") + file_path = os.path.join(data_path, GEOJSON_FILENAME) + ds = DataSource(file_path) + data_sources.append(("geojson", ds)) + return data_sources + + +def get_bicycle_stand_objects(): """ Returns a list containg instances of BicycleStand class. """ - data_sources = [] - - if data_source: - data_sources.append(data_source) - else: - # Add the WFS datasource that is in GML format - ds = DataSource(BICYCLE_STANDS_URL) - data_sources.append(("gml", ds)) - # Add the GEOJSON datasource which is a file - data_path = os.path.join(get_root_dir(), "mobility_data/data") - file_path = os.path.join(data_path, GEOJSON_FILENAME) - ds = DataSource(file_path) - data_sources.append(("geojson", ds)) - + data_sources = get_data_sources() bicycle_stands = [] """ external_stands dict is used to keep track of the names of imported external stands @@ -225,25 +216,19 @@ def get_bicycle_stand_objects(data_source=None): external_stands = {} for data_source in data_sources: for feature in data_source[1][0]: - source_data_srid = ( - WFS_SOURCE_DATA_SRID - if data_source[0] == "gml" - else GEOJSON_SOURCE_DATA_SRID - ) - if locates_in_turku(feature, source_data_srid): - bicycle_stand = BicyleStand() - if data_source[0] == "gml": - bicycle_stand.set_gml_feature(feature) - elif data_source[0] == "geojson": - bicycle_stand.set_geojson_feature(feature) - if ( - bicycle_stand.name[FI_KEY] not in external_stands - and not bicycle_stand.extra["maintained_by_turku"] - ): - external_stands[bicycle_stand.name[FI_KEY]] = True - bicycle_stands.append(bicycle_stand) - elif bicycle_stand.extra["maintained_by_turku"]: - bicycle_stands.append(bicycle_stand) + bicycle_stand = BicyleStand() + if data_source[0] == "gml": + 
bicycle_stand.set_gml_feature(feature) + elif data_source[0] == "geojson": + bicycle_stand.set_geojson_feature(feature) + if ( + bicycle_stand.name[FI_KEY] not in external_stands + and not bicycle_stand.extra["maintained_by_turku"] + ): + external_stands[bicycle_stand.name[FI_KEY]] = True + bicycle_stands.append(bicycle_stand) + elif bicycle_stand.extra["maintained_by_turku"]: + bicycle_stands.append(bicycle_stand) logger.info(f"Retrieved {len(bicycle_stands)} bicycle stands.") return bicycle_stands diff --git a/mobility_data/importers/bike_service_stations.py b/mobility_data/importers/bike_service_stations.py index 1a5870c00..3dff7e082 100644 --- a/mobility_data/importers/bike_service_stations.py +++ b/mobility_data/importers/bike_service_stations.py @@ -64,20 +64,18 @@ def __init__(self, feature): self.extra["in_terrain"] = feature["Maastossa"].as_string() -def get_bike_service_station_objects(geojson_file=None): - bicycle_repair_points = [] - file_name = None - if not geojson_file: - file_name = get_file_name_from_data_source(CONTENT_TYPE_NAME) - if not file_name: - file_name = f"{get_root_dir()}/mobility_data/data/{GEOJSON_FILENAME}" - else: - file_name = f"{get_root_dir()}/mobility_data/data/{GEOJSON_FILENAME}" +def get_data_layer(): + file_name = get_file_name_from_data_source(CONTENT_TYPE_NAME) + if not file_name: + file_name = f"{get_root_dir()}/mobility_data/data/{GEOJSON_FILENAME}" else: - # Use the test data file - file_name = f"{get_root_dir()}/mobility_data/tests/data/{geojson_file}" - + file_name = f"{get_root_dir()}/mobility_data/data/{GEOJSON_FILENAME}" data_layer = GDALDataSource(file_name)[0] - for feature in data_layer: + return data_layer + + +def get_bike_service_station_objects(): + bicycle_repair_points = [] + for feature in get_data_layer(): bicycle_repair_points.append(BikeServiceStation(feature)) return bicycle_repair_points diff --git a/mobility_data/importers/charging_stations.py b/mobility_data/importers/charging_stations.py index 
fbebc6441..21f80a9ad 100644 --- a/mobility_data/importers/charging_stations.py +++ b/mobility_data/importers/charging_stations.py @@ -111,18 +111,18 @@ def get_number_of_rows(file_name): return number_of_rows -def get_charging_station_objects(csv_file=None): +def get_csv_file_name(): + file_name = get_file_name_from_data_source(CONTENT_TYPE_NAME) + if file_name: + return file_name + return f"{get_root_dir()}/mobility_data/data/{SOURCE_DATA_FILE_NAME}" + + +def get_charging_station_objects(): # Store the imported stations to dict, the index is the key. + file_name = get_csv_file_name() charging_stations = {} column_mappings = {} - if not csv_file: - file_name = get_file_name_from_data_source(CONTENT_TYPE_NAME) - if not file_name: - file_name = f"{get_root_dir()}/mobility_data/data/{SOURCE_DATA_FILE_NAME}" - else: - # Use the test data file - file_name = f"{get_root_dir()}/mobility_data/tests/data/{csv_file}" - number_of_rows = get_number_of_rows(file_name) with open(file_name, encoding="utf-8-sig") as csv_file: csv_reader = csv.reader(csv_file, delimiter=";") diff --git a/mobility_data/importers/disabled_and_no_staff_parking.py b/mobility_data/importers/disabled_and_no_staff_parking.py index 1fd6982d7..4c54ec8d7 100644 --- a/mobility_data/importers/disabled_and_no_staff_parking.py +++ b/mobility_data/importers/disabled_and_no_staff_parking.py @@ -135,19 +135,17 @@ def __init__(self, feature): self.extra[field_name] = feature[field].as_double() -def get_no_staff_parking_objects(geojson_file=None): - no_staff_parkings = [] - disabled_parkings = [] - file_name = None +def get_geojson_file_name(): + file_name = get_file_name_from_data_source(NO_STAFF_PARKING_CONTENT_TYPE_NAME) + if file_name: + return file_name + return f"{get_root_dir()}/mobility_data/data/{GEOJSON_FILENAME}" - if not geojson_file: - file_name = get_file_name_from_data_source(NO_STAFF_PARKING_CONTENT_TYPE_NAME) - if not file_name: - file_name = f"{get_root_dir()}/mobility_data/data/{GEOJSON_FILENAME}" 
- else: - # Use the test data file - file_name = f"{get_root_dir()}/mobility_data/tests/data/{geojson_file}" +def get_no_staff_parking_objects(): + no_staff_parkings = [] + disabled_parkings = [] + file_name = get_geojson_file_name() data_layer = GDALDataSource(file_name)[0] for feature in data_layer: diff --git a/mobility_data/importers/gas_filling_station.py b/mobility_data/importers/gas_filling_station.py index 4624c12e8..3e9cd132a 100644 --- a/mobility_data/importers/gas_filling_station.py +++ b/mobility_data/importers/gas_filling_station.py @@ -1,12 +1,12 @@ import logging +import requests from django.conf import settings from django.contrib.gis.geos import Point, Polygon from munigeo.models import Municipality from .constants import SOUTHWEST_FINLAND_BOUNDARY, SOUTHWEST_FINLAND_BOUNDARY_SRID from .utils import ( - fetch_json, get_street_name_and_number, get_street_name_translations, LANGUAGES, @@ -54,15 +54,21 @@ def __init__(self, elem, srid=settings.DEFAULT_SRID): self.extra["lng_cng"] = self.lng_cng -def get_filtered_gas_filling_station_objects(json_data=None): +def get_json_data(url): + response = requests.get(url) + assert response.status_code == 200, "Fetching {} status code: {}".format( + url, response.status_code + ) + return response.json() + + +def get_filtered_gas_filling_station_objects(): """ Returns a list of GasFillingStation objects that are filtered by location. Stations inside boundarys of Southwest Finland are included, the rest are discarded. 
""" - - if not json_data: - json_data = fetch_json(GAS_FILLING_STATIONS_URL) + json_data = get_json_data(GAS_FILLING_STATIONS_URL) # srid = json_data["spatialReference"]["wkid"] # NOTE, hack to fix srid 102100 in source data causes "crs not found" srid = 4326 diff --git a/mobility_data/importers/loading_unloading_places.py b/mobility_data/importers/loading_unloading_places.py index 6a5e03f00..0e6e9d7ca 100644 --- a/mobility_data/importers/loading_unloading_places.py +++ b/mobility_data/importers/loading_unloading_places.py @@ -104,18 +104,16 @@ def __init__(self, feature): self.extra[field_name] = feature[field].as_int() -def get_loading_and_unloading_objects(geojson_file=None): - objects = [] - file_name = None +def get_geojson_file_name(): + file_name = get_file_name_from_data_source(CONTENT_TYPE_NAME) + if file_name: + return file_name + return f"{get_root_dir()}/mobility_data/data/{GEOJSON_FILENAME}" - if not geojson_file: - file_name = get_file_name_from_data_source(CONTENT_TYPE_NAME) - if not file_name: - file_name = f"{get_root_dir()}/mobility_data/data/{GEOJSON_FILENAME}" - else: - # Use the test data file - file_name = f"{get_root_dir()}/mobility_data/tests/data/{geojson_file}" +def get_loading_and_unloading_objects(): + objects = [] + file_name = get_geojson_file_name() data_layer = GDALDataSource(file_name)[0] for feature in data_layer: objects.append(LoadingPlace(feature)) diff --git a/mobility_data/importers/lounaistieto_shapefiles.py b/mobility_data/importers/lounaistieto_shapefiles.py index 14ce4fd73..395b76a3a 100644 --- a/mobility_data/importers/lounaistieto_shapefiles.py +++ b/mobility_data/importers/lounaistieto_shapefiles.py @@ -123,5 +123,5 @@ def import_lounaistieto_data_source(config): if obj.add_feature(feature, config, srid): objects.append(obj) content_type = get_or_create_content_type_from_config(config["content_type_name"]) - num_ceated, num_deleted = save_to_database(objects, content_type) - log_imported_message(logger, content_type, 
num_ceated, num_deleted) + num_created, num_deleted = save_to_database(objects, content_type) + log_imported_message(logger, content_type, num_created, num_deleted) diff --git a/mobility_data/importers/share_car_parking_places.py b/mobility_data/importers/share_car_parking_places.py index eb31dd927..bed241ed1 100644 --- a/mobility_data/importers/share_car_parking_places.py +++ b/mobility_data/importers/share_car_parking_places.py @@ -54,16 +54,16 @@ def __init__(self, feature): self.extra[self.RESTRICTION_FIELD][language] = restrictions[i].strip() +def get_geojson_file_name(): + file_name = get_file_name_from_data_source(CONTENT_TYPE_NAME) + if file_name: + return file_name + return f"{get_root_dir()}/mobility_data/data/{GEOJSON_FILENAME}" + + def get_car_share_parking_place_objects(geojson_file=None): car_share_parking_places = [] - file_name = None - if not geojson_file: - file_name = get_file_name_from_data_source(CONTENT_TYPE_NAME) - if not file_name: - file_name = f"{get_root_dir()}/mobility_data/data/{GEOJSON_FILENAME}" - else: - # Use the test data file - file_name = f"{get_root_dir()}/mobility_data/tests/data/{geojson_file}" + file_name = get_geojson_file_name() data_layer = GDALDataSource(file_name)[0] for feature in data_layer: diff --git a/mobility_data/importers/wfs.py b/mobility_data/importers/wfs.py index 15cf5bd25..b2b04f2b9 100644 --- a/mobility_data/importers/wfs.py +++ b/mobility_data/importers/wfs.py @@ -4,6 +4,7 @@ from django import db from django.conf import settings from django.contrib.gis.gdal import DataSource +from django.contrib.gis.gdal.error import GDALException from django.contrib.gis.geos import GEOSGeometry, MultiPolygon, Polygon from munigeo.models import Municipality @@ -67,22 +68,24 @@ def add_feature(self, feature, config): if config.get("locates_in_turku", False): if not locates_in_turku(feature, source_srid): return False - # If geometry contains multiple polygons and create_multipolygon attribute is True # create one 
multipolygon from the polygons. - if ( - len(feature.geom.coords) > 1 - and create_multipolygon - and isinstance(feature.geom, gdalgeometries.Polygon) - ): - polygons = [] - for coords in feature.geom.coords: - polygons.append(Polygon(coords, srid=source_srid)) - self.geometry = MultiPolygon(polygons, srid=source_srid) - else: - self.geometry = GEOSGeometry(feature.geom.wkt, srid=source_srid) - self.geometry.transform(settings.DEFAULT_SRID) - + try: + if ( + len(feature.geom.coords) > 1 + and create_multipolygon + and isinstance(feature.geom, gdalgeometries.Polygon) + ): + polygons = [] + for coords in feature.geom.coords: + polygons.append(Polygon(coords, srid=source_srid)) + self.geometry = MultiPolygon(polygons, srid=source_srid) + else: + self.geometry = GEOSGeometry(feature.geom.wkt, srid=source_srid) + self.geometry.transform(settings.DEFAULT_SRID) + except GDALException as ex: + logger.error(ex) + return False if "municipality" in config: municipality = feature[config["municipality"]].as_string() if municipality: @@ -129,8 +132,16 @@ def add_feature(self, feature, config): return True +def get_data_source(config, max_features): + wfs_url = config.get("wfs_url", settings.TURKU_WFS_URL) + url = WFS_URL.format( + wfs_url=wfs_url, wfs_layer=config["wfs_layer"], max_features=max_features + ) + ds = DataSource(url) + return ds + + def import_wfs_feature(config, data_file=None): - max_features = DEFAULT_MAX_FEATURES if "content_type_name" not in config: logger.warning(f"Skipping feature {config}, 'content_type_name' is required.") return False @@ -139,17 +150,13 @@ def import_wfs_feature(config, data_file=None): return False if "max_features" in config: max_features = config["max_features"] - wfs_layer = config["wfs_layer"] + else: + max_features = DEFAULT_MAX_FEATURES objects = [] if data_file: ds = DataSource(data_file) else: - wfs_url = config.get("wfs_url", settings.TURKU_WFS_URL) - - url = WFS_URL.format( - wfs_url=wfs_url, wfs_layer=wfs_layer, 
max_features=max_features - ) - ds = DataSource(url) + ds = get_data_source(config, max_features) assert len(ds) == 1 layer = ds[0] for feature in layer: @@ -157,5 +164,5 @@ def import_wfs_feature(config, data_file=None): if object.add_feature(feature, config): objects.append(object) content_type = get_or_create_content_type_from_config(config["content_type_name"]) - num_ceated, num_deleted = save_to_database(objects, content_type) - log_imported_message(logger, content_type, num_ceated, num_deleted) + num_created, num_deleted = save_to_database(objects, content_type) + log_imported_message(logger, content_type, num_created, num_deleted) diff --git a/mobility_data/management/commands/_utils.py b/mobility_data/management/commands/_utils.py deleted file mode 100644 index 9c236e3cf..000000000 --- a/mobility_data/management/commands/_utils.py +++ /dev/null @@ -1,13 +0,0 @@ -from django.conf import settings -from django.contrib.gis.gdal import DataSource - -from mobility_data.models import ContentType - - -def get_test_gdal_data_source(file_name): - """ - Returns the given file_name as a GDAL Datasource, - the file must be located in /mobility_data/tests/data/ - """ - path = f"{settings.BASE_DIR}/{ContentType._meta.app_label}/tests/data/" - return DataSource(path + file_name) diff --git a/mobility_data/management/commands/delete_deprecated_units.py b/mobility_data/management/commands/delete_deprecated_units.py index e3e8c6e9c..9cd074122 100644 --- a/mobility_data/management/commands/delete_deprecated_units.py +++ b/mobility_data/management/commands/delete_deprecated_units.py @@ -5,7 +5,7 @@ """ This command removes all units that have a ContentType or GroupType where type_name is not Null. This data is deprecated -as the only the name will be used in future. +as only the name will be used in future. 
""" diff --git a/mobility_data/management/commands/import_bicycle_stands.py b/mobility_data/management/commands/import_bicycle_stands.py index ca324392e..73c3c4de9 100644 --- a/mobility_data/management/commands/import_bicycle_stands.py +++ b/mobility_data/management/commands/import_bicycle_stands.py @@ -1,5 +1,7 @@ import logging +from django.core.management import BaseCommand + from mobility_data.importers.bicycle_stands import ( BICYCLE_STANDS_URL, CONTENT_TYPE_NAME, @@ -11,31 +13,13 @@ save_to_database, ) -from ._base_import_command import BaseImportCommand -from ._utils import get_test_gdal_data_source - logger = logging.getLogger("mobility_data") -class Command(BaseImportCommand): +class Command(BaseCommand): def handle(self, *args, **options): - logger.info("Importing bicycle stands.") - if options["test_mode"]: - logger.info("Running bicycle stand importer in test mode.") - file_name = options["test_mode"] - data_source = None - ds = get_test_gdal_data_source(file_name) - - if file_name.endswith("gml"): - data_source = ("gml", ds) - elif file_name.endswith("geojson"): - data_source = ("geojson", ds) - - objects = get_bicycle_stand_objects(data_source=data_source) - else: - logger.info("Fetching bicycle stands from: {}".format(BICYCLE_STANDS_URL)) - objects = get_bicycle_stand_objects() - + logger.info("Importing bicycle stands from: {}".format(BICYCLE_STANDS_URL)) + objects = get_bicycle_stand_objects() content_type = get_or_create_content_type_from_config(CONTENT_TYPE_NAME) - num_ceated, num_deleted = save_to_database(objects, content_type) - log_imported_message(logger, content_type, num_ceated, num_deleted) + num_created, num_deleted = save_to_database(objects, content_type) + log_imported_message(logger, content_type, num_created, num_deleted) diff --git a/mobility_data/management/commands/import_bike_service_stations.py b/mobility_data/management/commands/import_bike_service_stations.py index 9bcce1976..b63ab8b34 100644 --- 
a/mobility_data/management/commands/import_bike_service_stations.py +++ b/mobility_data/management/commands/import_bike_service_stations.py @@ -1,5 +1,7 @@ import logging +from django.core.management import BaseCommand + from mobility_data.importers.bike_service_stations import ( CONTENT_TYPE_NAME, get_bike_service_station_objects, @@ -10,19 +12,13 @@ save_to_database, ) -from ._base_import_command import BaseImportCommand - logger = logging.getLogger("mobility_data") -class Command(BaseImportCommand): +class Command(BaseCommand): def handle(self, *args, **options): logger.info("Importing bike service stations.") - geojson_file = None - if options["test_mode"]: - geojson_file = options["test_mode"] - - objects = get_bike_service_station_objects(geojson_file=geojson_file) + objects = get_bike_service_station_objects() content_type = get_or_create_content_type_from_config(CONTENT_TYPE_NAME) - num_ceated, num_deleted = save_to_database(objects, content_type) - log_imported_message(logger, content_type, num_ceated, num_deleted) + num_created, num_deleted = save_to_database(objects, content_type) + log_imported_message(logger, content_type, num_created, num_deleted) diff --git a/mobility_data/management/commands/import_charging_stations.py b/mobility_data/management/commands/import_charging_stations.py index acd22e813..7c4066cbf 100644 --- a/mobility_data/management/commands/import_charging_stations.py +++ b/mobility_data/management/commands/import_charging_stations.py @@ -1,5 +1,7 @@ import logging +from django.core.management import BaseCommand + from mobility_data.importers.charging_stations import ( CONTENT_TYPE_NAME, get_charging_station_objects, @@ -10,19 +12,13 @@ save_to_database, ) -from ._base_import_command import BaseImportCommand - logger = logging.getLogger("mobility_data") -class Command(BaseImportCommand): +class Command(BaseCommand): def handle(self, *args, **options): logger.info("Importing charging stations...") - csv_file = None - if 
options["test_mode"]: - logger.info("Running charging_station_importer in test mode.") - csv_file = options["test_mode"] - objects = get_charging_station_objects(csv_file=csv_file) + objects = get_charging_station_objects() content_type = get_or_create_content_type_from_config(CONTENT_TYPE_NAME) num_created, num_deleted = save_to_database(objects, content_type) log_imported_message(logger, content_type, num_created, num_deleted) diff --git a/mobility_data/management/commands/import_disabled_and_no_staff_parkings.py b/mobility_data/management/commands/import_disabled_and_no_staff_parkings.py index b4c21b859..2b468d35a 100644 --- a/mobility_data/management/commands/import_disabled_and_no_staff_parkings.py +++ b/mobility_data/management/commands/import_disabled_and_no_staff_parkings.py @@ -1,5 +1,7 @@ import logging +from django.core.management import BaseCommand + from mobility_data.importers.disabled_and_no_staff_parking import ( DISABLED_PARKING_CONTENT_TYPE_NAME, get_no_staff_parking_objects, @@ -11,32 +13,27 @@ save_to_database, ) -from ._base_import_command import BaseImportCommand - logger = logging.getLogger("mobility_data") -class Command(BaseImportCommand): +class Command(BaseCommand): def handle(self, *args, **options): logger.info("Importing disabled and no staff parkings.") - geojson_file = None - if options["test_mode"]: - geojson_file = options["test_mode"] ( no_stuff_parking_objects, disabled_parking_objects, - ) = get_no_staff_parking_objects(geojson_file=geojson_file) + ) = get_no_staff_parking_objects() content_type = get_or_create_content_type_from_config( NO_STAFF_PARKING_CONTENT_TYPE_NAME ) - num_ceated, num_deleted = save_to_database( + num_created, num_deleted = save_to_database( no_stuff_parking_objects, content_type ) - log_imported_message(logger, content_type, num_ceated, num_deleted) + log_imported_message(logger, content_type, num_created, num_deleted) content_type = get_or_create_content_type_from_config( 
DISABLED_PARKING_CONTENT_TYPE_NAME ) - num_ceated, num_deleted = save_to_database( + num_created, num_deleted = save_to_database( disabled_parking_objects, content_type ) - log_imported_message(logger, content_type, num_ceated, num_deleted) + log_imported_message(logger, content_type, num_created, num_deleted) diff --git a/mobility_data/management/commands/import_foli_parkandride_stops.py b/mobility_data/management/commands/import_foli_parkandride_stops.py index f2917c459..ba14a74e1 100644 --- a/mobility_data/management/commands/import_foli_parkandride_stops.py +++ b/mobility_data/management/commands/import_foli_parkandride_stops.py @@ -23,11 +23,11 @@ def handle(self, *args, **options): content_type = get_or_create_content_type_from_config( FOLI_PARKANDRIDE_CARS_STOP_CONTENT_TYPE_NAME ) - num_ceated, num_deleted = save_to_database(car_stops, content_type) - log_imported_message(logger, content_type, num_ceated, num_deleted) + num_created, num_deleted = save_to_database(car_stops, content_type) + log_imported_message(logger, content_type, num_created, num_deleted) content_type = get_or_create_content_type_from_config( FOLI_PARKANDRIDE_BIKES_STOP_CONTENT_TYPE_NAME ) bike_stops = get_parkandride_bike_stop_objects() - num_ceated, num_deleted = save_to_database(bike_stops, content_type) - log_imported_message(logger, content_type, num_ceated, num_deleted) + num_created, num_deleted = save_to_database(bike_stops, content_type) + log_imported_message(logger, content_type, num_created, num_deleted) diff --git a/mobility_data/management/commands/import_foli_stops.py b/mobility_data/management/commands/import_foli_stops.py index 022f08281..79fc67a5c 100644 --- a/mobility_data/management/commands/import_foli_stops.py +++ b/mobility_data/management/commands/import_foli_stops.py @@ -17,5 +17,5 @@ def handle(self, *args, **options): logger.info("Importing Föli stops") objects = get_foli_stops() content_type = get_or_create_content_type_from_config(CONTENT_TYPE_NAME) - 
num_ceated, num_deleted = save_to_database(objects, content_type) - log_imported_message(logger, content_type, num_ceated, num_deleted) + num_created, num_deleted = save_to_database(objects, content_type) + log_imported_message(logger, content_type, num_created, num_deleted) diff --git a/mobility_data/management/commands/import_loading_and_unloading_places.py b/mobility_data/management/commands/import_loading_and_unloading_places.py index 7bef1af63..06cc20f01 100644 --- a/mobility_data/management/commands/import_loading_and_unloading_places.py +++ b/mobility_data/management/commands/import_loading_and_unloading_places.py @@ -1,5 +1,7 @@ import logging +from django.core.management import BaseCommand + from mobility_data.importers.loading_unloading_places import ( CONTENT_TYPE_NAME, get_loading_and_unloading_objects, @@ -10,18 +12,13 @@ save_to_database, ) -from ._base_import_command import BaseImportCommand - logger = logging.getLogger("mobility_data") -class Command(BaseImportCommand): +class Command(BaseCommand): def handle(self, *args, **options): logger.info("Importing loading and unloading places.") - geojson_file = None - if options["test_mode"]: - geojson_file = options["test_mode"] - objects = get_loading_and_unloading_objects(geojson_file=geojson_file) + objects = get_loading_and_unloading_objects() content_type = get_or_create_content_type_from_config(CONTENT_TYPE_NAME) - num_ceated, num_deleted = save_to_database(objects, content_type) - log_imported_message(logger, content_type, num_ceated, num_deleted) + num_created, num_deleted = save_to_database(objects, content_type) + log_imported_message(logger, content_type, num_created, num_deleted) diff --git a/mobility_data/management/commands/import_lounaistieto_shapefiles.py b/mobility_data/management/commands/import_lounaistieto_shapefiles.py index 060b2b3f6..2a2b3c00b 100644 --- a/mobility_data/management/commands/import_lounaistieto_shapefiles.py +++ 
b/mobility_data/management/commands/import_lounaistieto_shapefiles.py @@ -2,19 +2,18 @@ import os import yaml +from django.core.management import BaseCommand from mobility_data.importers.lounaistieto_shapefiles import ( import_lounaistieto_data_source, ) from mobility_data.importers.utils import delete_mobile_units, get_root_dir -from ._base_import_command import BaseImportCommand - logger = logging.getLogger("mobility_data") CONFIG_FILE = "lounaistieto_shapefiles_config.yml" -class Command(BaseImportCommand): +class Command(BaseCommand): def add_arguments(self, parser): parser.add_argument( "-d", diff --git a/mobility_data/management/commands/import_marinas.py b/mobility_data/management/commands/import_marinas.py index c5bd2afd5..ab5a85e8a 100644 --- a/mobility_data/management/commands/import_marinas.py +++ b/mobility_data/management/commands/import_marinas.py @@ -1,5 +1,7 @@ import logging +from django.core.management import BaseCommand + from mobility_data.importers.marinas import ( BOAT_PARKING_CONTENT_TYPE_NAME, get_boat_parkings, @@ -14,28 +16,26 @@ save_to_database, ) -from ._base_import_command import BaseImportCommand - logger = logging.getLogger("mobility_data") -class Command(BaseImportCommand): +class Command(BaseCommand): def handle(self, *args, **options): objects = get_marinas() content_type = get_or_create_content_type_from_config(MARINA_CONTENT_TYPE_NAME) - num_ceated, num_deleted = save_to_database(objects, content_type) - log_imported_message(logger, content_type, num_ceated, num_deleted) + num_created, num_deleted = save_to_database(objects, content_type) + log_imported_message(logger, content_type, num_created, num_deleted) objects = get_boat_parkings() content_type = get_or_create_content_type_from_config( BOAT_PARKING_CONTENT_TYPE_NAME ) - num_ceated, num_deleted = save_to_database(objects, content_type) - log_imported_message(logger, content_type, num_ceated, num_deleted) + num_created, num_deleted = save_to_database(objects, content_type) + 
log_imported_message(logger, content_type, num_created, num_deleted) objects = get_guest_marinas() content_type = get_or_create_content_type_from_config( GUEST_MARINA_CONTENT_TYPE_NAME ) - num_ceated, num_deleted = save_to_database(objects, content_type) - log_imported_message(logger, content_type, num_ceated, num_deleted) + num_created, num_deleted = save_to_database(objects, content_type) + log_imported_message(logger, content_type, num_created, num_deleted) diff --git a/mobility_data/management/commands/import_outdoor_gym_devices.py b/mobility_data/management/commands/import_outdoor_gym_devices.py index aa1232eb2..b32eb7831 100644 --- a/mobility_data/management/commands/import_outdoor_gym_devices.py +++ b/mobility_data/management/commands/import_outdoor_gym_devices.py @@ -1,5 +1,7 @@ import logging +from django.core.management import BaseCommand + from mobility_data.importers.outdoor_gym_devices import ( CONTENT_TYPE_NAME, get_oudoor_gym_devices, @@ -10,14 +12,12 @@ save_to_database, ) -from ._base_import_command import BaseImportCommand - logger = logging.getLogger("mobility_data") -class Command(BaseImportCommand): +class Command(BaseCommand): def handle(self, *args, **options): objects = get_oudoor_gym_devices() content_type = get_or_create_content_type_from_config(CONTENT_TYPE_NAME) - num_ceated, num_deleted = save_to_database(objects, content_type) - log_imported_message(logger, content_type, num_ceated, num_deleted) + num_created, num_deleted = save_to_database(objects, content_type) + log_imported_message(logger, content_type, num_created, num_deleted) diff --git a/mobility_data/management/commands/import_parking_machines.py b/mobility_data/management/commands/import_parking_machines.py index 8ffadd6b7..97a032f87 100644 --- a/mobility_data/management/commands/import_parking_machines.py +++ b/mobility_data/management/commands/import_parking_machines.py @@ -19,5 +19,5 @@ class Command(BaseCommand): def handle(self, *args, **options): objects = 
get_parking_machine_objects() content_type = get_or_create_content_type_from_config(CONTENT_TYPE_NAME) - num_ceated, num_deleted = save_to_database(objects, content_type) - log_imported_message(logger, content_type, num_ceated, num_deleted) + num_created, num_deleted = save_to_database(objects, content_type) + log_imported_message(logger, content_type, num_created, num_deleted) diff --git a/mobility_data/management/commands/import_share_car_parking_places.py b/mobility_data/management/commands/import_share_car_parking_places.py index 13e1e6388..8eb3ce2d7 100644 --- a/mobility_data/management/commands/import_share_car_parking_places.py +++ b/mobility_data/management/commands/import_share_car_parking_places.py @@ -1,5 +1,7 @@ import logging +from django.core.management import BaseCommand + from mobility_data.importers.share_car_parking_places import ( CONTENT_TYPE_NAME, get_car_share_parking_place_objects, @@ -10,19 +12,13 @@ save_to_database, ) -from ._base_import_command import BaseImportCommand - logger = logging.getLogger("mobility_data") -class Command(BaseImportCommand): +class Command(BaseCommand): def handle(self, *args, **options): logger.info("Importing car share parking places.") - geojson_file = None - if options["test_mode"]: - geojson_file = options["test_mode"] - - objects = get_car_share_parking_place_objects(geojson_file=geojson_file) + objects = get_car_share_parking_place_objects() content_type = get_or_create_content_type_from_config(CONTENT_TYPE_NAME) - num_ceated, num_deleted = save_to_database(objects, content_type) - log_imported_message(logger, content_type, num_ceated, num_deleted) + num_created, num_deleted = save_to_database(objects, content_type) + log_imported_message(logger, content_type, num_created, num_deleted) diff --git a/mobility_data/management/commands/import_under_and_overpasses.py b/mobility_data/management/commands/import_under_and_overpasses.py index ead258163..79e0202b0 100644 --- 
a/mobility_data/management/commands/import_under_and_overpasses.py +++ b/mobility_data/management/commands/import_under_and_overpasses.py @@ -22,10 +22,10 @@ def handle(self, *args, **options): content_type = get_or_create_content_type_from_config( UNDERPASS_CONTENT_TYPE_NAME ) - num_ceated, num_deleted = save_to_database(underpass_objects, content_type) - log_imported_message(logger, content_type, num_ceated, num_deleted) + num_created, num_deleted = save_to_database(underpass_objects, content_type) + log_imported_message(logger, content_type, num_created, num_deleted) content_type = get_or_create_content_type_from_config( OVERPASS_CONTENT_TYPE_NAME ) - num_ceated, num_deleted = save_to_database(overpass_objects, content_type) - log_imported_message(logger, content_type, num_ceated, num_deleted) + num_created, num_deleted = save_to_database(overpass_objects, content_type) + log_imported_message(logger, content_type, num_created, num_deleted) diff --git a/mobility_data/tests/test_import_accessories.py b/mobility_data/tests/test_import_accessories.py index 0391b95f3..3e89549cc 100644 --- a/mobility_data/tests/test_import_accessories.py +++ b/mobility_data/tests/test_import_accessories.py @@ -7,28 +7,33 @@ has been removed from the test input data, as it causes GDAL DataSource to fail when loading data. 
""" +from unittest.mock import patch + import pytest from django.conf import settings from django.contrib.gis.geos import Point -from mobility_data.importers.wfs import DEFAULT_SOURCE_DATA_SRID +from mobility_data.importers.wfs import DEFAULT_SOURCE_DATA_SRID, import_wfs_feature +from mobility_data.management.commands.import_wfs import CONFIG_FILE, get_yaml_config from mobility_data.models import ContentType, MobileUnit -from .utils import import_command +from .utils import get_test_fixture_data_source @pytest.mark.django_db +@patch("mobility_data.importers.wfs.get_data_source") def test_import_accessories( + get_data_source_mock, administrative_division, administrative_division_type, administrative_division_geometry, ): - import_command( - "import_wfs", - ["PublicToilet", "PublicTable", "PublicBench", "PublicFurnitureGroup"], - data_file=f"{settings.BASE_DIR}/mobility_data/tests/data/accessories.gml", - ) - + config = get_yaml_config(CONFIG_FILE) + get_data_source_mock.return_value = get_test_fixture_data_source("accessories.gml") + features = ["PublicToilet", "PublicTable", "PublicBench", "PublicFurnitureGroup"] + for feature in config["features"]: + if feature["content_type_name"] in features: + import_wfs_feature(feature) public_toilet_content_type = ContentType.objects.get(type_name="PublicToilet") public_toilet_units_qs = MobileUnit.objects.filter( content_types=public_toilet_content_type diff --git a/mobility_data/tests/test_import_bicycle_stands.py b/mobility_data/tests/test_import_bicycle_stands.py index c58cc7396..bd67c8624 100644 --- a/mobility_data/tests/test_import_bicycle_stands.py +++ b/mobility_data/tests/test_import_bicycle_stands.py @@ -1,12 +1,31 @@ +from unittest.mock import patch + import pytest +from mobility_data.importers.utils import ( + delete_mobile_units, + get_or_create_content_type_from_config, + save_to_database, +) from mobility_data.models import MobileUnit -from .utils import import_command +from .utils import 
get_test_fixture_data_source + + +def get_geojson_data_source(file_name): + ds = get_test_fixture_data_source(file_name) + return [("geojson", ds)] + + +def get_gml_data_source(file_name): + ds = get_test_fixture_data_source(file_name) + return [("gml", ds)] @pytest.mark.django_db +@patch("mobility_data.importers.bicycle_stands.get_data_sources") def test_geojson_import( + get_data_sources_mock, municipalities, administrative_division_type, administrative_division, @@ -14,9 +33,19 @@ def test_geojson_import( streets, address, ): - import_command( - "import_bicycle_stands", test_mode="bicycle_stands_for_units.geojson" + from mobility_data.importers.bicycle_stands import ( + CONTENT_TYPE_NAME, + get_bicycle_stand_objects, ) + + get_data_sources_mock.return_value = get_geojson_data_source( + "bicycle_stands_for_units.geojson" + ) + objects = get_bicycle_stand_objects() + content_type = get_or_create_content_type_from_config(CONTENT_TYPE_NAME) + num_created, num_deleted = save_to_database(objects, content_type) + assert num_created == 3 + assert num_deleted == 0 assert MobileUnit.objects.all().count() == 3 kupittaan_maauimala = MobileUnit.objects.get(name="Kupittaan maauimala") assert kupittaan_maauimala @@ -35,10 +64,17 @@ def test_geojson_import( assert turun_amk.extra["hull_lockable"] is True assert turun_amk.extra["covered"] is False assert turun_amk.municipality.name == "Turku" + delete_mobile_units(content_type) + assert ( + MobileUnit.objects.filter(content_types__type_name=CONTENT_TYPE_NAME).count() + == 0 + ) @pytest.mark.django_db -def test_wfs_importer( +@patch("mobility_data.importers.bicycle_stands.get_data_sources") +def test_gml_importer( + get_data_sources_mock, municipalities, administrative_division_type, administrative_division, @@ -46,7 +82,17 @@ def test_wfs_importer( streets, address, ): - import_command("import_bicycle_stands", test_mode="bicycle_stands.gml") + from mobility_data.importers.bicycle_stands import ( + CONTENT_TYPE_NAME, + 
get_bicycle_stand_objects, + ) + + get_data_sources_mock.return_value = get_gml_data_source("bicycle_stands.gml") + objects = get_bicycle_stand_objects() + content_type = get_or_create_content_type_from_config(CONTENT_TYPE_NAME) + num_created, num_deleted = save_to_database(objects, content_type) + assert num_created == 3 + assert num_deleted == 0 assert MobileUnit.objects.all().count() == 3 # 0 in fixture xml. stand_normal = MobileUnit.objects.first() diff --git a/mobility_data/tests/test_import_bike_service_stations.py b/mobility_data/tests/test_import_bike_service_stations.py index 70fe149bd..98c21acfd 100644 --- a/mobility_data/tests/test_import_bike_service_stations.py +++ b/mobility_data/tests/test_import_bike_service_stations.py @@ -1,17 +1,33 @@ +from unittest.mock import patch + import pytest -from mobility_data.importers.bike_service_stations import CONTENT_TYPE_NAME -from mobility_data.importers.utils import get_content_type_config +from mobility_data.importers.utils import ( + get_content_type_config, + get_or_create_content_type_from_config, + save_to_database, +) from mobility_data.models import ContentType, MobileUnit -from .utils import import_command +from .utils import get_test_fixture_data_layer @pytest.mark.django_db -def test_import_bike_service_stations(): - import_command( - "import_bike_service_stations", test_mode="bike_service_stations.geojson" +@patch("mobility_data.importers.bike_service_stations.get_data_layer") +def test_import_bike_service_stations(get_data_layer_mock): + from mobility_data.importers.bike_service_stations import ( + CONTENT_TYPE_NAME, + get_bike_service_station_objects, + ) + + get_data_layer_mock.return_value = get_test_fixture_data_layer( + "bike_service_stations.geojson" ) + objects = get_bike_service_station_objects() + content_type = get_or_create_content_type_from_config(CONTENT_TYPE_NAME) + num_created, num_deleted = save_to_database(objects, content_type) + assert num_created == 3 + assert num_deleted == 0 
assert ContentType.objects.filter(type_name=CONTENT_TYPE_NAME).count() == 1 assert ( MobileUnit.objects.filter(content_types__type_name=CONTENT_TYPE_NAME).count() @@ -41,9 +57,14 @@ def test_import_bike_service_stations(): assert roola.name_en == "Röölä" assert roola.extra["in_terrain"] == "Kyllä" # Test that dublicates are not created - import_command( - "import_bike_service_stations", test_mode="bike_service_stations.geojson" + get_data_layer_mock.return_value = get_test_fixture_data_layer( + "bike_service_stations.geojson" ) + objects = get_bike_service_station_objects() + content_type = get_or_create_content_type_from_config(CONTENT_TYPE_NAME) + num_created, num_deleted = save_to_database(objects, content_type) + assert num_created == 0 + assert num_deleted == 0 assert ContentType.objects.filter(type_name=CONTENT_TYPE_NAME).count() == 1 assert ( MobileUnit.objects.filter(content_types__type_name=CONTENT_TYPE_NAME).count() diff --git a/mobility_data/tests/test_import_charging_stations.py b/mobility_data/tests/test_import_charging_stations.py index 6efb617cc..8b0a99ae7 100644 --- a/mobility_data/tests/test_import_charging_stations.py +++ b/mobility_data/tests/test_import_charging_stations.py @@ -1,18 +1,22 @@ +from unittest.mock import patch + import pytest from munigeo.models import Address -from mobility_data.importers.charging_stations import ( - CHARGING_STATION_SERVICE_NAMES, - CONTENT_TYPE_NAME, +from mobility_data.importers.charging_stations import CHARGING_STATION_SERVICE_NAMES +from mobility_data.importers.utils import ( + get_content_type_config, + get_or_create_content_type_from_config, + get_root_dir, + save_to_database, ) -from mobility_data.importers.utils import get_content_type_config from mobility_data.models import ContentType, MobileUnit -from .utils import import_command - @pytest.mark.django_db +@patch("mobility_data.importers.charging_stations.get_csv_file_name") def test_import_charging_stations( + get_csv_file_name_mock, municipalities, 
administrative_division_type, administrative_division, @@ -20,7 +24,18 @@ streets, address, ): - import_command("import_charging_stations", test_mode="charging_stations.csv") + from mobility_data.importers.charging_stations import ( + CONTENT_TYPE_NAME, + get_charging_station_objects, + ) + + file_name = f"{get_root_dir()}/mobility_data/tests/data/charging_stations.csv" + get_csv_file_name_mock.return_value = file_name + content_type = get_or_create_content_type_from_config(CONTENT_TYPE_NAME) + objects = get_charging_station_objects() + num_created, num_deleted = save_to_database(objects, content_type) + assert num_created == 3 + assert num_deleted == 0 assert ContentType.objects.filter(type_name=CONTENT_TYPE_NAME).count() == 1 assert ( MobileUnit.objects.filter(content_types__type_name=CONTENT_TYPE_NAME).count() @@ -64,7 +79,12 @@ == f"{CHARGING_STATION_SERVICE_NAMES['en']}, Ratapihankatu 53" ) # Test that dublicates are not created - import_command("import_charging_stations", test_mode="charging_stations.csv") + get_csv_file_name_mock.return_value = file_name + content_type = get_or_create_content_type_from_config(CONTENT_TYPE_NAME) + objects = get_charging_station_objects() + num_created, num_deleted = save_to_database(objects, content_type) + assert num_created == 0 + assert num_deleted == 0 assert ContentType.objects.filter(type_name=CONTENT_TYPE_NAME).count() == 1 assert ( MobileUnit.objects.filter(content_types__type_name=CONTENT_TYPE_NAME).count() diff --git a/mobility_data/tests/test_import_disabled_and_no_staff_parkings.py b/mobility_data/tests/test_import_disabled_and_no_staff_parkings.py index c56527a78..16950e664 100644 --- a/mobility_data/tests/test_import_disabled_and_no_staff_parkings.py +++ b/mobility_data/tests/test_import_disabled_and_no_staff_parkings.py @@ -1,21 +1,41 @@ +from unittest.mock import patch + import pytest from munigeo.models import Municipality -from 
mobility_data.importers.disabled_and_no_staff_parking import ( - DISABLED_PARKING_CONTENT_TYPE_NAME, - NO_STAFF_PARKING_CONTENT_TYPE_NAME, +from mobility_data.importers.utils import ( + get_or_create_content_type_from_config, + get_root_dir, + save_to_database, ) from mobility_data.models import MobileUnit -from .utils import import_command - @pytest.mark.django_db -def test_geojson_import(municipalities): - import_command( - "import_disabled_and_no_staff_parkings", - test_mode="autopysäköinti_eihlö.geojson", +@patch("mobility_data.importers.disabled_and_no_staff_parking.get_geojson_file_name") +def test_geojson_import(get_geojson_file_name_mock, municipalities): + from mobility_data.importers.disabled_and_no_staff_parking import ( + DISABLED_PARKING_CONTENT_TYPE_NAME, + get_no_staff_parking_objects, + NO_STAFF_PARKING_CONTENT_TYPE_NAME, + ) + + get_geojson_file_name_mock.return_value = ( + f"{get_root_dir()}/mobility_data/tests/data/autopysäköinti_eihlö.geojson" + ) + no_stuff_parking_objects, disabled_parking_objects = get_no_staff_parking_objects() + content_type = get_or_create_content_type_from_config( + NO_STAFF_PARKING_CONTENT_TYPE_NAME + ) + num_created, num_deleted = save_to_database(no_stuff_parking_objects, content_type) + assert num_created == 2 + assert num_deleted == 0 + content_type = get_or_create_content_type_from_config( + DISABLED_PARKING_CONTENT_TYPE_NAME ) + num_created, num_deleted = save_to_database(disabled_parking_objects, content_type) + assert num_created == 1 + assert num_deleted == 0 assert MobileUnit.objects.all().count() == 3 try: turku_muni = Municipality.objects.get(name="Turku") diff --git a/mobility_data/tests/test_import_gas_filling_stations.py b/mobility_data/tests/test_import_gas_filling_stations.py index 5bd913c35..71e04a91a 100644 --- a/mobility_data/tests/test_import_gas_filling_stations.py +++ b/mobility_data/tests/test_import_gas_filling_stations.py @@ -1,3 +1,5 @@ +from unittest.mock import patch + import pytest from 
mobility_data.importers.utils import ( @@ -10,14 +12,17 @@ @pytest.mark.django_db -def test_importer(municipalities): +@patch("mobility_data.importers.gas_filling_station.get_json_data") +def test_importer(get_json_data_mock, municipalities): from mobility_data.importers.gas_filling_station import ( CONTENT_TYPE_NAME, get_filtered_gas_filling_station_objects, ) - json_data = get_test_fixture_json_data("gas_filling_stations.json") - objects = get_filtered_gas_filling_station_objects(json_data=json_data) + get_json_data_mock.return_value = get_test_fixture_json_data( + "gas_filling_stations.json" + ) + objects = get_filtered_gas_filling_station_objects() content_type = get_or_create_content_type_from_config(CONTENT_TYPE_NAME) num_created, num_deleted = save_to_database(objects, content_type) # Two will be created as One item in the fixture data locates outside Southwest Finland diff --git a/mobility_data/tests/test_import_loading_and_unloading_places.py b/mobility_data/tests/test_import_loading_and_unloading_places.py index 519f8f5dc..7b17af605 100644 --- a/mobility_data/tests/test_import_loading_and_unloading_places.py +++ b/mobility_data/tests/test_import_loading_and_unloading_places.py @@ -1,19 +1,31 @@ +from unittest.mock import patch + import pytest from munigeo.models import Municipality -from mobility_data.importers.loading_unloading_places import CONTENT_TYPE_NAME +from mobility_data.importers.utils import ( + get_or_create_content_type_from_config, + get_root_dir, + save_to_database, +) from mobility_data.models import ContentType, MobileUnit -from .utils import import_command - @pytest.mark.django_db -@pytest.mark.django_db -def test_import(municipalities): - import_command( - "import_loading_and_unloading_places", - test_mode="loading_and_unloading_places.geojson", +@patch("mobility_data.importers.loading_unloading_places.get_geojson_file_name") +def test_import(get_geojson_file_name_mock, municipalities): + from 
mobility_data.importers.loading_unloading_places import ( + CONTENT_TYPE_NAME, + get_loading_and_unloading_objects, ) + + file_name = f"{get_root_dir()}/mobility_data/tests/data/loading_and_unloading_places.geojson" + get_geojson_file_name_mock.return_value = file_name + content_type = get_or_create_content_type_from_config(CONTENT_TYPE_NAME) + objects = get_loading_and_unloading_objects() + num_created, num_deleted = save_to_database(objects, content_type) + assert num_created == 3 + assert num_deleted == 0 assert ContentType.objects.all().count() == 1 assert MobileUnit.objects.all().count() == 3 turku_muni = None diff --git a/mobility_data/tests/test_import_payment_zones.py b/mobility_data/tests/test_import_payment_zones.py index c8a91adf9..5f9e7f6fc 100644 --- a/mobility_data/tests/test_import_payment_zones.py +++ b/mobility_data/tests/test_import_payment_zones.py @@ -6,23 +6,30 @@ has been removed from the test input data, as it causes GDAL DataSource to fail when loading data. """ +from unittest.mock import patch import pytest from django.conf import settings from django.contrib.gis.geos import Point, Polygon +from mobility_data.importers.wfs import import_wfs_feature +from mobility_data.management.commands.import_wfs import CONFIG_FILE, get_yaml_config from mobility_data.models import ContentType, MobileUnit -from .utils import import_command +from .utils import get_test_fixture_data_source @pytest.mark.django_db -def test_import_payment_zones(): - import_command( - "import_wfs", - "PaymentZone", - data_file=f"{settings.BASE_DIR}/mobility_data/tests/data/payment_zones.gml", +@patch("mobility_data.importers.wfs.get_data_source") +def test_import_payment_zones(get_data_source_mock): + config = get_yaml_config(CONFIG_FILE) + get_data_source_mock.return_value = get_test_fixture_data_source( + "payment_zones.gml" ) + features = ["PaymentZone"] + for feature in config["features"]: + if feature["content_type_name"] in features: + import_wfs_feature(feature) assert 
ContentType.objects.all().count() == 1 content_type = ContentType.objects.first() assert content_type.type_name == "PaymentZone" diff --git a/mobility_data/tests/test_import_scooter_restrictions.py b/mobility_data/tests/test_import_scooter_restrictions.py index b0ce709a7..d1a62e8ad 100644 --- a/mobility_data/tests/test_import_scooter_restrictions.py +++ b/mobility_data/tests/test_import_scooter_restrictions.py @@ -1,14 +1,17 @@ +from unittest.mock import patch + import pytest from django.conf import settings from django.contrib.gis.geos import Point -from mobility_data.importers.wfs import DEFAULT_SOURCE_DATA_SRID +from mobility_data.importers.wfs import DEFAULT_SOURCE_DATA_SRID, import_wfs_feature +from mobility_data.management.commands.import_wfs import CONFIG_FILE, get_yaml_config from mobility_data.models import ContentType, MobileUnit -from .utils import import_command +from .utils import get_test_fixture_data_source """ -Note, namespace declarations has beenremoved from the test input data, as it causes GDAL +Note, namespace declarations has been removed from the test input data, as it causes GDAL DataSource to fail when loading data. 
scooter_parkings.gml: xsi:schemaLocation="http://www.opengis.net/wfs @@ -31,12 +34,16 @@ @pytest.mark.django_db -def test_import_scooter_restrictions(): - import_command( - "import_wfs", - "ScooterParkingArea", - data_file=f"{settings.BASE_DIR}/mobility_data/tests/data/scooter_parkings.gml", +@patch("mobility_data.importers.wfs.get_data_source") +def test_import_scooter_restrictions(get_data_source_mock): + config = get_yaml_config(CONFIG_FILE) + get_data_source_mock.return_value = get_test_fixture_data_source( + "scooter_parkings.gml" ) + features = ["ScooterParkingArea"] + for feature in config["features"]: + if feature["content_type_name"] in features: + import_wfs_feature(feature) # Test scooter parking parking_content_type = ContentType.objects.get(type_name="ScooterParkingArea") assert parking_content_type @@ -47,11 +54,13 @@ def test_import_scooter_restrictions(): parking_unit.content_types.first() == parking_content_type point = Point(239576.42, 6711050.26, srid=DEFAULT_SOURCE_DATA_SRID) parking_unit.geometry.equals_exact(point, tolerance=0.0001) - import_command( - "import_wfs", - "ScooterSpeedLimitArea", - data_file=f"{settings.BASE_DIR}/mobility_data/tests/data/scooter_speed_limits.gml", + get_data_source_mock.return_value = get_test_fixture_data_source( + "scooter_speed_limits.gml" ) + features = ["ScooterSpeedLimitArea"] + for feature in config["features"]: + if feature["content_type_name"] in features: + import_wfs_feature(feature) # Test scooter speed limits speed_limit_content_type = ContentType.objects.get( type_name="ScooterSpeedLimitArea" @@ -67,11 +76,13 @@ def test_import_scooter_restrictions(): # Scooter speed limit unit locates in the market square(kauppator) assert speed_limit_unit.geometry.contains(market_square) is True assert speed_limit_unit.geometry.contains(turku_cathedral) is False - import_command( - "import_wfs", - "ScooterNoParkingArea", - data_file=f"{settings.BASE_DIR}/mobility_data/tests/data/scooter_no_parking_zones.gml", + 
get_data_source_mock.return_value = get_test_fixture_data_source( + "scooter_no_parking_zones.gml" ) + features = ["ScooterNoParkingArea"] + for feature in config["features"]: + if feature["content_type_name"] in features: + import_wfs_feature(feature) # Test scooter no parking zones no_parking_content_type = ContentType.objects.get(type_name="ScooterNoParkingArea") assert no_parking_content_type diff --git a/mobility_data/tests/test_import_share_car_parking_places.py b/mobility_data/tests/test_import_share_car_parking_places.py index 0292d3425..71fd61e5f 100644 --- a/mobility_data/tests/test_import_share_car_parking_places.py +++ b/mobility_data/tests/test_import_share_car_parking_places.py @@ -1,16 +1,31 @@ +from unittest.mock import patch + import pytest -from mobility_data.importers.share_car_parking_places import CONTENT_TYPE_NAME +from mobility_data.importers.utils import ( + get_or_create_content_type_from_config, + get_root_dir, + save_to_database, +) from mobility_data.models import ContentType, MobileUnit -from .utils import import_command - @pytest.mark.django_db -def test_import_car_share_parking_places(): - import_command( - "import_share_car_parking_places", test_mode="share_car_parking_places.geojson" +@patch("mobility_data.importers.share_car_parking_places.get_geojson_file_name") +def test_import_car_share_parking_places(get_geojson_file_name_mock): + from mobility_data.importers.share_car_parking_places import ( + CONTENT_TYPE_NAME, + get_car_share_parking_place_objects, + ) + + file_name = ( + f"{get_root_dir()}/mobility_data/tests/data/share_car_parking_places.geojson" ) + get_geojson_file_name_mock.return_value = file_name + content_type = get_or_create_content_type_from_config(CONTENT_TYPE_NAME) + objects = get_car_share_parking_place_objects() + num_created, num_deleted = save_to_database(objects, content_type) + assert num_created == 3 assert ContentType.objects.filter(type_name=CONTENT_TYPE_NAME).count() == 1 assert ( 
MobileUnit.objects.filter(content_types__type_name=CONTENT_TYPE_NAME).count() diff --git a/mobility_data/tests/utils.py b/mobility_data/tests/utils.py index 8d59f5192..aacd8d477 100644 --- a/mobility_data/tests/utils.py +++ b/mobility_data/tests/utils.py @@ -29,9 +29,21 @@ def get_test_fixture_json_data(file_name): return data -def get_test_fixture_data_layer(file_name): +def get_data_source(file_name): + """ + Returns the given file_name as a GDAL Datasource, + the file must be located in /mobility_data/tests/data/ + """ data_path = os.path.join(os.path.dirname(__file__), "data") file = os.path.join(data_path, file_name) - ds = DataSource(file) + return DataSource(file) + + +def get_test_fixture_data_layer(file_name): + ds = get_data_source(file_name) assert len(ds) == 1 return ds[0] + + +def get_test_fixture_data_source(file_name): + return get_data_source(file_name) diff --git a/pytest.ini b/pytest.ini index 9538447d3..62da235ab 100644 --- a/pytest.ini +++ b/pytest.ini @@ -2,4 +2,7 @@ DJANGO_SETTINGS_MODULE = smbackend.settings addopts = -m "not test_import_counter_data" markers = - to test counter importers run '-m test_import_counter_data' \ No newline at end of file + test_import_counter_data: mark a test for (eco) counter data, to test counter importers run 'pytest -m test_import_counter_data' + + + \ No newline at end of file diff --git a/requirements-dev.txt b/requirements-dev.txt index 90df87811..e823f0278 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -8,7 +8,7 @@ asttokens==2.0.5 # via stack-data backcall==0.2.0 # via ipython -black==21.9b0 +black==22.6.0 # via # -c requirements.txt # ipython @@ -56,30 +56,18 @@ ptyprocess==0.7.0 # via pexpect pure-eval==0.2.2 # via stack-data -pygments==2.10.0 +pygments==2.15.0 # via ipython -regex==2021.9.24 - # via - # -c requirements.txt - # black six==1.16.0 # via # -c requirements.txt # asttokens stack-data==0.2.0 # via ipython -tomli==1.2.1 - # via - # -c requirements.txt - # black 
traitlets==5.1.0 # via # ipython # matplotlib-inline -typing-extensions==3.10.0.2 - # via - # -c requirements.txt - # black wcwidth==0.2.5 # via # -c requirements.txt diff --git a/requirements.in b/requirements.in index 6cdaf4f58..d8abc2146 100644 --- a/requirements.in +++ b/requirements.in @@ -14,7 +14,7 @@ django-extensions psycopg2-binary<2.9 django-mptt lxml>=4.9.1 -raven~=6.10.0 +sentry-sdk>=1.14.0 pip-tools python-dateutil pytest-django @@ -42,4 +42,4 @@ numpy>=1.22 pyshp polyline drf-spectacular -xmltodict \ No newline at end of file +xmltodict diff --git a/requirements.txt b/requirements.txt index 9cbc1857e..06c388b79 100644 --- a/requirements.txt +++ b/requirements.txt @@ -29,8 +29,10 @@ celery==5.2.3 # -r requirements.in # django-celery-beat # django-celery-results -certifi==2022.12.7 - # via requests +certifi==2023.7.22 + # via + # requests + # sentry-sdk charset-normalizer==2.0.6 # via requests click==8.0.3 @@ -51,7 +53,7 @@ coverage==5.5 # via pytest-cov cron-descriptor==1.2.35 # via django-celery-beat -django==4.1.7 +django==4.1.10 # via # -r requirements.in # django-celery-beat @@ -197,15 +199,13 @@ pytz==2021.3 # celery # django-timezone-field # pandas -pyyaml==5.4.1 +pyyaml==6.0.1 # via # django-munigeo # drf-spectacular -raven==6.10.0 - # via -r requirements.in redis==4.4.4 # via -r requirements.in -requests==2.26.0 +requests==2.31.0 # via # -r requirements.in # django-munigeo @@ -215,6 +215,8 @@ requests-cache==0.8.1 # via -r requirements.in requests-mock==1.9.3 # via -r requirements.in +sentry-sdk==1.30.0 + # via -r requirements.in shapely==1.8.0 # via -r requirements.in six==1.16.0 @@ -226,7 +228,7 @@ six==1.16.0 # python-dateutil # requests-mock # url-normalize -sqlparse==0.4.2 +sqlparse==0.4.4 # via django toml==0.10.2 # via @@ -246,10 +248,11 @@ uritemplate==4.1.1 # via drf-spectacular url-normalize==1.4.3 # via requests-cache -urllib3==1.26.7 +urllib3==1.26.16 # via # requests # requests-cache + # sentry-sdk vine==5.0.0 # via # amqp diff 
--git a/services/api.py b/services/api.py index d37244eba..5fd368141 100644 --- a/services/api.py +++ b/services/api.py @@ -51,8 +51,6 @@ else: DEFAULT_RENDERERS = () -# This allows us to find a serializer for Haystack search results -serializers_by_model = {} all_views = [] @@ -63,14 +61,6 @@ def register_view(klass, name, basename=None): entry["basename"] = basename all_views.append(entry) - if ( - klass.serializer_class - and hasattr(klass.serializer_class, "Meta") - and hasattr(klass.serializer_class.Meta, "model") - ): - model = klass.serializer_class.Meta.model - serializers_by_model[model] = klass.serializer_class - LANGUAGES = [x[0] for x in settings.LANGUAGES] @@ -210,6 +200,32 @@ def root_service_nodes(services): ) +def resolve_divisions(divisions): + div_list = [] + for division_path in divisions: + if division_path.startswith("ocd-division"): + muni_ocd_id = division_path + else: + ocd_id_base = r"[\w0-9~_.-]+" + match_re = r"(%s)/([\w_-]+):(%s)" % (ocd_id_base, ocd_id_base) + m = re.match(match_re, division_path, re.U) + if not m: + raise ParseError("'division' must be of form 'muni/type:id'") + + arr = division_path.split("/") + muni_ocd_id = make_muni_ocd_id(arr.pop(0), "/".join(arr)) + try: + div = AdministrativeDivision.objects.select_related("geometry").get( + ocd_id=muni_ocd_id + ) + except AdministrativeDivision.DoesNotExist: + raise ParseError( + "administrative division with OCD ID '%s' not found" % muni_ocd_id + ) + div_list.append(div) + return div_list + + class JSONAPISerializer(serializers.ModelSerializer): def __init__(self, *args, **kwargs): super(JSONAPISerializer, self).__init__(*args, **kwargs) @@ -300,7 +316,13 @@ def root_service_nodes(self, obj): class Meta: model = ServiceNode - fields = "__all__" + exclude = ( + "search_column_fi", + "search_column_sv", + "search_column_en", + "syllables_fi", + "service_reference", + ) class ServiceSerializer(TranslatedModelSerializer, JSONAPISerializer): @@ -315,6 +337,16 @@ def 
to_representation(self, obj): total += unit_count.count ret["unit_count"]["municipality"][div_name] = unit_count.count ret["unit_count"]["total"] = total + + divisions = self.context.get("divisions", []) + include_fields = self.context.get("include", []) + if "unit_count_per_division" in include_fields and divisions: + ret["unit_count_per_division"] = {} + div_list = resolve_divisions(divisions) + for div in div_list: + ret["unit_count_per_division"][div.name] = Unit.objects.filter( + services=obj.pk, location__within=div.geometry.boundary + ).count() return ret class Meta: @@ -530,6 +562,13 @@ class ServiceViewSet(JSONAPIViewSet, viewsets.ReadOnlyModelViewSet): queryset = Service.objects.all() serializer_class = ServiceSerializer + def get_serializer_context(self): + ret = super(ServiceViewSet, self).get_serializer_context() + query_params = self.request.query_params + division = query_params.get("division", "") + ret["divisions"] = [x.strip() for x in division.split(",") if x] + return ret + def get_queryset(self): queryset = ( super(ServiceViewSet, self) @@ -720,6 +759,7 @@ class Meta: "accessibility_property_hash", "identifier_hash", "public", + "syllables_fi", "search_column_fi", "search_column_sv", "search_column_en", @@ -916,30 +956,7 @@ def validate_service_node_ids(service_node_ids): # Divisions can be specified with form: # division=helsinki/kaupunginosa:kallio,vantaa/äänestysalue:5 d_list = filters["division"].lower().split(",") - div_list = [] - for division_path in d_list: - if division_path.startswith("ocd-division"): - muni_ocd_id = division_path - else: - ocd_id_base = r"[\w0-9~_.-]+" - match_re = r"(%s)/([\w_-]+):(%s)" % (ocd_id_base, ocd_id_base) - m = re.match(match_re, division_path, re.U) - if not m: - raise ParseError("'division' must be of form 'muni/type:id'") - - arr = division_path.split("/") - muni_ocd_id = make_muni_ocd_id(arr.pop(0), "/".join(arr)) - try: - div = AdministrativeDivision.objects.select_related("geometry").get( - 
ocd_id=muni_ocd_id - ) - except AdministrativeDivision.DoesNotExist: - raise ParseError( - "administrative division with OCD ID '%s' not found" - % muni_ocd_id - ) - div_list.append(div) - + div_list = resolve_divisions(d_list) if div_list: mp = div_list.pop(0).geometry.boundary for div in div_list: @@ -1085,52 +1102,6 @@ def list(self, request, **kwargs): register_view(UnitViewSet, "unit") -class SearchSerializer(serializers.Serializer): - def __init__(self, *args, **kwargs): - super(SearchSerializer, self).__init__(*args, **kwargs) - self.serializer_by_model = {} - - def _strip_context(self, context, model): - if model == Unit: - key = "unit" - elif model == Service: - key = "service" - else: - key = "service_node" - for spec in ["include", "only"]: - if spec in context: - context[spec] = context[spec].get(key, []) - if "only" in context and context["only"] == []: - context.pop("only") - return context - - def get_result_serializer(self, model, instance): - ser = self.serializer_by_model.get(model) - if not ser: - ser_class = serializers_by_model[model] - assert model in serializers_by_model, "Serializer for %s not found" % model - context = self._strip_context(self.context.copy(), model) - ser = ser_class(context=context, many=False) - self.serializer_by_model[model] = ser - # TODO: another way to serialize with new data without - # costly Serializer instantiation - ser.instance = instance - if hasattr(ser, "_data"): - del ser._data - return ser - - def to_representation(self, search_result): - if not search_result or not search_result.model: - return None - model = search_result.model - serializer = self.get_result_serializer(model, search_result.object) - data = serializer.data - data["sort_index"] = search_result._sort_index - data["object_type"] = model._meta.model_name - data["score"] = search_result.score - return data - - class AccessibilityRuleView(viewsets.ViewSetMixin, generics.ListAPIView): serializer_class = None @@ -1147,13 +1118,13 @@ def 
list(self, request, *args, **kwargs): class AdministrativeDivisionSerializer(munigeo_api.AdministrativeDivisionSerializer): def to_representation(self, obj): ret = super(AdministrativeDivisionSerializer, self).to_representation(obj) - if "request" not in self.context: return ret query_params = self.context["request"].query_params unit_include = query_params.get("unit_include", None) service_point_id = ret["service_point_id"] + if service_point_id and unit_include: try: unit = Unit.objects.get(id=service_point_id) @@ -1167,6 +1138,19 @@ def to_representation(self, obj): ser = UnitSerializer(unit, context={"only": unit_include.split(",")}) ret["unit"] = ser.data + unit_ids = ret["units"] + if unit_ids and unit_include: + units = Unit.objects.filter(id__in=unit_ids) + if units: + units_data = [] + for unit in units: + units_data.append( + UnitSerializer( + unit, context={"only": unit_include.split(",")} + ).data + ) + ret["units"] = units_data + include_fields = query_params.get("include", []) if "centroid" in include_fields and obj.geometry: centroid = obj.geometry.boundary.centroid diff --git a/services/fixtures/exclusion_rules.json b/services/fixtures/exclusion_rules.json new file mode 100644 index 000000000..2880b98a8 --- /dev/null +++ b/services/fixtures/exclusion_rules.json @@ -0,0 +1,10 @@ +[ + { + "model": "services.exclusionrule", + "pk": 1, + "fields": { + "word": "tekojää", + "exclusion": "-nurmi" + } + } + ] \ No newline at end of file diff --git a/services/migrations/0095_exclusionrule.py b/services/migrations/0095_exclusionrule.py new file mode 100644 index 000000000..0e6b61e4a --- /dev/null +++ b/services/migrations/0095_exclusionrule.py @@ -0,0 +1,37 @@ +# Generated by Django 4.1.7 on 2023-07-20 05:59 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("services", "0094_create_syllables_fi_columns"), + ] + + operations = [ + migrations.CreateModel( + name="ExclusionRule", + fields=[ + ( + 
"id", + models.AutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), + ("word", models.CharField(max_length=100, verbose_name="Word")), + ( + "exclusion", + models.CharField(max_length=100, verbose_name="Exclusion"), + ), + ], + options={ + "verbose_name": "Exclusion rule", + "verbose_name_plural": "Exclusion rules", + "ordering": ["-id"], + }, + ), + ] diff --git a/services/migrations/0096_alter_unitconnection_section_type.py b/services/migrations/0096_alter_unitconnection_section_type.py new file mode 100644 index 000000000..01fbf1a06 --- /dev/null +++ b/services/migrations/0096_alter_unitconnection_section_type.py @@ -0,0 +1,32 @@ +# Generated by Django 4.1.7 on 2023-07-21 07:02 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("services", "0095_exclusionrule"), + ] + + operations = [ + migrations.AlterField( + model_name="unitconnection", + name="section_type", + field=models.PositiveSmallIntegerField( + choices=[ + (1, "PHONE_OR_EMAIL"), + (2, "LINK"), + (3, "TOPICAL"), + (4, "OTHER_INFO"), + (5, "OPENING_HOURS"), + (6, "SOCIAL_MEDIA_LINK"), + (7, "OTHER_ADDRESS"), + (8, "HIGHLIGHT"), + (9, "ESERVICE_LINK"), + (10, "PRICE"), + ], + null=True, + ), + ), + ] diff --git a/services/migrations/0097_update_unitconnection_names.py b/services/migrations/0097_update_unitconnection_names.py new file mode 100644 index 000000000..652631c5f --- /dev/null +++ b/services/migrations/0097_update_unitconnection_names.py @@ -0,0 +1,33 @@ +# Generated by Django 4.1.7 on 2023-07-21 07:07 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("services", "0096_alter_unitconnection_section_type"), + ] + + operations = [ + migrations.AlterField( + model_name="unitconnection", + name="name", + field=models.CharField(max_length=2100), + ), + migrations.AlterField( + model_name="unitconnection", + name="name_en", + 
field=models.CharField(max_length=2100, null=True), + ), + migrations.AlterField( + model_name="unitconnection", + name="name_fi", + field=models.CharField(max_length=2100, null=True), + ), + migrations.AlterField( + model_name="unitconnection", + name="name_sv", + field=models.CharField(max_length=2100, null=True), + ), + ] diff --git a/services/migrations/0098_alter_unitconnection_section_type.py b/services/migrations/0098_alter_unitconnection_section_type.py new file mode 100644 index 000000000..e1d15fbc5 --- /dev/null +++ b/services/migrations/0098_alter_unitconnection_section_type.py @@ -0,0 +1,33 @@ +# Generated by Django 4.1.7 on 2023-07-21 07:09 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("services", "0097_update_unitconnection_names"), + ] + + operations = [ + migrations.AlterField( + model_name="unitconnection", + name="section_type", + field=models.PositiveSmallIntegerField( + choices=[ + (1, "PHONE_OR_EMAIL"), + (2, "LINK"), + (3, "TOPICAL"), + (4, "OTHER_INFO"), + (5, "OPENING_HOURS"), + (6, "SOCIAL_MEDIA_LINK"), + (7, "OTHER_ADDRESS"), + (8, "HIGHLIGHT"), + (9, "ESERVICE_LINK"), + (10, "PRICE"), + (11, "SUBGROUP"), + ], + null=True, + ), + ), + ] diff --git a/services/migrations/0099_unitconnection_tags.py b/services/migrations/0099_unitconnection_tags.py new file mode 100644 index 000000000..ec9be3874 --- /dev/null +++ b/services/migrations/0099_unitconnection_tags.py @@ -0,0 +1,24 @@ +# Generated by Django 4.1.7 on 2023-07-21 07:13 + +import django.contrib.postgres.fields +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("services", "0098_alter_unitconnection_section_type"), + ] + + operations = [ + migrations.AddField( + model_name="unitconnection", + name="tags", + field=django.contrib.postgres.fields.ArrayField( + base_field=models.CharField(max_length=200), + default=list, + null=True, + size=None, + ), + ), + ] diff 
--git a/services/migrations/0100_alter_unitconnection_section_type.py b/services/migrations/0100_alter_unitconnection_section_type.py new file mode 100644 index 000000000..a44163825 --- /dev/null +++ b/services/migrations/0100_alter_unitconnection_section_type.py @@ -0,0 +1,34 @@ +# Generated by Django 4.1.7 on 2023-07-21 07:18 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("services", "0099_unitconnection_tags"), + ] + + operations = [ + migrations.AlterField( + model_name="unitconnection", + name="section_type", + field=models.PositiveSmallIntegerField( + choices=[ + (1, "PHONE_OR_EMAIL"), + (2, "LINK"), + (3, "TOPICAL"), + (4, "OTHER_INFO"), + (5, "OPENING_HOURS"), + (6, "SOCIAL_MEDIA_LINK"), + (7, "OTHER_ADDRESS"), + (8, "HIGHLIGHT"), + (9, "ESERVICE_LINK"), + (10, "PRICE"), + (11, "SUBGROUP"), + (12, "OPENING_HOUR_OBJECT"), + ], + null=True, + ), + ), + ] diff --git a/services/models/__init__.py b/services/models/__init__.py index be9d191a0..57037a8e9 100644 --- a/services/models/__init__.py +++ b/services/models/__init__.py @@ -2,6 +2,7 @@ from .department import Department from .keyword import Keyword from .notification import Announcement, ErrorMessage +from .search_rule import ExclusionRule from .service import Service, UnitServiceDetails from .service_mapping import ServiceMapping from .service_node import ServiceNode diff --git a/services/models/search_rule.py b/services/models/search_rule.py new file mode 100644 index 000000000..78c9c32b9 --- /dev/null +++ b/services/models/search_rule.py @@ -0,0 +1,15 @@ +from django.db import models +from django.utils.translation import gettext_lazy as _ + + +class ExclusionRule(models.Model): + word = models.CharField(max_length=100, verbose_name=_("Word")) + exclusion = models.CharField(max_length=100, verbose_name=_("Exclusion")) + + class Meta: + ordering = ["-id"] + verbose_name = _("Exclusion rule") + verbose_name_plural = _("Exclusion rules") + + 
def __str__(self): + return "%s : %s" % (self.word, self.exclusion) diff --git a/services/models/unit_connection.py b/services/models/unit_connection.py index e018cde9c..c9b888a39 100644 --- a/services/models/unit_connection.py +++ b/services/models/unit_connection.py @@ -1,3 +1,4 @@ +from django.contrib.postgres.fields import ArrayField from django.db import models from .unit import Unit @@ -13,6 +14,9 @@ class UnitConnection(models.Model): OTHER_ADDRESS_TYPE = 7 HIGHLIGHT_TYPE = 8 ESERVICE_LINK_TYPE = 9 + PRICE_TYPE = 10 + SUBGROUP_TYPE = 11 + OPENING_HOUR_OBJECT = 12 SECTION_TYPES = ( (PHONE_OR_EMAIL_TYPE, "PHONE_OR_EMAIL"), @@ -24,18 +28,22 @@ class UnitConnection(models.Model): (OTHER_ADDRESS_TYPE, "OTHER_ADDRESS"), (HIGHLIGHT_TYPE, "HIGHLIGHT"), (ESERVICE_LINK_TYPE, "ESERVICE_LINK"), + (PRICE_TYPE, "PRICE"), + (SUBGROUP_TYPE, "SUBGROUP"), + (OPENING_HOUR_OBJECT, "OPENING_HOUR_OBJECT"), ) unit = models.ForeignKey( Unit, db_index=True, related_name="connections", on_delete=models.CASCADE ) - name = models.CharField(max_length=600) + name = models.CharField(max_length=2100) www = models.URLField(null=True, max_length=400) section_type = models.PositiveSmallIntegerField(choices=SECTION_TYPES, null=True) email = models.EmailField(max_length=100, null=True) phone = models.CharField(max_length=50, null=True) contact_person = models.CharField(max_length=80, null=True) order = models.PositiveSmallIntegerField(default=0) + tags = ArrayField(models.CharField(max_length=200), null=True, default=list) class Meta: ordering = ["order"] diff --git a/services/search/api.py b/services/search/api.py index 7c9f4d0d0..10408ef68 100644 --- a/services/search/api.py +++ b/services/search/api.py @@ -19,7 +19,6 @@ """ import logging import re -from distutils.util import strtobool from itertools import chain from django.db import connection, reset_queries @@ -42,6 +41,7 @@ Unit, UnitAccessibilityShortcomings, ) +from services.utils import strtobool from .constants import ( 
DEFAULT_MODEL_LIMIT_VALUE, @@ -54,6 +54,7 @@ from .utils import ( get_all_ids_from_sql_results, get_preserved_order, + get_search_exclusions, get_service_node_results, get_trigram_results, set_address_fields, @@ -212,6 +213,14 @@ def get(self, request): else: trigram_threshold = DEFAULT_TRIGRAM_THRESHOLD + if "use_websearch" in params: + try: + use_websearch = strtobool(params["use_websearch"]) + except ValueError: + raise ParseError("'use_websearch' needs to be a boolean") + else: + use_websearch = True + if "geometry" in params: try: show_geometry = strtobool(params["geometry"]) @@ -266,7 +275,7 @@ def get(self, request): config_language = LANGUAGES[language_short] search_query_str = None # Used in the raw sql # Build conditional query string that is used in the SQL query. - # split my "," or whitespace + # split by "," or whitespace q_vals = re.split(r",\s+|\s+", q_val) q_vals = [s.strip().replace("'", "") for s in q_vals] for q in q_vals: @@ -279,12 +288,17 @@ def get(self, request): search_query_str += f"& {q}:*" else: search_query_str = f"{q}:*" - + search_fn = "to_tsquery" + if use_websearch: + exclusions = get_search_exclusions(q) + if exclusions: + search_fn = "websearch_to_tsquery" + search_query_str += f" {exclusions}" # This is ~100 times faster than using Djangos SearchRank and allows searching using wildard "|*" # and by rankig gives better results, e.g. extra fields weight is counted. 
sql = f""" SELECT id, type_name, name_{language_short}, ts_rank_cd(search_column_{language_short}, search_query) - AS rank FROM search_view, to_tsquery('{config_language}','{search_query_str}') search_query + AS rank FROM search_view, {search_fn}('{config_language}','{search_query_str}') search_query WHERE search_query @@ search_column_{language_short} ORDER BY rank DESC LIMIT {sql_query_limit}; """ diff --git a/services/search/specification.swagger.yaml b/services/search/specification.swagger.yaml index c59950cba..e02142fbf 100644 --- a/services/search/specification.swagger.yaml +++ b/services/search/specification.swagger.yaml @@ -34,6 +34,16 @@ components: type: string example: fi default: fi + use_websearch_param: + name: use_websearch + in: query + schema: + type: boolean + default: true + description: > + "websearch_to_tsquery is a simplified version of to_tsquery with an alternative syntax, similar to the one used by web search engines." + If disabled, uses 'to_tsquery' function to convert the query to 'tsquery'. + If enabled, exclusion rules are used when generating the query as it support the not "-" operator. 
order_units_by_num_services_param: name: order_units_by_num_services in: query @@ -173,6 +183,7 @@ paths: - $ref: "#/components/parameters/q_param" - $ref: "#/components/parameters/language_param" - $ref: "#/components/parameters/use_trigram_param" + - $ref: "#/components/parameters/use_websearch_param" - $ref: "#/components/parameters/trigram_threshold_param" - $ref: "#/components/parameters/order_units_by_num_services_param" - $ref: "#/components/parameters/geometry_param" diff --git a/services/search/tests/conftest.py b/services/search/tests/conftest.py index cd4e5820b..3d7bd4cde 100644 --- a/services/search/tests/conftest.py +++ b/services/search/tests/conftest.py @@ -12,7 +12,10 @@ ) from rest_framework.test import APIClient -from services.management.commands.index_search_columns import get_search_column +from services.management.commands.index_search_columns import ( + generate_syllables, + get_search_column, +) from services.management.commands.services_import.services import ( update_service_counts, update_service_node_counts, @@ -20,6 +23,7 @@ ) from services.models import ( Department, + ExclusionRule, Service, ServiceNode, Unit, @@ -80,9 +84,32 @@ def units( ) unit.services.add(3) unit.save() + # id=4 is the "Tekonurmikentät" service + service = Service.objects.get(id=4) + unit = Unit.objects.create( + id=4, + name="Kupittaan tekonurmikentät", + service_names_fi=[service.name_fi], + last_modified_time=now(), + municipality=municipality, + ) + unit.services.add(4) + unit.save() + # id=5 is the "tekojääradat" service + service = Service.objects.get(id=5) + unit = Unit.objects.create( + id=5, + name="Parkin kenttä", + service_names_fi=[service.name_fi], + last_modified_time=now(), + municipality=municipality, + ) + unit.services.add(5) + unit.save() update_service_root_service_nodes() update_service_counts() update_service_node_counts() + generate_syllables(Unit) Unit.objects.update(search_column_fi=get_search_column(Unit, "fi")) return Unit.objects.all() 
@@ -101,8 +128,9 @@ def department(municipality): @pytest.mark.django_db @pytest.fixture def accessibility_shortcoming(units): + unit = Unit.objects.get(name="Biologinen museo") return UnitAccessibilityShortcomings.objects.create( - unit=units[1], accessibility_shortcoming_count={"rollator": 5, "stroller": 1} + unit=unit, accessibility_shortcoming_count={"rollator": 5, "stroller": 1} ) @@ -127,6 +155,19 @@ def services(): name_sv="Simhall", last_modified_time=now(), ) + Service.objects.create( + id=4, + name="Tekonurmikentät", + name_sv="Konstgräsplaner", + last_modified_time=now(), + ) + Service.objects.create( + id=5, + name="tekojääkentät", + name_sv="konstisbanor", + last_modified_time=now(), + ) + generate_syllables(Service) Service.objects.update(search_column_fi=get_search_column(Service, "fi")) return Service.objects.all() @@ -244,3 +285,10 @@ def streets(): Street.objects.create(id=43, name="Markulantie", municipality_id="turku") Street.objects.create(id=44, name="Yliopistonkatu", municipality_id="turku") return Street.objects.all() + + +@pytest.mark.django_db +@pytest.fixture +def exclusion_rules(): + ExclusionRule.objects.create(id=1, word="tekojää", exclusion="-nurmi") + return ExclusionRule.objects.all() diff --git a/services/search/tests/test_api.py b/services/search/tests/test_api.py index 2d78d4419..d79c0cbb2 100644 --- a/services/search/tests/test_api.py +++ b/services/search/tests/test_api.py @@ -13,6 +13,7 @@ def test_search( administrative_division, accessibility_shortcoming, municipality, + exclusion_rules, ): # Search for "museo" in entities: units,services and servicenods url = reverse("search") + "?q=museo&type=unit,service,servicenode" @@ -30,7 +31,6 @@ def test_search( assert biological_museum_unit["street_address"] == "Neitsytpolku 1" assert biological_museum_unit["municipality"] == "turku" assert biological_museum_unit["contract_type"]["id"] == "municipal_service" - assert ( biological_museum_unit["contract_type"]["description"]["fi"] == 
"municipal_service" @@ -133,3 +133,23 @@ def test_search( results = response.json()["results"] assert results[0]["object_type"] == "administrativedivision" assert results[0]["name"]["fi"] == "Turku" + + # Test exclusion rules used with websearch. By default (use_websearch=True) should only find Parkin kenttä + url = reverse("search") + "?q=tekojää&type=unit,service,servicenode" + response = api_client.get(url) + results = response.json()["results"] + assert len(results) == 2 + parkin_kentta = results[0] + assert parkin_kentta["object_type"] == "unit" + assert parkin_kentta["name"]["fi"] == "Parkin kenttä" + tekojaa_service = results[1] + assert tekojaa_service["object_type"] == "service" + assert tekojaa_service["name"]["fi"] == "tekojääkentät" + # Disabling use_websearch, should return both 'tekojääkentät', 'tekonurmikentät' services and their units. + # as syllable 'teko' is indexed from the compound words. + url = ( + reverse("search") + + "?q=tekojää&type=unit,service,servicenode&use_websearch=false" + ) + response = api_client.get(url) + assert len(response.json()["results"]) == 4 diff --git a/services/search/utils.py b/services/search/utils.py index 472849b18..e8611b990 100644 --- a/services/search/utils.py +++ b/services/search/utils.py @@ -2,7 +2,7 @@ from django.db import connection from django.db.models import Case, When -from services.models import ServiceNode, ServiceNodeUnitCount, Unit +from services.models import ExclusionRule, ServiceNode, ServiceNodeUnitCount, Unit from services.search.constants import ( DEFAULT_TRIGRAM_THRESHOLD, LENGTH_OF_HYPHENATED_WORDS, @@ -194,3 +194,14 @@ def get_trigram_results( ids = [row[0] for row in all_results] objs = model.objects.filter(id__in=ids) return objs + + +def get_search_exclusions(q): + """ + To add/modify search exclusion rules edit: services/fixtures/exclusion_rules + To import rules: ./manage.py loaddata services/fixtures/exclusion_rules.json + """ + rule = 
ExclusionRule.objects.filter(word__iexact=q).first() + if rule: + return rule.exclusion + return "" diff --git a/services/utils/__init__.py b/services/utils/__init__.py index 1cadcc62e..edd6e8b68 100644 --- a/services/utils/__init__.py +++ b/services/utils/__init__.py @@ -1,3 +1,4 @@ from .accessibility_shortcoming_calculator import AccessibilityShortcomingCalculator from .models import check_valid_concrete_field from .translator import get_translated +from .types import strtobool diff --git a/services/utils/types.py b/services/utils/types.py new file mode 100644 index 000000000..b300509ab --- /dev/null +++ b/services/utils/types.py @@ -0,0 +1,14 @@ +def strtobool(val): + """Convert a string representation of truth to true (1) or false (0). + + True values are 'y', 'yes', 't', 'true', 'on', and '1'; false values + are 'n', 'no', 'f', 'false', 'off', and '0'. Raises ValueError if + 'val' is anything else. + """ + val = val.lower() + if val in ("y", "yes", "t", "true", "on", "1"): + return 1 + elif val in ("n", "no", "f", "false", "off", "0"): + return 0 + else: + raise ValueError("invalid truth value %r" % (val,)) diff --git a/setup.py b/setup.py index ff0dc934c..db52eb478 100644 --- a/setup.py +++ b/setup.py @@ -2,7 +2,7 @@ setup( name="smbackend", - version="210929", + version="230717", license="AGPLv3", packages=find_packages(), include_package_data=True, diff --git a/smbackend/settings.py b/smbackend/settings.py index 91306fdb0..1615fb9bd 100644 --- a/smbackend/settings.py +++ b/smbackend/settings.py @@ -1,29 +1,36 @@ +import logging.config import os +from pathlib import Path import environ +import sentry_sdk from django.conf.global_settings import LANGUAGES as GLOBAL_LANGUAGES from django.core.exceptions import ImproperlyConfigured +from sentry_sdk.integrations.django import DjangoIntegration CONFIG_FILE_NAME = "config_dev.env" +GDAL_LIBRARY_PATH = os.environ.get("GDAL_LIBRARY_PATH") +GEOS_LIBRARY_PATH = os.environ.get("GEOS_LIBRARY_PATH") - -root = 
environ.Path(__file__) - 2 # two levels back in hierarchy +# Build paths inside the project like this: os.path.join(BASE_DIR, ...) +BASE_DIR = str(Path(__file__).resolve().parent.parent) env = environ.Env( DEBUG=(bool, False), LANGUAGES=(list, ["fi", "sv", "en"]), DATABASE_URL=(str, "postgis:///servicemap"), + SECRET_KEY=(str, "temp_key"), TRUST_X_FORWARDED_HOST=(bool, False), SECURE_PROXY_SSL_HEADER=(tuple, None), ALLOWED_HOSTS=(list, []), - SENTRY_DSN=(str, None), - SENTRY_ENVIRONMENT=(str, "development"), + SENTRY_DSN=(str, ""), + SENTRY_ENVIRONMENT=(str, ""), COOKIE_PREFIX=(str, "servicemap"), INTERNAL_IPS=(list, []), CELERY_BROKER_URL=(str, "amqp://guest:guest@localhost:5672"), - MEDIA_ROOT=(environ.Path(), root("media")), - STATIC_ROOT=(environ.Path(), root("static")), - MEDIA_URL=(str, "/media/"), + STATIC_ROOT=(str, BASE_DIR + "/static"), + MEDIA_ROOT=(str, BASE_DIR + "/media"), STATIC_URL=(str, "/static/"), + MEDIA_URL=(str, "/media/"), OPEN311_URL_BASE=(str, None), OPEN311_API_KEY=(str, None), OPEN311_INTERNAL_API_KEY=(str, None), @@ -63,10 +70,16 @@ EMAIL_PORT=(int, None), EMAIL_USE_TLS=(bool, None), TELRAAM_TOKEN=(str, None), + DJANGO_LOG_LEVEL=(str, "INFO"), + TURKU_SERVICES_IMPORT_LOG_LEVEL=(str, "INFO"), + SEARCH_LOG_LEVEL=(str, "INFO"), + IOT_LOG_LEVEL=(str, "INFO"), + ECO_COUNTER_LOG_LEVEL=(str, "INFO"), + MOBILITY_DATA_LOG_LEVEL=(str, "INFO"), + BICYCLE_NETWORK_LOG_LEVEL=(str, "INFO"), + STREET_MAINTENANCE_LOG_LEVEL=(str, "INFO"), ) -# Build paths inside the project like this: os.path.join(BASE_DIR, ...) -BASE_DIR = root() # Django environ has a nasty habit of complanining at level # WARN about env file not being preset. Here we pre-empt it. 
@@ -77,8 +90,17 @@ environ.Env.read_env(env_file_path) DEBUG = env("DEBUG") +SECRET_KEY = env("SECRET_KEY") TEMPLATE_DEBUG = False ALLOWED_HOSTS = env("ALLOWED_HOSTS") +DJANGO_LOG_LEVEL = env("DJANGO_LOG_LEVEL") +TURKU_SERVICES_IMPORT_LOG_LEVEL = env("TURKU_SERVICES_IMPORT_LOG_LEVEL") +SEARCH_LOG_LEVEL = env("SEARCH_LOG_LEVEL") +IOT_LOG_LEVEL = env("IOT_LOG_LEVEL") +ECO_COUNTER_LOG_LEVEL = env("ECO_COUNTER_LOG_LEVEL") +MOBILITY_DATA_LOG_LEVEL = env("MOBILITY_DATA_LOG_LEVEL") +BICYCLE_NETWORK_LOG_LEVEL = env("BICYCLE_NETWORK_LOG_LEVEL") +STREET_MAINTENANCE_LOG_LEVEL = env("STREET_MAINTENANCE_LOG_LEVEL") # Application definition INSTALLED_APPS = [ @@ -90,7 +112,6 @@ "django.contrib.staticfiles", "django.contrib.gis", "django.contrib.postgres", - "raven.contrib.django.raven_compat", "rest_framework.authtoken", "rest_framework", "corsheaders", @@ -161,7 +182,6 @@ def gettext(s): TIME_ZONE = "Europe/Helsinki" USE_I18N = True -USE_L10N = True USE_TZ = True USE_X_FORWARDED_HOST = env("TRUST_X_FORWARDED_HOST") @@ -292,16 +312,26 @@ def gettext(s): "blackhole": {"class": "logging.NullHandler"}, }, "loggers": { - "django": {"handlers": ["console"], "level": "INFO"}, - "turku_services_import": {"handlers": ["console"], "level": "DEBUG"}, - "search": {"handlers": ["console"], "level": "DEBUG"}, - "iot": {"handlers": ["console"], "level": "INFO"}, - "eco_counter": {"handlers": ["console"], "level": "INFO"}, - "mobility_data": {"handlers": ["console"], "level": "INFO"}, - "bicycle_network": {"handlers": ["console"], "level": "INFO"}, - "street_maintenance": {"handlers": ["console"], "level": "INFO"}, + "django": {"handlers": ["console"], "level": DJANGO_LOG_LEVEL}, + "turku_services_import": { + "handlers": ["console"], + "level": TURKU_SERVICES_IMPORT_LOG_LEVEL, + }, + "search": {"handlers": ["console"], "level": SEARCH_LOG_LEVEL}, + "iot": {"handlers": ["console"], "level": IOT_LOG_LEVEL}, + "eco_counter": {"handlers": ["console"], "level": ECO_COUNTER_LOG_LEVEL}, + 
"mobility_data": {"handlers": ["console"], "level": MOBILITY_DATA_LOG_LEVEL}, + "bicycle_network": { + "handlers": ["console"], + "level": BICYCLE_NETWORK_LOG_LEVEL, + }, + "street_maintenance": { + "handlers": ["console"], + "level": STREET_MAINTENANCE_LOG_LEVEL, + }, }, } +logging.config.dictConfig(LOGGING) # Define the endpoints for API documentation with drf-spectacular. DOC_ENDPOINTS = [ @@ -334,11 +364,6 @@ def preprocessing_filter_spec(endpoints): LOCALE_PATHS = (os.path.join(BASE_DIR, "locale"),) -SENTRY_DSN = env("SENTRY_DSN") -SENTRY_ENVIRONMENT = env("SENTRY_ENVIRONMENT") - -import raven # noqa - # Celery CELERY_BROKER_URL = env("CELERY_BROKER_URL") CELERY_RESULT_BACKEND = "django-db" @@ -366,47 +391,16 @@ def preprocessing_filter_spec(endpoints): } } - -if SENTRY_DSN: - RAVEN_CONFIG = { - "dsn": SENTRY_DSN, - # Needs to change if settings.py is not in an immediate child of the project - "release": raven.fetch_git_sha(os.path.dirname(os.pardir)), - "environment": SENTRY_ENVIRONMENT, - } - +sentry_sdk.init( + dsn=env.str("SENTRY_DSN"), + environment=env.str("SENTRY_ENVIRONMENT"), + traces_sample_rate=1.0, + send_default_pii=True, + integrations=[DjangoIntegration()], +) COOKIE_PREFIX = env("COOKIE_PREFIX") INTERNAL_IPS = env("INTERNAL_IPS") - -if "SECRET_KEY" not in locals(): - secret_file = os.path.join(BASE_DIR, ".django_secret") - try: - SECRET_KEY = open(secret_file).read().strip() - except IOError: - import random - - system_random = random.SystemRandom() - try: - SECRET_KEY = "".join( - [ - system_random.choice( - "abcdefghijklmnopqrstuvwxyz0123456789!@#$%^&*(-_=+)" - ) - for i in range(64) - ] - ) - secret = open(secret_file, "w") - import os - - os.chmod(secret_file, 0o0600) - secret.write(SECRET_KEY) - secret.close() - except IOError: - Exception( - "Please create a %s file with random characters to generate your secret key!" 
- % secret_file - ) TURKU_WFS_URL = env("TURKU_WFS_URL") PTV_ID_OFFSET = env("PTV_ID_OFFSET") GEO_SEARCH_LOCATION = env("GEO_SEARCH_LOCATION") diff --git a/smbackend_turku/importers/bicycle_stands.py b/smbackend_turku/importers/bicycle_stands.py index f53ce8bf8..08d619614 100644 --- a/smbackend_turku/importers/bicycle_stands.py +++ b/smbackend_turku/importers/bicycle_stands.py @@ -7,10 +7,9 @@ class BicycleStandImporter(BaseExternalSource): - def __init__(self, logger=None, config=None, test_data=None): + def __init__(self, logger=None, config=None): super().__init__(config) self.logger = logger - self.test_data = test_data def import_bicycle_stands(self): self.logger.info("Importing Bicycle Stands...") diff --git a/smbackend_turku/importers/bike_service_stations.py b/smbackend_turku/importers/bike_service_stations.py index 4c2bec58b..797f2fe06 100644 --- a/smbackend_turku/importers/bike_service_stations.py +++ b/smbackend_turku/importers/bike_service_stations.py @@ -7,15 +7,14 @@ class BikeServiceStationImporter(BaseExternalSource): - def __init__(self, config=None, logger=None, test_data=None): + def __init__(self, config=None, logger=None): super().__init__(config) self.logger = logger - self.test_data = test_data def import_bike_service_stations(self): self.logger.info("Importing Bike service stations...") content_type = get_or_create_content_type_from_config(CONTENT_TYPE_NAME) - filtered_objects = get_bike_service_station_objects(geojson_file=self.test_data) + filtered_objects = get_bike_service_station_objects() super().save_objects_as_units(filtered_objects, content_type) diff --git a/smbackend_turku/importers/stations.py b/smbackend_turku/importers/stations.py index c4dbd0fb0..eae38bdf4 100644 --- a/smbackend_turku/importers/stations.py +++ b/smbackend_turku/importers/stations.py @@ -11,31 +11,27 @@ class GasFillingStationImporter(BaseExternalSource): - def __init__(self, config=None, logger=None, test_data=None): + def __init__(self, config=None, 
logger=None): super().__init__(config) self.logger = logger - self.test_data = test_data def import_gas_filling_stations(self): self.logger.info("Importing gas filling stations...") content_type = get_or_create_content_type_from_config( GAS_FILLING_STATION_CONTENT_TYPE_NAME ) - filtered_objects = get_filtered_gas_filling_station_objects( - json_data=self.test_data - ) + filtered_objects = get_filtered_gas_filling_station_objects() super().save_objects_as_units(filtered_objects, content_type) class ChargingStationImporter(BaseExternalSource): - def __init__(self, logger=None, config=None, importer=None, test_data=None): + def __init__(self, logger=None, config=None, importer=None): super().__init__(config) self.logger = logger - self.test_data = test_data def import_charging_stations(self): self.logger.info("Importing charging stations...") - filtered_objects = get_charging_station_objects(csv_file=self.test_data) + filtered_objects = get_charging_station_objects() content_type = get_or_create_content_type_from_config( CHARGING_STATION_CONTENT_TYPE_NAME ) diff --git a/smbackend_turku/tests/test_bike_service_stations.py b/smbackend_turku/tests/test_bike_service_stations.py index b8dfe7fc1..3dcda5b90 100644 --- a/smbackend_turku/tests/test_bike_service_stations.py +++ b/smbackend_turku/tests/test_bike_service_stations.py @@ -1,17 +1,21 @@ import logging from datetime import datetime +from unittest.mock import patch import pytest import pytz from munigeo.models import Municipality +from mobility_data.tests.utils import get_test_fixture_data_layer from services.models import Service, ServiceNode, Unit from smbackend_turku.importers.bike_service_stations import import_bike_service_stations from smbackend_turku.importers.utils import get_external_source_config @pytest.mark.django_db +@patch("mobility_data.importers.bike_service_stations.get_data_layer") def test_bike_service_stations_import( + get_data_layer_mock, municipality, administrative_division, 
administrative_division_type, @@ -24,10 +28,12 @@ def test_bike_service_stations_import( ServiceNode.objects.create( id=42, name="Vapaa-aika", last_modified_time=datetime.now(utc_timezone) ) + get_data_layer_mock.return_value = get_test_fixture_data_layer( + "bike_service_stations.geojson" + ) import_bike_service_stations( logger=logger, config=config, - test_data="bike_service_stations.geojson", ) assert Unit.objects.all().count() == 3 Service.objects.all().count() == 1 diff --git a/smbackend_turku/tests/test_charging_stations.py b/smbackend_turku/tests/test_charging_stations.py index 095ec1bfe..618e83c17 100644 --- a/smbackend_turku/tests/test_charging_stations.py +++ b/smbackend_turku/tests/test_charging_stations.py @@ -1,8 +1,10 @@ import logging from datetime import datetime +from unittest.mock import patch import pytest import pytz +from django.conf import settings from services.models import Service, ServiceNode, Unit from smbackend_turku.importers.stations import import_charging_stations @@ -10,7 +12,9 @@ @pytest.mark.django_db +@patch("mobility_data.importers.charging_stations.get_csv_file_name") def test_charging_stations_import( + get_csv_file_name_mock, municipality, administrative_division, administrative_division_type, @@ -27,10 +31,11 @@ def test_charging_stations_import( ServiceNode.objects.create( id=42, name="Vapaa-aika", last_modified_time=datetime.now(utc_timezone) ) + file_name = f"{settings.BASE_DIR}/mobility_data/tests/data/charging_stations.csv" + get_csv_file_name_mock.return_value = file_name import_charging_stations( logger=logger, config=config, - test_data="charging_stations.csv", ) assert Unit.objects.all().count() == 3 Service.objects.all().count() == 1 diff --git a/smbackend_turku/tests/test_gas_filling_stations.py b/smbackend_turku/tests/test_gas_filling_stations.py index 80032a60b..bbf8303a5 100644 --- a/smbackend_turku/tests/test_gas_filling_stations.py +++ b/smbackend_turku/tests/test_gas_filling_stations.py @@ -1,5 +1,6 @@ 
import logging from datetime import datetime +from unittest.mock import patch import pytest import pytz @@ -12,10 +13,16 @@ @pytest.mark.django_db -def test_gas_filling_stations_import(): +@patch("mobility_data.importers.gas_filling_station.get_json_data") +def test_gas_filling_stations_import(get_json_data_mock): + logger = logging.getLogger(__name__) + # For reasons unknown this mock does not work, + # The return value of the actual function call is used. + get_json_data_mock.return_value = get_test_fixture_json_data( + "gas_filling_stations.json" + ) config = get_external_source_config("gas_filling_stations") - utc_timezone = pytz.timezone("UTC") # create root servicenode to which the imported service_node will connect root_service_node = ServiceNode.objects.create( @@ -24,17 +31,16 @@ def test_gas_filling_stations_import(): # Municipality must be created in order to update_service_node_count() # to execute without errors create_municipalities() - # Import using fixture data import_gas_filling_stations( logger=logger, config=config, - test_data=get_test_fixture_json_data("gas_filling_stations.json"), ) service = Service.objects.get(name=config["service"]["name"]["fi"]) assert service.id == config["service"]["id"] service_node = ServiceNode.objects.get(name=config["service_node"]["name"]["fi"]) assert service_node.id == config["service_node"]["id"] assert service_node.parent.id == root_service_node.id + # See line 20, commented as the mock does not work. 
assert Unit.objects.all().count() == 2 assert Unit.objects.all()[1].id == config["units_offset"] assert Unit.objects.get(name="Raisio Kuninkoja") diff --git a/specification.swagger.yaml b/specification.swagger.yaml index 6f3151ef8..6af3edfc1 100644 --- a/specification.swagger.yaml +++ b/specification.swagger.yaml @@ -300,7 +300,25 @@ paths: schema: type: integer example: 811 - - $ref: "#/components/parameters/include_param" + - name: include + in: query + style: form + explode: false + description: "Enable count service by division with: "include=unit_count_per_division"" + type: string + - name: division + in: query + style: form + explode: false + description: A comma-separated list of administrative divisions to be used when unit + counting by service and division. Use either full division ids or shorthands of the form + muni/type\:id + required: false + schema: + type: array + items: + type: string + example: ocd-division/country:fi/kunta:raisio - $ref: "#/components/parameters/only_param" - $ref: "#/components/parameters/geometry_param" /service/: @@ -313,7 +331,7 @@ paths: - service parameters: - $ref: "#/components/parameters/page_param" - - $ref: "#/components/parameters/pagesize_param" + - $ref: "#/components/parameters/pagesize_param" - name: id in: query style: form @@ -324,6 +342,7 @@ paths: items: type: integer example: 811,663 + responses: "200": description: List of services, paginated