From 39f6d78b192f66fcd7cf65484cb913657eb5cd75 Mon Sep 17 00:00:00 2001 From: juuso-j Date: Wed, 14 Jun 2023 12:26:49 +0300 Subject: [PATCH 01/84] Replace deprecated distutils.util.strtobool --- bicycle_network/api/views.py | 3 +-- mobility_data/api/views.py | 2 +- mobility_data/importers/berths.py | 3 ++- services/search/api.py | 2 +- 4 files changed, 5 insertions(+), 5 deletions(-) diff --git a/bicycle_network/api/views.py b/bicycle_network/api/views.py index d7bb73423..f72fb002e 100644 --- a/bicycle_network/api/views.py +++ b/bicycle_network/api/views.py @@ -1,5 +1,3 @@ -from distutils.util import strtobool - from django.contrib.gis.db.models.functions import Distance from django.contrib.gis.gdal import SpatialReference from django.contrib.gis.geos import Point @@ -10,6 +8,7 @@ from rest_framework.exceptions import ParseError from services.api_pagination import Pagination +from services.utils import strtobool from ..models import BicycleNetwork, BicycleNetworkPart from .serializers import ( diff --git a/mobility_data/api/views.py b/mobility_data/api/views.py index 4e1ff1285..59660ed98 100644 --- a/mobility_data/api/views.py +++ b/mobility_data/api/views.py @@ -1,6 +1,5 @@ import logging import types -from distutils.util import strtobool from django.contrib.gis.gdal import SpatialReference from django.core.exceptions import ValidationError @@ -12,6 +11,7 @@ from rest_framework.response import Response from services.models import Unit +from services.utils import strtobool from ..models import ContentType, GroupType, MobileUnit, MobileUnitGroup from .serializers import ( diff --git a/mobility_data/importers/berths.py b/mobility_data/importers/berths.py index 9c0d40af3..d45338503 100644 --- a/mobility_data/importers/berths.py +++ b/mobility_data/importers/berths.py @@ -1,9 +1,10 @@ import csv import os -from distutils.util import strtobool from django.contrib.gis.geos import Point +from services.utils import strtobool + from .utils import FieldTypes, 
get_file_name_from_data_source, get_root_dir # Default name of the file, if not added to DataSource. diff --git a/services/search/api.py b/services/search/api.py index 7c9f4d0d0..d4c681290 100644 --- a/services/search/api.py +++ b/services/search/api.py @@ -19,7 +19,6 @@ """ import logging import re -from distutils.util import strtobool from itertools import chain from django.db import connection, reset_queries @@ -42,6 +41,7 @@ Unit, UnitAccessibilityShortcomings, ) +from services.utils import strtobool from .constants import ( DEFAULT_MODEL_LIMIT_VALUE, From 7c343dc552d93382700151145ff5e95bc2c0ee99 Mon Sep 17 00:00:00 2001 From: juuso-j Date: Wed, 14 Jun 2023 12:29:35 +0300 Subject: [PATCH 02/84] Add reimplemtation of distutils.util.strtobool --- services/utils/types.py | 14 ++++++++++++++ 1 file changed, 14 insertions(+) create mode 100644 services/utils/types.py diff --git a/services/utils/types.py b/services/utils/types.py new file mode 100644 index 000000000..b300509ab --- /dev/null +++ b/services/utils/types.py @@ -0,0 +1,14 @@ +def strtobool(val): + """Convert a string representation of truth to true (1) or false (0). + + True values are 'y', 'yes', 't', 'true', 'on', and '1'; false values + are 'n', 'no', 'f', 'false', 'off', and '0'. Raises ValueError if + 'val' is anything else. 
+ """ + val = val.lower() + if val in ("y", "yes", "t", "true", "on", "1"): + return 1 + elif val in ("n", "no", "f", "false", "off", "0"): + return 0 + else: + raise ValueError("invalid truth value %r" % (val,)) From 8be4ca6b8bdfb445163f121f678a6c6bda35862e Mon Sep 17 00:00:00 2001 From: juuso-j Date: Wed, 14 Jun 2023 12:51:35 +0300 Subject: [PATCH 03/84] Import strtobool --- services/utils/__init__.py | 1 + 1 file changed, 1 insertion(+) diff --git a/services/utils/__init__.py b/services/utils/__init__.py index 1cadcc62e..edd6e8b68 100644 --- a/services/utils/__init__.py +++ b/services/utils/__init__.py @@ -1,3 +1,4 @@ from .accessibility_shortcoming_calculator import AccessibilityShortcomingCalculator from .models import check_valid_concrete_field from .translator import get_translated +from .types import strtobool From 4f6733347e0d30a6e596ff2028c836bc352ec04c Mon Sep 17 00:00:00 2001 From: juuso-j Date: Wed, 14 Jun 2023 14:47:32 +0300 Subject: [PATCH 04/84] Add missing test_import_counter_data marker --- pytest.ini | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/pytest.ini b/pytest.ini index 9538447d3..62da235ab 100644 --- a/pytest.ini +++ b/pytest.ini @@ -2,4 +2,7 @@ DJANGO_SETTINGS_MODULE = smbackend.settings addopts = -m "not test_import_counter_data" markers = - to test counter importers run '-m test_import_counter_data' \ No newline at end of file + test_import_counter_data: mark a test for (eco) counter data, to test counter importers run 'pytest -m test_import_counter_data' + + + \ No newline at end of file From d9467d08072b3eebfde0ddc19c3df1601c311a70 Mon Sep 17 00:00:00 2001 From: juuso-j Date: Wed, 14 Jun 2023 14:48:21 +0300 Subject: [PATCH 05/84] Change deprecated 'method_whitelist' -> 'allowed_methods' --- eco_counter/constants.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/eco_counter/constants.py b/eco_counter/constants.py index 7744b4229..ab296ad4b 100644 --- a/eco_counter/constants.py +++ 
b/eco_counter/constants.py @@ -127,7 +127,7 @@ retry_strategy = Retry( total=10, status_forcelist=[429], - method_whitelist=["GET", "POST"], + allowed_methods=["GET", "POST"], backoff_factor=30, # 30, 60, 120 , 240, ..seconds ) adapter = HTTPAdapter(max_retries=retry_strategy) From 47fcc5e7d79c514025d35634feb81fee757e8e39 Mon Sep 17 00:00:00 2001 From: juuso-j Date: Thu, 15 Jun 2023 14:20:44 +0300 Subject: [PATCH 06/84] ADD 'TR' choice --- eco_counter/api/views.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/eco_counter/api/views.py b/eco_counter/api/views.py index 04d3ea3b1..a910bf85a 100644 --- a/eco_counter/api/views.py +++ b/eco_counter/api/views.py @@ -62,7 +62,9 @@ def list(self, request): if counter_type in str(CSV_DATA_SOURCES): queryset = Station.objects.filter(csv_data_source=counter_type) else: - raise ParseError("Valid 'counter_type' choices are: 'EC','TC' or 'LC'.") + raise ParseError( + "Valid 'counter_type' choices are: 'EC', 'TC', 'TR' or 'LC'." + ) page = self.paginate_queryset(queryset) serializer = StationSerializer(page, many=True) return self.get_paginated_response(serializer.data) From f975289fa5bd74bead082363a0507c9df81eaa39 Mon Sep 17 00:00:00 2001 From: juuso-j Date: Thu, 15 Jun 2023 14:21:54 +0300 Subject: [PATCH 07/84] Serialize bus data --- eco_counter/api/serializers.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/eco_counter/api/serializers.py b/eco_counter/api/serializers.py index 7b9f81713..d81362679 100644 --- a/eco_counter/api/serializers.py +++ b/eco_counter/api/serializers.py @@ -23,6 +23,9 @@ "value_jk", "value_jp", "value_jt", + "value_bk", + "value_bp", + "value_bt", ] From bc4a850696bd9841995e15f784a86986daa74251 Mon Sep 17 00:00:00 2001 From: juuso-j Date: Thu, 22 Jun 2023 11:01:57 +0300 Subject: [PATCH 08/84] Add data_type param --- eco_counter/specification.swagger2.0.yaml | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/eco_counter/specification.swagger2.0.yaml 
b/eco_counter/specification.swagger2.0.yaml index 9505177f7..485d109c2 100755 --- a/eco_counter/specification.swagger2.0.yaml +++ b/eco_counter/specification.swagger2.0.yaml @@ -251,6 +251,10 @@ paths: description: "The type of the counter EC(Eco Counter), TC(Traffic Counter), LC(LAM Counter), TR(Telraam Counter)" name: counter_type type: string + - in: query + description: "The data type of the counter: A(car), B(bus), J(pedestrian) or P(bicycle). Returns stations containing data of the specifiec type." + name: data_type + type: string responses: 200: description: "List of stations." From 293c0b3dae64195d41fedf270e5f4465278108b2 Mon Sep 17 00:00:00 2001 From: juuso-j Date: Thu, 22 Jun 2023 11:02:30 +0300 Subject: [PATCH 09/84] Add data_type filter param to Station list view --- eco_counter/api/views.py | 15 +++++++++++++++ 1 file changed, 15 insertions(+) diff --git a/eco_counter/api/views.py b/eco_counter/api/views.py index a910bf85a..695e6f0ec 100644 --- a/eco_counter/api/views.py +++ b/eco_counter/api/views.py @@ -65,6 +65,21 @@ def list(self, request): raise ParseError( "Valid 'counter_type' choices are: 'EC', 'TC', 'TR' or 'LC'." 
) + if "data_type" in filters: + data_type = filters["data_type"].lower() + data_types = ["a", "j", "b", "p"] + if data_type not in data_types: + raise ParseError( + f"Valid 'data_type' choices are: {', '.join(data_types)}" + ) + ids = [] + data_type = data_type + "t" + for station in Station.objects.all(): + filter = {"station": station, f"value_{data_type}__gt": 0} + if YearData.objects.filter(**filter).count() > 0: + ids.append(station.id) + queryset = Station.objects.filter(id__in=ids) + page = self.paginate_queryset(queryset) serializer = StationSerializer(page, many=True) return self.get_paginated_response(serializer.data) From 50500b3be6b07dd6d634513499056c37b78916b5 Mon Sep 17 00:00:00 2001 From: juuso-j Date: Thu, 22 Jun 2023 11:17:52 +0300 Subject: [PATCH 10/84] Add data_type param tests --- eco_counter/tests/test_api.py | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/eco_counter/tests/test_api.py b/eco_counter/tests/test_api.py index f10ee7f9c..8f0afc7bb 100644 --- a/eco_counter/tests/test_api.py +++ b/eco_counter/tests/test_api.py @@ -271,3 +271,10 @@ def test__station(api_client, stations, year_datas): assert response.status_code == 200 assert response.json()["results"][0]["name"] == TEST_EC_STATION_NAME assert response.json()["results"][0]["sensor_types"] == ["at"] + # Test retrieving station by data type + url = reverse("eco_counter:stations-list") + "?data_type=a" + response = api_client.get(url) + assert response.json()["count"] == 1 + url = reverse("eco_counter:stations-list") + "?data_type=p" + response = api_client.get(url) + assert response.json()["count"] == 0 From 6ab422f7333c88280bb7e17b5f93d2575dbd35f1 Mon Sep 17 00:00:00 2001 From: juuso-j Date: Thu, 22 Jun 2023 11:22:36 +0300 Subject: [PATCH 11/84] Fix typo --- eco_counter/specification.swagger2.0.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/eco_counter/specification.swagger2.0.yaml b/eco_counter/specification.swagger2.0.yaml index 485d109c2..8302a6194 
100755 --- a/eco_counter/specification.swagger2.0.yaml +++ b/eco_counter/specification.swagger2.0.yaml @@ -252,7 +252,7 @@ paths: name: counter_type type: string - in: query - description: "The data type of the counter: A(car), B(bus), J(pedestrian) or P(bicycle). Returns stations containing data of the specifiec type." + description: "The data type of the counter: A(car), B(bus), J(pedestrian) or P(bicycle). Returns stations containing data of the specified type." name: data_type type: string responses: From 1cdcc7970488210f1712e3081cc082dd78d45b53 Mon Sep 17 00:00:00 2001 From: juuso-j Date: Mon, 17 Jul 2023 14:48:16 +0300 Subject: [PATCH 12/84] Remove obsolete elasticsearch related code --- services/api.py | 56 ------------------------------------------------- 1 file changed, 56 deletions(-) diff --git a/services/api.py b/services/api.py index d37244eba..4c232c751 100644 --- a/services/api.py +++ b/services/api.py @@ -51,8 +51,6 @@ else: DEFAULT_RENDERERS = () -# This allows us to find a serializer for Haystack search results -serializers_by_model = {} all_views = [] @@ -63,14 +61,6 @@ def register_view(klass, name, basename=None): entry["basename"] = basename all_views.append(entry) - if ( - klass.serializer_class - and hasattr(klass.serializer_class, "Meta") - and hasattr(klass.serializer_class.Meta, "model") - ): - model = klass.serializer_class.Meta.model - serializers_by_model[model] = klass.serializer_class - LANGUAGES = [x[0] for x in settings.LANGUAGES] @@ -1085,52 +1075,6 @@ def list(self, request, **kwargs): register_view(UnitViewSet, "unit") -class SearchSerializer(serializers.Serializer): - def __init__(self, *args, **kwargs): - super(SearchSerializer, self).__init__(*args, **kwargs) - self.serializer_by_model = {} - - def _strip_context(self, context, model): - if model == Unit: - key = "unit" - elif model == Service: - key = "service" - else: - key = "service_node" - for spec in ["include", "only"]: - if spec in context: - context[spec] = 
context[spec].get(key, []) - if "only" in context and context["only"] == []: - context.pop("only") - return context - - def get_result_serializer(self, model, instance): - ser = self.serializer_by_model.get(model) - if not ser: - ser_class = serializers_by_model[model] - assert model in serializers_by_model, "Serializer for %s not found" % model - context = self._strip_context(self.context.copy(), model) - ser = ser_class(context=context, many=False) - self.serializer_by_model[model] = ser - # TODO: another way to serialize with new data without - # costly Serializer instantiation - ser.instance = instance - if hasattr(ser, "_data"): - del ser._data - return ser - - def to_representation(self, search_result): - if not search_result or not search_result.model: - return None - model = search_result.model - serializer = self.get_result_serializer(model, search_result.object) - data = serializer.data - data["sort_index"] = search_result._sort_index - data["object_type"] = model._meta.model_name - data["score"] = search_result.score - return data - - class AccessibilityRuleView(viewsets.ViewSetMixin, generics.ListAPIView): serializer_class = None From c7476a8a8681328ddcf9bdd0110f2020cdd14dae Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 17 Jul 2023 11:57:34 +0000 Subject: [PATCH 13/84] Bump django from 4.1.7 to 4.1.10 Bumps [django](https://github.com/django/django) from 4.1.7 to 4.1.10. - [Commits](https://github.com/django/django/compare/4.1.7...4.1.10) --- updated-dependencies: - dependency-name: django dependency-type: direct:production ... 
Signed-off-by: dependabot[bot] --- requirements.txt | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/requirements.txt b/requirements.txt index 9cbc1857e..bd9e47f0f 100644 --- a/requirements.txt +++ b/requirements.txt @@ -51,7 +51,7 @@ coverage==5.5 # via pytest-cov cron-descriptor==1.2.35 # via django-celery-beat -django==4.1.7 +django==4.1.10 # via # -r requirements.in # django-celery-beat @@ -233,9 +233,7 @@ toml==0.10.2 # pytest # pytest-cov tomli==1.2.1 - # via - # black - # pep517 + # via pep517 tqdm==4.62.3 # via -r requirements.in tzdata==2022.1 From 86035401059035aed78a7d18b11b458232fe5206 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 17 Jul 2023 12:06:50 +0000 Subject: [PATCH 14/84] Bump requests from 2.26.0 to 2.31.0 Bumps [requests](https://github.com/psf/requests) from 2.26.0 to 2.31.0. - [Release notes](https://github.com/psf/requests/releases) - [Changelog](https://github.com/psf/requests/blob/main/HISTORY.md) - [Commits](https://github.com/psf/requests/compare/v2.26.0...v2.31.0) --- updated-dependencies: - dependency-name: requests dependency-type: direct:production ... Signed-off-by: dependabot[bot] --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index bd9e47f0f..3eba795be 100644 --- a/requirements.txt +++ b/requirements.txt @@ -205,7 +205,7 @@ raven==6.10.0 # via -r requirements.in redis==4.4.4 # via -r requirements.in -requests==2.26.0 +requests==2.31.0 # via # -r requirements.in # django-munigeo From c1aed39216ce5025ec4efbaeeb9e6b591dfceaf5 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 17 Jul 2023 12:10:37 +0000 Subject: [PATCH 15/84] Bump sqlparse from 0.4.2 to 0.4.4 Bumps [sqlparse](https://github.com/andialbrecht/sqlparse) from 0.4.2 to 0.4.4. 
- [Changelog](https://github.com/andialbrecht/sqlparse/blob/master/CHANGELOG) - [Commits](https://github.com/andialbrecht/sqlparse/compare/0.4.2...0.4.4) --- updated-dependencies: - dependency-name: sqlparse dependency-type: indirect ... Signed-off-by: dependabot[bot] --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index bd9e47f0f..1290480a3 100644 --- a/requirements.txt +++ b/requirements.txt @@ -226,7 +226,7 @@ six==1.16.0 # python-dateutil # requests-mock # url-normalize -sqlparse==0.4.2 +sqlparse==0.4.4 # via django toml==0.10.2 # via From 6c891074cb33d033b06e4cd39d6f65cf87eb9c1b Mon Sep 17 00:00:00 2001 From: juuso-j Date: Tue, 18 Jul 2023 10:18:54 +0300 Subject: [PATCH 16/84] Add support for unit_include with multiple units --- services/api.py | 15 ++++++++++++++- 1 file changed, 14 insertions(+), 1 deletion(-) diff --git a/services/api.py b/services/api.py index d37244eba..244d7d3eb 100644 --- a/services/api.py +++ b/services/api.py @@ -1147,13 +1147,13 @@ def list(self, request, *args, **kwargs): class AdministrativeDivisionSerializer(munigeo_api.AdministrativeDivisionSerializer): def to_representation(self, obj): ret = super(AdministrativeDivisionSerializer, self).to_representation(obj) - if "request" not in self.context: return ret query_params = self.context["request"].query_params unit_include = query_params.get("unit_include", None) service_point_id = ret["service_point_id"] + if service_point_id and unit_include: try: unit = Unit.objects.get(id=service_point_id) @@ -1167,6 +1167,19 @@ def to_representation(self, obj): ser = UnitSerializer(unit, context={"only": unit_include.split(",")}) ret["unit"] = ser.data + unit_ids = ret["units"] + if unit_ids and unit_include: + units = Unit.objects.filter(id__in=unit_ids) + if units: + units_data = [] + for unit in units: + units_data.append( + UnitSerializer( + unit, context={"only": unit_include.split(",")} + ).data + ) + ret["units"] 
= units_data + include_fields = query_params.get("include", []) if "centroid" in include_fields and obj.geometry: centroid = obj.geometry.boundary.centroid From 1c3135e0e62544733039b494f751fc4f9cbef4c9 Mon Sep 17 00:00:00 2001 From: juuso-j Date: Tue, 18 Jul 2023 14:48:25 +0300 Subject: [PATCH 17/84] Count service units by division --- services/api.py | 77 +++++++++++++++++++++++++++++++++---------------- 1 file changed, 52 insertions(+), 25 deletions(-) diff --git a/services/api.py b/services/api.py index d37244eba..4148ad780 100644 --- a/services/api.py +++ b/services/api.py @@ -210,6 +210,32 @@ def root_service_nodes(services): ) +def resolve_divisions(divisions): + div_list = [] + for division_path in divisions: + if division_path.startswith("ocd-division"): + muni_ocd_id = division_path + else: + ocd_id_base = r"[\w0-9~_.-]+" + match_re = r"(%s)/([\w_-]+):(%s)" % (ocd_id_base, ocd_id_base) + m = re.match(match_re, division_path, re.U) + if not m: + raise ParseError("'division' must be of form 'muni/type:id'") + + arr = division_path.split("/") + muni_ocd_id = make_muni_ocd_id(arr.pop(0), "/".join(arr)) + try: + div = AdministrativeDivision.objects.select_related("geometry").get( + ocd_id=muni_ocd_id + ) + except AdministrativeDivision.DoesNotExist: + raise ParseError( + "administrative division with OCD ID '%s' not found" % muni_ocd_id + ) + div_list.append(div) + return div_list + + class JSONAPISerializer(serializers.ModelSerializer): def __init__(self, *args, **kwargs): super(JSONAPISerializer, self).__init__(*args, **kwargs) @@ -300,7 +326,13 @@ def root_service_nodes(self, obj): class Meta: model = ServiceNode - fields = "__all__" + exclude = ( + "search_column_fi", + "search_column_sv", + "search_column_en", + "syllables_fi", + "service_reference", + ) class ServiceSerializer(TranslatedModelSerializer, JSONAPISerializer): @@ -315,6 +347,16 @@ def to_representation(self, obj): total += unit_count.count ret["unit_count"]["municipality"][div_name] = 
unit_count.count ret["unit_count"]["total"] = total + + divisions = self.context.get("divisions", []) + include_fields = self.context.get("include", []) + if "unit_count_per_division" in include_fields and divisions: + ret["unit_count_per_division"] = {} + div_list = resolve_divisions(divisions) + for div in div_list: + ret["unit_count_per_division"][div.name] = Unit.objects.filter( + services=obj.pk, location__within=div.geometry.boundary + ).count() return ret class Meta: @@ -530,6 +572,13 @@ class ServiceViewSet(JSONAPIViewSet, viewsets.ReadOnlyModelViewSet): queryset = Service.objects.all() serializer_class = ServiceSerializer + def get_serializer_context(self): + ret = super(ServiceViewSet, self).get_serializer_context() + query_params = self.request.query_params + division = query_params.get("division", "") + ret["divisions"] = [x.strip() for x in division.split(",") if x] + return ret + def get_queryset(self): queryset = ( super(ServiceViewSet, self) @@ -720,6 +769,7 @@ class Meta: "accessibility_property_hash", "identifier_hash", "public", + "syllables_fi", "search_column_fi", "search_column_sv", "search_column_en", @@ -916,30 +966,7 @@ def validate_service_node_ids(service_node_ids): # Divisions can be specified with form: # division=helsinki/kaupunginosa:kallio,vantaa/äänestysalue:5 d_list = filters["division"].lower().split(",") - div_list = [] - for division_path in d_list: - if division_path.startswith("ocd-division"): - muni_ocd_id = division_path - else: - ocd_id_base = r"[\w0-9~_.-]+" - match_re = r"(%s)/([\w_-]+):(%s)" % (ocd_id_base, ocd_id_base) - m = re.match(match_re, division_path, re.U) - if not m: - raise ParseError("'division' must be of form 'muni/type:id'") - - arr = division_path.split("/") - muni_ocd_id = make_muni_ocd_id(arr.pop(0), "/".join(arr)) - try: - div = AdministrativeDivision.objects.select_related("geometry").get( - ocd_id=muni_ocd_id - ) - except AdministrativeDivision.DoesNotExist: - raise ParseError( - "administrative 
division with OCD ID '%s' not found" - % muni_ocd_id - ) - div_list.append(div) - + div_list = resolve_divisions(d_list) if div_list: mp = div_list.pop(0).geometry.boundary for div in div_list: From b2c0e55eaa1f0ca4eacc97f6fe3f5b51aa004b39 Mon Sep 17 00:00:00 2001 From: juuso-j Date: Tue, 18 Jul 2023 15:04:32 +0300 Subject: [PATCH 18/84] Add include and division params to /service endpoint --- specification.swagger.yaml | 23 +++++++++++++++++++++-- 1 file changed, 21 insertions(+), 2 deletions(-) diff --git a/specification.swagger.yaml b/specification.swagger.yaml index 6f3151ef8..6af3edfc1 100644 --- a/specification.swagger.yaml +++ b/specification.swagger.yaml @@ -300,7 +300,25 @@ paths: schema: type: integer example: 811 - - $ref: "#/components/parameters/include_param" + - name: include + in: query + style: form + explode: false + description: "Enable count service by division with: "include=unit_count_per_division"" + type: string + - name: division + in: query + style: form + explode: false + description: A comma-separated list of administrative divisions to be used when unit + counting by service and division. 
Use either full division ids or shorthands of the form + muni/type\:id + required: false + schema: + type: array + items: + type: string + example: ocd-division/country:fi/kunta:raisio - $ref: "#/components/parameters/only_param" - $ref: "#/components/parameters/geometry_param" /service/: @@ -313,7 +331,7 @@ paths: - service parameters: - $ref: "#/components/parameters/page_param" - - $ref: "#/components/parameters/pagesize_param" + - $ref: "#/components/parameters/pagesize_param" - name: id in: query style: form @@ -324,6 +342,7 @@ paths: items: type: integer example: 811,663 + responses: "200": description: List of services, paginated From 584a4367089d1f6bf40c768dfa98b569f56c56cc Mon Sep 17 00:00:00 2001 From: Juuso Jokiniemi <68938778+juuso-j@users.noreply.github.com> Date: Tue, 18 Jul 2023 15:36:11 +0300 Subject: [PATCH 19/84] Change action to v3 --- .github/workflows/run-tests.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/run-tests.yml b/.github/workflows/run-tests.yml index c72649628..feeb729fd 100644 --- a/.github/workflows/run-tests.yml +++ b/.github/workflows/run-tests.yml @@ -19,9 +19,9 @@ jobs: LAM_COUNTER_API_BASE_URL: https://tie.digitraffic.fi/api/tms/v1/history steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v3 - name: Set up Python 3.10.0 - uses: actions/setup-python@v2 + uses: actions/setup-python@v3 with: python-version: 3.10.0 - name: Install required Ubuntu packages @@ -50,7 +50,7 @@ jobs: pip install coverage coverage report -m - name: Upload Coverage to Codecov - uses: codecov/codecov-action@v2 + uses: codecov/codecov-action@v3 # Majority of the tests require database services: # Label used to access the service container From 63c81b763e6809f89b1bbd41dbeff7bf2936e499 Mon Sep 17 00:00:00 2001 From: juuso-j Date: Wed, 19 Jul 2023 11:09:10 +0300 Subject: [PATCH 20/84] Change v2 to v3 --- .github/workflows/run-tests.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff 
--git a/.github/workflows/run-tests.yml b/.github/workflows/run-tests.yml index c72649628..feeb729fd 100644 --- a/.github/workflows/run-tests.yml +++ b/.github/workflows/run-tests.yml @@ -19,9 +19,9 @@ jobs: LAM_COUNTER_API_BASE_URL: https://tie.digitraffic.fi/api/tms/v1/history steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v3 - name: Set up Python 3.10.0 - uses: actions/setup-python@v2 + uses: actions/setup-python@v3 with: python-version: 3.10.0 - name: Install required Ubuntu packages @@ -50,7 +50,7 @@ jobs: pip install coverage coverage report -m - name: Upload Coverage to Codecov - uses: codecov/codecov-action@v2 + uses: codecov/codecov-action@v3 # Majority of the tests require database services: # Label used to access the service container From 5aebd577c09c53277608d662e93b3eb79a2407f9 Mon Sep 17 00:00:00 2001 From: juuso-j Date: Wed, 19 Jul 2023 11:09:49 +0300 Subject: [PATCH 21/84] Change pyyaml version to 5.3.1 See: https://github.com/yaml/pyyaml/issues/724 --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index 9cbc1857e..cd8a7a45b 100644 --- a/requirements.txt +++ b/requirements.txt @@ -197,7 +197,7 @@ pytz==2021.3 # celery # django-timezone-field # pandas -pyyaml==5.4.1 +pyyaml==5.3.1 # via # django-munigeo # drf-spectacular From f5bf3ac8e7bb8cbb49c56711ef730ed6e6dc6f99 Mon Sep 17 00:00:00 2001 From: juuso-j Date: Thu, 20 Jul 2023 14:14:11 +0300 Subject: [PATCH 22/84] Add info of importing exclude rules --- README.md | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/README.md b/README.md index e86ba9c9c..91f269449 100644 --- a/README.md +++ b/README.md @@ -141,8 +141,12 @@ For Turku specific imports see smbackend_turku/README.md. ./manage.py geo_import helsinki --divisions ./manage.py index_search_columns ``` - +Import exclude rules fixtures used by the search: +``` +./manage.py loaddata services/fixtures/exclusion_rules.json +``` 7. 
Redis + Redis is used for caching and as a message broker for Celery. Install Redis. Ubuntu: `sudo apt-get install redis-server` @@ -198,3 +202,6 @@ psql template1 -c 'CREATE EXTENSION IF NOT EXISTS pg_trgm;' Mobility platform ----------------- The mobility data platform of the service map is being developed as part of European Union Horizon 2020 programme funded SCALE-UP project (grant agreement no. 955332). + +For more information see: mobility_data/README.mk + From 12fa6fd2b19dde0419021820f60e6e19022bc24a Mon Sep 17 00:00:00 2001 From: juuso-j Date: Thu, 20 Jul 2023 14:15:22 +0300 Subject: [PATCH 23/84] Add initial exclusion rules --- services/fixtures/exclusion_rules.json | 10 ++++++++++ 1 file changed, 10 insertions(+) create mode 100644 services/fixtures/exclusion_rules.json diff --git a/services/fixtures/exclusion_rules.json b/services/fixtures/exclusion_rules.json new file mode 100644 index 000000000..2880b98a8 --- /dev/null +++ b/services/fixtures/exclusion_rules.json @@ -0,0 +1,10 @@ +[ + { + "model": "services.exclusionrule", + "pk": 1, + "fields": { + "word": "tekojää", + "exclusion": "-nurmi" + } + } + ] \ No newline at end of file From 5fe61376406fcaa6ad17cdca25f0b5d262a8862f Mon Sep 17 00:00:00 2001 From: juuso-j Date: Thu, 20 Jul 2023 14:20:06 +0300 Subject: [PATCH 24/84] Add model ExclusionRule --- services/migrations/0095_exclusionrule.py | 37 +++++++++++++++++++++++ 1 file changed, 37 insertions(+) create mode 100644 services/migrations/0095_exclusionrule.py diff --git a/services/migrations/0095_exclusionrule.py b/services/migrations/0095_exclusionrule.py new file mode 100644 index 000000000..0e6b61e4a --- /dev/null +++ b/services/migrations/0095_exclusionrule.py @@ -0,0 +1,37 @@ +# Generated by Django 4.1.7 on 2023-07-20 05:59 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("services", "0094_create_syllables_fi_columns"), + ] + + operations = [ + migrations.CreateModel( + 
name="ExclusionRule", + fields=[ + ( + "id", + models.AutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), + ("word", models.CharField(max_length=100, verbose_name="Word")), + ( + "exclusion", + models.CharField(max_length=100, verbose_name="Exclusion"), + ), + ], + options={ + "verbose_name": "Exclusion rule", + "verbose_name_plural": "Exclusion rules", + "ordering": ["-id"], + }, + ), + ] From bf044acfe4e2b45ce18ada48de3202982dcbd29d Mon Sep 17 00:00:00 2001 From: juuso-j Date: Thu, 20 Jul 2023 14:20:46 +0300 Subject: [PATCH 25/84] Import ExclusionRule --- services/models/__init__.py | 1 + 1 file changed, 1 insertion(+) diff --git a/services/models/__init__.py b/services/models/__init__.py index be9d191a0..57037a8e9 100644 --- a/services/models/__init__.py +++ b/services/models/__init__.py @@ -2,6 +2,7 @@ from .department import Department from .keyword import Keyword from .notification import Announcement, ErrorMessage +from .search_rule import ExclusionRule from .service import Service, UnitServiceDetails from .service_mapping import ServiceMapping from .service_node import ServiceNode From 848d52fb1cc8d23801581287d538bb4e2f2a6443 Mon Sep 17 00:00:00 2001 From: juuso-j Date: Thu, 20 Jul 2023 14:21:27 +0300 Subject: [PATCH 26/84] Add model ExclusionRule --- services/models/search_rule.py | 15 +++++++++++++++ 1 file changed, 15 insertions(+) create mode 100644 services/models/search_rule.py diff --git a/services/models/search_rule.py b/services/models/search_rule.py new file mode 100644 index 000000000..78c9c32b9 --- /dev/null +++ b/services/models/search_rule.py @@ -0,0 +1,15 @@ +from django.db import models +from django.utils.translation import gettext_lazy as _ + + +class ExclusionRule(models.Model): + word = models.CharField(max_length=100, verbose_name=_("Word")) + exclusion = models.CharField(max_length=100, verbose_name=_("Exclusion")) + + class Meta: + ordering = ["-id"] + verbose_name = _("Exclusion rule") 
+ verbose_name_plural = _("Exclusion rules") + + def __str__(self): + return "%s : %s" % (self.word, self.exclusion) From a40e0e70f5202519464d8945ab7bf375350c12a3 Mon Sep 17 00:00:00 2001 From: juuso-j Date: Thu, 20 Jul 2023 14:22:38 +0300 Subject: [PATCH 27/84] Add websearch and exclusion rules --- services/search/api.py | 20 +++++++++++++++++--- 1 file changed, 17 insertions(+), 3 deletions(-) diff --git a/services/search/api.py b/services/search/api.py index d4c681290..10408ef68 100644 --- a/services/search/api.py +++ b/services/search/api.py @@ -54,6 +54,7 @@ from .utils import ( get_all_ids_from_sql_results, get_preserved_order, + get_search_exclusions, get_service_node_results, get_trigram_results, set_address_fields, @@ -212,6 +213,14 @@ def get(self, request): else: trigram_threshold = DEFAULT_TRIGRAM_THRESHOLD + if "use_websearch" in params: + try: + use_websearch = strtobool(params["use_websearch"]) + except ValueError: + raise ParseError("'use_websearch' needs to be a boolean") + else: + use_websearch = True + if "geometry" in params: try: show_geometry = strtobool(params["geometry"]) @@ -266,7 +275,7 @@ def get(self, request): config_language = LANGUAGES[language_short] search_query_str = None # Used in the raw sql # Build conditional query string that is used in the SQL query. - # split my "," or whitespace + # split by "," or whitespace q_vals = re.split(r",\s+|\s+", q_val) q_vals = [s.strip().replace("'", "") for s in q_vals] for q in q_vals: @@ -279,12 +288,17 @@ def get(self, request): search_query_str += f"& {q}:*" else: search_query_str = f"{q}:*" - + search_fn = "to_tsquery" + if use_websearch: + exclusions = get_search_exclusions(q) + if exclusions: + search_fn = "websearch_to_tsquery" + search_query_str += f" {exclusions}" # This is ~100 times faster than using Djangos SearchRank and allows searching using wildard "|*" # and by rankig gives better results, e.g. extra fields weight is counted. 
sql = f""" SELECT id, type_name, name_{language_short}, ts_rank_cd(search_column_{language_short}, search_query) - AS rank FROM search_view, to_tsquery('{config_language}','{search_query_str}') search_query + AS rank FROM search_view, {search_fn}('{config_language}','{search_query_str}') search_query WHERE search_query @@ search_column_{language_short} ORDER BY rank DESC LIMIT {sql_query_limit}; """ From bf98d3d5f2a3ee15c4c249b1db0479bfb4897d5b Mon Sep 17 00:00:00 2001 From: juuso-j Date: Thu, 20 Jul 2023 14:24:07 +0300 Subject: [PATCH 28/84] Add information about use_websearch param --- services/search/specification.swagger.yaml | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/services/search/specification.swagger.yaml b/services/search/specification.swagger.yaml index c59950cba..e02142fbf 100644 --- a/services/search/specification.swagger.yaml +++ b/services/search/specification.swagger.yaml @@ -34,6 +34,16 @@ components: type: string example: fi default: fi + use_websearch_param: + name: use_websearch + in: query + schema: + type: boolean + default: true + description: > + "websearch_to_tsquery is a simplified version of to_tsquery with an alternative syntax, similar to the one used by web search engines." + If disabled, uses 'to_tsquery' function to convert the query to 'tsquery'. + If enabled, exclusion rules are used when generating the query as it support the not "-" operator. 
order_units_by_num_services_param: name: order_units_by_num_services in: query @@ -173,6 +183,7 @@ paths: - $ref: "#/components/parameters/q_param" - $ref: "#/components/parameters/language_param" - $ref: "#/components/parameters/use_trigram_param" + - $ref: "#/components/parameters/use_websearch_param" - $ref: "#/components/parameters/trigram_threshold_param" - $ref: "#/components/parameters/order_units_by_num_services_param" - $ref: "#/components/parameters/geometry_param" From dcd3f38e45905e538d325b2b8a421ad0085ceb41 Mon Sep 17 00:00:00 2001 From: juuso-j Date: Thu, 20 Jul 2023 14:27:20 +0300 Subject: [PATCH 29/84] Add get_search_exclusions function --- services/search/utils.py | 13 ++++++++++++- 1 file changed, 12 insertions(+), 1 deletion(-) diff --git a/services/search/utils.py b/services/search/utils.py index 472849b18..e8611b990 100644 --- a/services/search/utils.py +++ b/services/search/utils.py @@ -2,7 +2,7 @@ from django.db import connection from django.db.models import Case, When -from services.models import ServiceNode, ServiceNodeUnitCount, Unit +from services.models import ExclusionRule, ServiceNode, ServiceNodeUnitCount, Unit from services.search.constants import ( DEFAULT_TRIGRAM_THRESHOLD, LENGTH_OF_HYPHENATED_WORDS, @@ -194,3 +194,14 @@ def get_trigram_results( ids = [row[0] for row in all_results] objs = model.objects.filter(id__in=ids) return objs + + +def get_search_exclusions(q): + """ + To add/modify search exclusion rules edit: services/fixtures/exclusion_rules + To import rules: ./manage.py loaddata services/fixtures/exclusion_rules.json + """ + rule = ExclusionRule.objects.filter(word__iexact=q).first() + if rule: + return rule.exclusion + return "" From 94dd7a2f6ba86abacb1d101ed6b76718c1b23aa5 Mon Sep 17 00:00:00 2001 From: juuso-j Date: Thu, 20 Jul 2023 14:32:43 +0300 Subject: [PATCH 30/84] Add fixtures for testing exclusion rules --- services/search/tests/conftest.py | 52 +++++++++++++++++++++++++++++-- 1 file changed, 50 
insertions(+), 2 deletions(-) diff --git a/services/search/tests/conftest.py b/services/search/tests/conftest.py index cd4e5820b..3d7bd4cde 100644 --- a/services/search/tests/conftest.py +++ b/services/search/tests/conftest.py @@ -12,7 +12,10 @@ ) from rest_framework.test import APIClient -from services.management.commands.index_search_columns import get_search_column +from services.management.commands.index_search_columns import ( + generate_syllables, + get_search_column, +) from services.management.commands.services_import.services import ( update_service_counts, update_service_node_counts, @@ -20,6 +23,7 @@ ) from services.models import ( Department, + ExclusionRule, Service, ServiceNode, Unit, @@ -80,9 +84,32 @@ def units( ) unit.services.add(3) unit.save() + # id=4 is the "Tekonurmikentät" service + service = Service.objects.get(id=4) + unit = Unit.objects.create( + id=4, + name="Kupittaan tekonurmikentät", + service_names_fi=[service.name_fi], + last_modified_time=now(), + municipality=municipality, + ) + unit.services.add(4) + unit.save() + # id=5 is the "tekojääradat" service + service = Service.objects.get(id=5) + unit = Unit.objects.create( + id=5, + name="Parkin kenttä", + service_names_fi=[service.name_fi], + last_modified_time=now(), + municipality=municipality, + ) + unit.services.add(5) + unit.save() update_service_root_service_nodes() update_service_counts() update_service_node_counts() + generate_syllables(Unit) Unit.objects.update(search_column_fi=get_search_column(Unit, "fi")) return Unit.objects.all() @@ -101,8 +128,9 @@ def department(municipality): @pytest.mark.django_db @pytest.fixture def accessibility_shortcoming(units): + unit = Unit.objects.get(name="Biologinen museo") return UnitAccessibilityShortcomings.objects.create( - unit=units[1], accessibility_shortcoming_count={"rollator": 5, "stroller": 1} + unit=unit, accessibility_shortcoming_count={"rollator": 5, "stroller": 1} ) @@ -127,6 +155,19 @@ def services(): name_sv="Simhall", 
last_modified_time=now(), ) + Service.objects.create( + id=4, + name="Tekonurmikentät", + name_sv="Konstgräsplaner", + last_modified_time=now(), + ) + Service.objects.create( + id=5, + name="tekojääkentät", + name_sv="konstisbanor", + last_modified_time=now(), + ) + generate_syllables(Service) Service.objects.update(search_column_fi=get_search_column(Service, "fi")) return Service.objects.all() @@ -244,3 +285,10 @@ def streets(): Street.objects.create(id=43, name="Markulantie", municipality_id="turku") Street.objects.create(id=44, name="Yliopistonkatu", municipality_id="turku") return Street.objects.all() + + +@pytest.mark.django_db +@pytest.fixture +def exclusion_rules(): + ExclusionRule.objects.create(id=1, word="tekojää", exclusion="-nurmi") + return ExclusionRule.objects.all() From be19bb7574fbcd0ae48982275e943be886b4a990 Mon Sep 17 00:00:00 2001 From: juuso-j Date: Thu, 20 Jul 2023 14:34:50 +0300 Subject: [PATCH 31/84] Add exclusion rules tests --- services/search/tests/test_api.py | 22 +++++++++++++++++++++- 1 file changed, 21 insertions(+), 1 deletion(-) diff --git a/services/search/tests/test_api.py b/services/search/tests/test_api.py index 2d78d4419..d79c0cbb2 100644 --- a/services/search/tests/test_api.py +++ b/services/search/tests/test_api.py @@ -13,6 +13,7 @@ def test_search( administrative_division, accessibility_shortcoming, municipality, + exclusion_rules, ): # Search for "museo" in entities: units,services and servicenods url = reverse("search") + "?q=museo&type=unit,service,servicenode" @@ -30,7 +31,6 @@ def test_search( assert biological_museum_unit["street_address"] == "Neitsytpolku 1" assert biological_museum_unit["municipality"] == "turku" assert biological_museum_unit["contract_type"]["id"] == "municipal_service" - assert ( biological_museum_unit["contract_type"]["description"]["fi"] == "municipal_service" @@ -133,3 +133,23 @@ def test_search( results = response.json()["results"] assert results[0]["object_type"] == "administrativedivision" 
assert results[0]["name"]["fi"] == "Turku" + + # Test exclusion rules used with websearch. By default (use_websearch=True) should only find Parkin kenttä + url = reverse("search") + "?q=tekojää&type=unit,service,servicenode" + response = api_client.get(url) + results = response.json()["results"] + assert len(results) == 2 + parkin_kentta = results[0] + assert parkin_kentta["object_type"] == "unit" + assert parkin_kentta["name"]["fi"] == "Parkin kenttä" + tekojaa_service = results[1] + assert tekojaa_service["object_type"] == "service" + assert tekojaa_service["name"]["fi"] == "tekojääkentät" + # Disabling use_websearch, should return both 'tekojääkentät', 'tekonurmikentät' services and their units. + # as syllable 'teko' is indexed from the compound words. + url = ( + reverse("search") + + "?q=tekojää&type=unit,service,servicenode&use_websearch=false" + ) + response = api_client.get(url) + assert len(response.json()["results"]) == 4 From 108c4cb64708534f24abdbfa412928601b161de8 Mon Sep 17 00:00:00 2001 From: juuso-j Date: Fri, 21 Jul 2023 10:06:20 +0300 Subject: [PATCH 32/84] Add new unit connection section type for price type --- services/models/unit_connection.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/services/models/unit_connection.py b/services/models/unit_connection.py index e018cde9c..9f76eef2b 100644 --- a/services/models/unit_connection.py +++ b/services/models/unit_connection.py @@ -13,6 +13,7 @@ class UnitConnection(models.Model): OTHER_ADDRESS_TYPE = 7 HIGHLIGHT_TYPE = 8 ESERVICE_LINK_TYPE = 9 + PRICE_TYPE = 10 SECTION_TYPES = ( (PHONE_OR_EMAIL_TYPE, "PHONE_OR_EMAIL"), @@ -24,6 +25,7 @@ class UnitConnection(models.Model): (OTHER_ADDRESS_TYPE, "OTHER_ADDRESS"), (HIGHLIGHT_TYPE, "HIGHLIGHT"), (ESERVICE_LINK_TYPE, "ESERVICE_LINK"), + (PRICE_TYPE, "PRICE"), ) unit = models.ForeignKey( From 8f730cd82aae40c7178caca5f2a681ac51765e57 Mon Sep 17 00:00:00 2001 From: juuso-j Date: Fri, 21 Jul 2023 10:08:14 +0300 Subject: [PATCH 33/84] Increase 
unitconnection name max length to 2100 --- services/models/unit_connection.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/services/models/unit_connection.py b/services/models/unit_connection.py index 9f76eef2b..0fbcbcabb 100644 --- a/services/models/unit_connection.py +++ b/services/models/unit_connection.py @@ -31,7 +31,7 @@ class UnitConnection(models.Model): unit = models.ForeignKey( Unit, db_index=True, related_name="connections", on_delete=models.CASCADE ) - name = models.CharField(max_length=600) + name = models.CharField(max_length=2100) www = models.URLField(null=True, max_length=400) section_type = models.PositiveSmallIntegerField(choices=SECTION_TYPES, null=True) email = models.EmailField(max_length=100, null=True) From 63e54e4596c0c8f0ae5e69da8420834ff46c7c37 Mon Sep 17 00:00:00 2001 From: juuso-j Date: Fri, 21 Jul 2023 10:10:37 +0300 Subject: [PATCH 34/84] Add SUBGROUP to section types --- services/models/unit_connection.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/services/models/unit_connection.py b/services/models/unit_connection.py index 0fbcbcabb..0200229ff 100644 --- a/services/models/unit_connection.py +++ b/services/models/unit_connection.py @@ -14,7 +14,7 @@ class UnitConnection(models.Model): HIGHLIGHT_TYPE = 8 ESERVICE_LINK_TYPE = 9 PRICE_TYPE = 10 - + SUBGROUP_TYPE = 11 SECTION_TYPES = ( (PHONE_OR_EMAIL_TYPE, "PHONE_OR_EMAIL"), (LINK_TYPE, "LINK"), @@ -26,6 +26,7 @@ class UnitConnection(models.Model): (HIGHLIGHT_TYPE, "HIGHLIGHT"), (ESERVICE_LINK_TYPE, "ESERVICE_LINK"), (PRICE_TYPE, "PRICE"), + (SUBGROUP_TYPE, "SUBGROUP"), ) unit = models.ForeignKey( From e0a40361800a688635efaf843e095205b42346fb Mon Sep 17 00:00:00 2001 From: juuso-j Date: Fri, 21 Jul 2023 10:14:38 +0300 Subject: [PATCH 35/84] Add tags-support for UnitConnections --- .../migrations/0099_unitconnection_tags.py | 24 +++++++++++++++++++ services/models/unit_connection.py | 2 ++ 2 files changed, 26 insertions(+) create mode 100644 
services/migrations/0099_unitconnection_tags.py diff --git a/services/migrations/0099_unitconnection_tags.py b/services/migrations/0099_unitconnection_tags.py new file mode 100644 index 000000000..ec9be3874 --- /dev/null +++ b/services/migrations/0099_unitconnection_tags.py @@ -0,0 +1,24 @@ +# Generated by Django 4.1.7 on 2023-07-21 07:13 + +import django.contrib.postgres.fields +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("services", "0098_alter_unitconnection_section_type"), + ] + + operations = [ + migrations.AddField( + model_name="unitconnection", + name="tags", + field=django.contrib.postgres.fields.ArrayField( + base_field=models.CharField(max_length=200), + default=list, + null=True, + size=None, + ), + ), + ] diff --git a/services/models/unit_connection.py b/services/models/unit_connection.py index 0200229ff..f1b0b8e8b 100644 --- a/services/models/unit_connection.py +++ b/services/models/unit_connection.py @@ -1,3 +1,4 @@ +from django.contrib.postgres.fields import ArrayField from django.db import models from .unit import Unit @@ -39,6 +40,7 @@ class UnitConnection(models.Model): phone = models.CharField(max_length=50, null=True) contact_person = models.CharField(max_length=80, null=True) order = models.PositiveSmallIntegerField(default=0) + tags = ArrayField(models.CharField(max_length=200), null=True, default=list) class Meta: ordering = ["order"] From 27957dd76b4fbec9ccbdc7b5ccc681bfce02b867 Mon Sep 17 00:00:00 2001 From: juuso-j Date: Fri, 21 Jul 2023 10:16:41 +0300 Subject: [PATCH 36/84] Increase unitconnection name max length to 2100 --- .../0097_update_unitconnection_names.py | 33 +++++++++++++++++++ 1 file changed, 33 insertions(+) create mode 100644 services/migrations/0097_update_unitconnection_names.py diff --git a/services/migrations/0097_update_unitconnection_names.py b/services/migrations/0097_update_unitconnection_names.py new file mode 100644 index 000000000..652631c5f --- 
/dev/null +++ b/services/migrations/0097_update_unitconnection_names.py @@ -0,0 +1,33 @@ +# Generated by Django 4.1.7 on 2023-07-21 07:07 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("services", "0096_alter_unitconnection_section_type"), + ] + + operations = [ + migrations.AlterField( + model_name="unitconnection", + name="name", + field=models.CharField(max_length=2100), + ), + migrations.AlterField( + model_name="unitconnection", + name="name_en", + field=models.CharField(max_length=2100, null=True), + ), + migrations.AlterField( + model_name="unitconnection", + name="name_fi", + field=models.CharField(max_length=2100, null=True), + ), + migrations.AlterField( + model_name="unitconnection", + name="name_sv", + field=models.CharField(max_length=2100, null=True), + ), + ] From 19f1577bbe9c501fdae69d0b7ecc15ae1a457653 Mon Sep 17 00:00:00 2001 From: juuso-j Date: Fri, 21 Jul 2023 10:17:06 +0300 Subject: [PATCH 37/84] Add new unit connection section type for price type --- .../0096_alter_unitconnection_section_type.py | 32 +++++++++++++++++++ 1 file changed, 32 insertions(+) create mode 100644 services/migrations/0096_alter_unitconnection_section_type.py diff --git a/services/migrations/0096_alter_unitconnection_section_type.py b/services/migrations/0096_alter_unitconnection_section_type.py new file mode 100644 index 000000000..01fbf1a06 --- /dev/null +++ b/services/migrations/0096_alter_unitconnection_section_type.py @@ -0,0 +1,32 @@ +# Generated by Django 4.1.7 on 2023-07-21 07:02 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("services", "0095_exclusionrule"), + ] + + operations = [ + migrations.AlterField( + model_name="unitconnection", + name="section_type", + field=models.PositiveSmallIntegerField( + choices=[ + (1, "PHONE_OR_EMAIL"), + (2, "LINK"), + (3, "TOPICAL"), + (4, "OTHER_INFO"), + (5, "OPENING_HOURS"), + (6, 
"SOCIAL_MEDIA_LINK"), + (7, "OTHER_ADDRESS"), + (8, "HIGHLIGHT"), + (9, "ESERVICE_LINK"), + (10, "PRICE"), + ], + null=True, + ), + ), + ] From 6d06f04bd178b9cb637d4d131f9bc06c67bc8e74 Mon Sep 17 00:00:00 2001 From: juuso-j Date: Fri, 21 Jul 2023 10:17:34 +0300 Subject: [PATCH 38/84] Add SUBGROUP to section types --- .../0098_alter_unitconnection_section_type.py | 33 +++++++++++++++++++ 1 file changed, 33 insertions(+) create mode 100644 services/migrations/0098_alter_unitconnection_section_type.py diff --git a/services/migrations/0098_alter_unitconnection_section_type.py b/services/migrations/0098_alter_unitconnection_section_type.py new file mode 100644 index 000000000..e1d15fbc5 --- /dev/null +++ b/services/migrations/0098_alter_unitconnection_section_type.py @@ -0,0 +1,33 @@ +# Generated by Django 4.1.7 on 2023-07-21 07:09 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("services", "0097_update_unitconnection_names"), + ] + + operations = [ + migrations.AlterField( + model_name="unitconnection", + name="section_type", + field=models.PositiveSmallIntegerField( + choices=[ + (1, "PHONE_OR_EMAIL"), + (2, "LINK"), + (3, "TOPICAL"), + (4, "OTHER_INFO"), + (5, "OPENING_HOURS"), + (6, "SOCIAL_MEDIA_LINK"), + (7, "OTHER_ADDRESS"), + (8, "HIGHLIGHT"), + (9, "ESERVICE_LINK"), + (10, "PRICE"), + (11, "SUBGROUP"), + ], + null=True, + ), + ), + ] From afe893ad0816a56f400b6d2fafd4b6181ba3910d Mon Sep 17 00:00:00 2001 From: juuso-j Date: Fri, 21 Jul 2023 10:20:03 +0300 Subject: [PATCH 39/84] Add new connection type: OPENING_HOUR_OBJECT --- .../0100_alter_unitconnection_section_type.py | 34 +++++++++++++++++++ services/models/unit_connection.py | 3 ++ 2 files changed, 37 insertions(+) create mode 100644 services/migrations/0100_alter_unitconnection_section_type.py diff --git a/services/migrations/0100_alter_unitconnection_section_type.py b/services/migrations/0100_alter_unitconnection_section_type.py new file mode 
100644 index 000000000..a44163825 --- /dev/null +++ b/services/migrations/0100_alter_unitconnection_section_type.py @@ -0,0 +1,34 @@ +# Generated by Django 4.1.7 on 2023-07-21 07:18 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("services", "0099_unitconnection_tags"), + ] + + operations = [ + migrations.AlterField( + model_name="unitconnection", + name="section_type", + field=models.PositiveSmallIntegerField( + choices=[ + (1, "PHONE_OR_EMAIL"), + (2, "LINK"), + (3, "TOPICAL"), + (4, "OTHER_INFO"), + (5, "OPENING_HOURS"), + (6, "SOCIAL_MEDIA_LINK"), + (7, "OTHER_ADDRESS"), + (8, "HIGHLIGHT"), + (9, "ESERVICE_LINK"), + (10, "PRICE"), + (11, "SUBGROUP"), + (12, "OPENING_HOUR_OBJECT"), + ], + null=True, + ), + ), + ] diff --git a/services/models/unit_connection.py b/services/models/unit_connection.py index f1b0b8e8b..c9b888a39 100644 --- a/services/models/unit_connection.py +++ b/services/models/unit_connection.py @@ -16,6 +16,8 @@ class UnitConnection(models.Model): ESERVICE_LINK_TYPE = 9 PRICE_TYPE = 10 SUBGROUP_TYPE = 11 + OPENING_HOUR_OBJECT = 12 + SECTION_TYPES = ( (PHONE_OR_EMAIL_TYPE, "PHONE_OR_EMAIL"), (LINK_TYPE, "LINK"), @@ -28,6 +30,7 @@ class UnitConnection(models.Model): (ESERVICE_LINK_TYPE, "ESERVICE_LINK"), (PRICE_TYPE, "PRICE"), (SUBGROUP_TYPE, "SUBGROUP"), + (OPENING_HOUR_OBJECT, "OPENING_HOUR_OBJECT"), ) unit = models.ForeignKey( From d9c8859b46b09c5803428aeca61d5b895f1f488a Mon Sep 17 00:00:00 2001 From: juuso-j Date: Fri, 21 Jul 2023 11:55:31 +0300 Subject: [PATCH 40/84] Read SECRET_KEY from env --- smbackend/settings.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/smbackend/settings.py b/smbackend/settings.py index 91306fdb0..a9709de17 100644 --- a/smbackend/settings.py +++ b/smbackend/settings.py @@ -12,6 +12,7 @@ DEBUG=(bool, False), LANGUAGES=(list, ["fi", "sv", "en"]), DATABASE_URL=(str, "postgis:///servicemap"), + SECRET_KEY=(str, ""),
TRUST_X_FORWARDED_HOST=(bool, False), SECURE_PROXY_SSL_HEADER=(tuple, None), ALLOWED_HOSTS=(list, []), @@ -77,6 +78,7 @@ environ.Env.read_env(env_file_path) DEBUG = env("DEBUG") +SECRET_KEY = env("SECRET_KEY") TEMPLATE_DEBUG = False ALLOWED_HOSTS = env("ALLOWED_HOSTS") @@ -379,6 +381,7 @@ def preprocessing_filter_spec(endpoints): COOKIE_PREFIX = env("COOKIE_PREFIX") INTERNAL_IPS = env("INTERNAL_IPS") +# NOTE, Helsinki has removed generation of the SECRET_KEY if "SECRET_KEY" not in locals(): secret_file = os.path.join(BASE_DIR, ".django_secret") try: @@ -403,10 +406,11 @@ def preprocessing_filter_spec(endpoints): secret.write(SECRET_KEY) secret.close() except IOError: - Exception( + raise Exception( "Please create a %s file with random characters to generate your secret key!" % secret_file ) + TURKU_WFS_URL = env("TURKU_WFS_URL") PTV_ID_OFFSET = env("PTV_ID_OFFSET") GEO_SEARCH_LOCATION = env("GEO_SEARCH_LOCATION") From 86fe4a5be19ed92b09c7d4bba8872133872b1bcd Mon Sep 17 00:00:00 2001 From: juuso-j Date: Fri, 21 Jul 2023 12:47:55 +0300 Subject: [PATCH 41/84] Remove secret key generation --- smbackend/settings.py | 31 ------------------------------- 1 file changed, 31 deletions(-) diff --git a/smbackend/settings.py b/smbackend/settings.py index a9709de17..e9fca554a 100644 --- a/smbackend/settings.py +++ b/smbackend/settings.py @@ -380,37 +380,6 @@ def preprocessing_filter_spec(endpoints): COOKIE_PREFIX = env("COOKIE_PREFIX") INTERNAL_IPS = env("INTERNAL_IPS") - -# NOTE, Helsinki has removed generation of the SECRET_KEY -if "SECRET_KEY" not in locals(): - secret_file = os.path.join(BASE_DIR, ".django_secret") - try: - SECRET_KEY = open(secret_file).read().strip() - except IOError: - import random - - system_random = random.SystemRandom() - try: - SECRET_KEY = "".join( - [ - system_random.choice( - "abcdefghijklmnopqrstuvwxyz0123456789!@#$%^&*(-_=+)" - ) - for i in range(64) - ] - ) - secret = open(secret_file, "w") - import os - - os.chmod(secret_file, 0o0600) - 
secret.write(SECRET_KEY) - secret.close() - except IOError: - raise Exception( - "Please create a %s file with random characters to generate your secret key!" - % secret_file - ) - TURKU_WFS_URL = env("TURKU_WFS_URL") PTV_ID_OFFSET = env("PTV_ID_OFFSET") GEO_SEARCH_LOCATION = env("GEO_SEARCH_LOCATION") From 2299269294dfbee786123a9a96cf869f1e1b0411 Mon Sep 17 00:00:00 2001 From: juuso-j Date: Fri, 21 Jul 2023 13:43:18 +0300 Subject: [PATCH 42/84] Add logging examples --- config_dev.env.example | 19 ++++++++++++++++++- 1 file changed, 18 insertions(+), 1 deletion(-) diff --git a/config_dev.env.example b/config_dev.env.example index 296a49ad6..dc66fbe57 100644 --- a/config_dev.env.example +++ b/config_dev.env.example @@ -145,6 +145,23 @@ EMAIL_HOST_USER=example@example.com EMAIL_PORT=25 EMAIL_USE_TLS=True +# Django project log level, default INFO +DJANGO_LOG_LEVEL= +# Turku services importers log level, default DEBUG +TURKU_SERVICES_IMPORT_LOG_LEVEL= +# Search log level, default INFO +SEARCH_LOG_LEVEL= +# IoT APP, default INFO +IOT_LOG_LEVEL= +# Eco counter, default INFO +ECO_COUNTER_LOG_LEVEL= +# Mobility data (includes importers), default INFO +MOBILITY_DATA_LOG_LEVEL= +# Bicycle networks APP, default INFO +BICYCLE_NETWORK_LOG_LEVEL= +# Street maintenance, default INFO +STREET_MAINTENANCE_LOG_LEVEL= + # Settings needed for enabling Turku area: #ADDITIONAL_INSTALLED_APPS=smbackend_turku,ptv #TURKU_API_KEY=secret @@ -184,4 +201,4 @@ YIT_TOKEN_URL=https://login.microsoftonline.com/86792d09-0d81-4899-8d66-95dfc96c KUNTEC_KEY= # Telraam API token, required when fetching Telraam data to csv (import_telraam_to_csv.py) # https://telraam.helpspace-docs.io/article/27/you-wish-more-data-and-statistics-telraam-api -TELRAAM_TOKEN= \ No newline at end of file +TELRAAM_TOKEN= From 3c1e5b7b5ca8b7d9607ff09c9651ce5245cd3243 Mon Sep 17 00:00:00 2001 From: juuso-j Date: Fri, 21 Jul 2023 13:44:14 +0300 Subject: [PATCH 43/84] Read logging from env, improve default values --- 
smbackend/settings.py | 56 ++++++++++++++++++++++++++++++------------- 1 file changed, 40 insertions(+), 16 deletions(-) diff --git a/smbackend/settings.py b/smbackend/settings.py index e9fca554a..914bc1243 100644 --- a/smbackend/settings.py +++ b/smbackend/settings.py @@ -1,4 +1,5 @@ import os +from pathlib import Path import environ from django.conf.global_settings import LANGUAGES as GLOBAL_LANGUAGES @@ -6,13 +7,13 @@ CONFIG_FILE_NAME = "config_dev.env" - -root = environ.Path(__file__) - 2 # two levels back in hierarchy +# Build paths inside the project like this: os.path.join(BASE_DIR, ...) +BASE_DIR = str(Path(__file__).resolve().parent.parent) env = environ.Env( DEBUG=(bool, False), LANGUAGES=(list, ["fi", "sv", "en"]), DATABASE_URL=(str, "postgis:///servicemap"), - SECRET_KEY=(str, ""), + SECRET_KEY=(str, "temp_key"), TRUST_X_FORWARDED_HOST=(bool, False), SECURE_PROXY_SSL_HEADER=(tuple, None), ALLOWED_HOSTS=(list, []), @@ -21,10 +22,10 @@ COOKIE_PREFIX=(str, "servicemap"), INTERNAL_IPS=(list, []), CELERY_BROKER_URL=(str, "amqp://guest:guest@localhost:5672"), - MEDIA_ROOT=(environ.Path(), root("media")), - STATIC_ROOT=(environ.Path(), root("static")), - MEDIA_URL=(str, "/media/"), + STATIC_ROOT=(str, BASE_DIR + "/static"), + MEDIA_ROOT=(str, BASE_DIR + "/media"), STATIC_URL=(str, "/static/"), + MEDIA_URL=(str, "/media/"), OPEN311_URL_BASE=(str, None), OPEN311_API_KEY=(str, None), OPEN311_INTERNAL_API_KEY=(str, None), @@ -64,10 +65,16 @@ EMAIL_PORT=(int, None), EMAIL_USE_TLS=(bool, None), TELRAAM_TOKEN=(str, None), + DJANGO_LOG_LEVEL=(str, "INFO"), + TURKU_SERVICES_IMPORT_LOG_LEVEL=(str, "INFO"), + SEARCH_LOG_LEVEL=(str, "INFO"), + IOT_LOG_LEVEL=(str, "INFO"), + ECO_COUNTER_LOG_LEVEL=(str, "INFO"), + MOBILITY_DATA_LOG_LEVEL=(str, "INFO"), + BICYCLE_NETWORK_LOG_LEVEL=(str, "INFO"), + STREET_MAINTENANCE_LOG_LEVEL=(str, "INFO"), ) -# Build paths inside the project like this: os.path.join(BASE_DIR, ...) 
-BASE_DIR = root() # Django environ has a nasty habit of complanining at level # WARN about env file not being preset. Here we pre-empt it. @@ -81,6 +88,14 @@ SECRET_KEY = env("SECRET_KEY") TEMPLATE_DEBUG = False ALLOWED_HOSTS = env("ALLOWED_HOSTS") +DJANGO_LOG_LEVEL = env("DJANGO_LOG_LEVEL") +TURKU_SERVICES_IMPORT_LOG_LEVEL = env("TURKU_SERVICES_IMPORT_LOG_LEVEL") +SEARCH_LOG_LEVEL = env("SEARCH_LOG_LEVEL") +IOT_LOG_LEVEL = env("IOT_LOG_LEVEL") +ECO_COUNTER_LOG_LEVEL = env("ECO_COUNTER_LOG_LEVEL") +MOBILITY_DATA_LOG_LEVEL = env("MOBILITY_DATA_LOG_LEVEL") +BICYCLE_NETWORK_LOG_LEVEL = env("BICYCLE_NETWORK_LOG_LEVEL") +STREET_MAINTENANCE_LOG_LEVEL = env("STREET_MAINTENANCE_LOG_LEVEL") # Application definition INSTALLED_APPS = [ @@ -294,14 +309,23 @@ def gettext(s): "blackhole": {"class": "logging.NullHandler"}, }, "loggers": { - "django": {"handlers": ["console"], "level": "INFO"}, - "turku_services_import": {"handlers": ["console"], "level": "DEBUG"}, - "search": {"handlers": ["console"], "level": "DEBUG"}, - "iot": {"handlers": ["console"], "level": "INFO"}, - "eco_counter": {"handlers": ["console"], "level": "INFO"}, - "mobility_data": {"handlers": ["console"], "level": "INFO"}, - "bicycle_network": {"handlers": ["console"], "level": "INFO"}, - "street_maintenance": {"handlers": ["console"], "level": "INFO"}, + "django": {"handlers": ["console"], "level": DJANGO_LOG_LEVEL}, + "turku_services_import": { + "handlers": ["console"], + "level": TURKU_SERVICES_IMPORT_LOG_LEVEL, + }, + "search": {"handlers": ["console"], "level": SEARCH_LOG_LEVEL}, + "iot": {"handlers": ["console"], "level": IOT_LOG_LEVEL}, + "eco_counter": {"handlers": ["console"], "level": ECO_COUNTER_LOG_LEVEL}, + "mobility_data": {"handlers": ["console"], "level": MOBILITY_DATA_LOG_LEVEL}, + "bicycle_network": { + "handlers": ["console"], + "level": BICYCLE_NETWORK_LOG_LEVEL, + }, + "street_maintenance": { + "handlers": ["console"], + "level": STREET_MAINTENANCE_LOG_LEVEL, + }, }, } From 
53fcf1bbbcdcd4adbb7ebbb63d843eb04b7e799b Mon Sep 17 00:00:00 2001 From: juuso-j Date: Fri, 21 Jul 2023 13:53:03 +0300 Subject: [PATCH 44/84] Remove deprecated USE_L10N-setting --- smbackend/settings.py | 1 - 1 file changed, 1 deletion(-) diff --git a/smbackend/settings.py b/smbackend/settings.py index 914bc1243..9693881b0 100644 --- a/smbackend/settings.py +++ b/smbackend/settings.py @@ -178,7 +178,6 @@ def gettext(s): TIME_ZONE = "Europe/Helsinki" USE_I18N = True -USE_L10N = True USE_TZ = True USE_X_FORWARDED_HOST = env("TRUST_X_FORWARDED_HOST") From 960fae70a5282dd6872443848bbeff07847fe2dc Mon Sep 17 00:00:00 2001 From: juuso-j Date: Fri, 21 Jul 2023 14:12:38 +0300 Subject: [PATCH 45/84] Replace Raven with Sentry SDK --- requirements.in | 2 +- requirements.txt | 13 +++++++++---- smbackend/settings.py | 28 +++++++++++----------------- 3 files changed, 21 insertions(+), 22 deletions(-) diff --git a/requirements.in b/requirements.in index 6cdaf4f58..d29f20c45 100644 --- a/requirements.in +++ b/requirements.in @@ -14,7 +14,7 @@ django-extensions psycopg2-binary<2.9 django-mptt lxml>=4.9.1 -raven~=6.10.0 +sentry-sdk pip-tools python-dateutil pytest-django diff --git a/requirements.txt b/requirements.txt index 163484fce..94f3a0300 100644 --- a/requirements.txt +++ b/requirements.txt @@ -30,7 +30,9 @@ celery==5.2.3 # django-celery-beat # django-celery-results certifi==2022.12.7 - # via requests + # via + # requests + # sentry-sdk charset-normalizer==2.0.6 # via requests click==8.0.3 @@ -201,8 +203,6 @@ pyyaml==5.3.1 # via # django-munigeo # drf-spectacular -raven==6.10.0 - # via -r requirements.in redis==4.4.4 # via -r requirements.in requests==2.26.0 @@ -215,6 +215,8 @@ requests-cache==0.8.1 # via -r requirements.in requests-mock==1.9.3 # via -r requirements.in +sentry-sdk==1.9.0 + # via -r requirements.in shapely==1.8.0 # via -r requirements.in six==1.16.0 @@ -233,7 +235,9 @@ toml==0.10.2 # pytest # pytest-cov tomli==1.2.1 - # via pep517 + # via + # black + # pep517 
tqdm==4.62.3 # via -r requirements.in tzdata==2022.1 @@ -248,6 +252,7 @@ urllib3==1.26.7 # via # requests # requests-cache + # sentry-sdk vine==5.0.0 # via # amqp diff --git a/smbackend/settings.py b/smbackend/settings.py index 9693881b0..83b4f4aeb 100644 --- a/smbackend/settings.py +++ b/smbackend/settings.py @@ -2,8 +2,10 @@ from pathlib import Path import environ +import sentry_sdk from django.conf.global_settings import LANGUAGES as GLOBAL_LANGUAGES from django.core.exceptions import ImproperlyConfigured +from sentry_sdk.integrations.django import DjangoIntegration CONFIG_FILE_NAME = "config_dev.env" @@ -17,8 +19,8 @@ TRUST_X_FORWARDED_HOST=(bool, False), SECURE_PROXY_SSL_HEADER=(tuple, None), ALLOWED_HOSTS=(list, []), - SENTRY_DSN=(str, None), - SENTRY_ENVIRONMENT=(str, "development"), + SENTRY_DSN=(str, ""), + SENTRY_ENVIRONMENT=(str, ""), COOKIE_PREFIX=(str, "servicemap"), INTERNAL_IPS=(list, []), CELERY_BROKER_URL=(str, "amqp://guest:guest@localhost:5672"), @@ -107,7 +109,6 @@ "django.contrib.staticfiles", "django.contrib.gis", "django.contrib.postgres", - "raven.contrib.django.raven_compat", "rest_framework.authtoken", "rest_framework", "corsheaders", @@ -359,11 +360,6 @@ def preprocessing_filter_spec(endpoints): LOCALE_PATHS = (os.path.join(BASE_DIR, "locale"),) -SENTRY_DSN = env("SENTRY_DSN") -SENTRY_ENVIRONMENT = env("SENTRY_ENVIRONMENT") - -import raven # noqa - # Celery CELERY_BROKER_URL = env("CELERY_BROKER_URL") CELERY_RESULT_BACKEND = "django-db" @@ -391,15 +387,13 @@ def preprocessing_filter_spec(endpoints): } } - -if SENTRY_DSN: - RAVEN_CONFIG = { - "dsn": SENTRY_DSN, - # Needs to change if settings.py is not in an immediate child of the project - "release": raven.fetch_git_sha(os.path.dirname(os.pardir)), - "environment": SENTRY_ENVIRONMENT, - } - +sentry_sdk.init( + dsn=env.str("SENTRY_DSN"), + environment=env.str("SENTRY_ENVIRONMENT"), + traces_sample_rate=1.0, + send_default_pii=True, + integrations=[DjangoIntegration()], +) COOKIE_PREFIX = 
env("COOKIE_PREFIX") INTERNAL_IPS = env("INTERNAL_IPS") From e667b8debb6b67b0d840f4ec81d5db912a6a8f38 Mon Sep 17 00:00:00 2001 From: juuso-j Date: Fri, 21 Jul 2023 14:17:17 +0300 Subject: [PATCH 46/84] Utilize GDAL and GEOS paths for Django --- smbackend/settings.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/smbackend/settings.py b/smbackend/settings.py index 83b4f4aeb..dcaa13c07 100644 --- a/smbackend/settings.py +++ b/smbackend/settings.py @@ -8,6 +8,8 @@ from sentry_sdk.integrations.django import DjangoIntegration CONFIG_FILE_NAME = "config_dev.env" +GDAL_LIBRARY_PATH = os.environ.get("GDAL_LIBRARY_PATH") +GEOS_LIBRARY_PATH = os.environ.get("GEOS_LIBRARY_PATH") # Build paths inside the project like this: os.path.join(BASE_DIR, ...) BASE_DIR = str(Path(__file__).resolve().parent.parent) From fc176a349650bf5c698d4a47467f3d1573fe5c35 Mon Sep 17 00:00:00 2001 From: juuso-j Date: Fri, 21 Jul 2023 14:28:49 +0300 Subject: [PATCH 47/84] Update logging to work with dict config --- smbackend/settings.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/smbackend/settings.py b/smbackend/settings.py index dcaa13c07..1615fb9bd 100644 --- a/smbackend/settings.py +++ b/smbackend/settings.py @@ -1,3 +1,4 @@ +import logging.config import os from pathlib import Path @@ -330,6 +331,7 @@ def gettext(s): }, }, } +logging.config.dictConfig(LOGGING) # Define the endpoints for API documentation with drf-spectacular. 
DOC_ENDPOINTS = [ From 1f37f74215355c751829691ec38d1ad31846f45e Mon Sep 17 00:00:00 2001 From: juuso-j Date: Fri, 21 Jul 2023 14:30:46 +0300 Subject: [PATCH 48/84] Update version --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index ff0dc934c..db52eb478 100644 --- a/setup.py +++ b/setup.py @@ -2,7 +2,7 @@ setup( name="smbackend", - version="210929", + version="230717", license="AGPLv3", packages=find_packages(), include_package_data=True, From ecb26b8dfa1618f493ceb9e7773936a8ac598ab4 Mon Sep 17 00:00:00 2001 From: juuso-j Date: Tue, 25 Jul 2023 15:00:04 +0300 Subject: [PATCH 49/84] Add function for getting data sources --- mobility_data/importers/bicycle_stands.py | 30 +++++++++++------------ 1 file changed, 15 insertions(+), 15 deletions(-) diff --git a/mobility_data/importers/bicycle_stands.py b/mobility_data/importers/bicycle_stands.py index 53ea9bc68..672dfff6a 100644 --- a/mobility_data/importers/bicycle_stands.py +++ b/mobility_data/importers/bicycle_stands.py @@ -198,24 +198,24 @@ def set_gml_feature(self, feature): self.prefix_name = {k: f"{NAME_PREFIX[k]} {v}" for k, v in self.name.items()} -def get_bicycle_stand_objects(data_source=None): +def get_data_sources(): + data_sources = [] + # Add the WFS datasource that is in GML format + ds = DataSource(BICYCLE_STANDS_URL) + data_sources.append(("gml", ds)) + # Add the GEOJSON datasource which is a file + data_path = os.path.join(get_root_dir(), "mobility_data/data") + file_path = os.path.join(data_path, GEOJSON_FILENAME) + ds = DataSource(file_path) + data_sources.append(("geojson", ds)) + return data_sources + + +def get_bicycle_stand_objects(): """ Returns a list containg instances of BicycleStand class. 
""" - data_sources = [] - - if data_source: - data_sources.append(data_source) - else: - # Add the WFS datasource that is in GML format - ds = DataSource(BICYCLE_STANDS_URL) - data_sources.append(("gml", ds)) - # Add the GEOJSON datasource which is a file - data_path = os.path.join(get_root_dir(), "mobility_data/data") - file_path = os.path.join(data_path, GEOJSON_FILENAME) - ds = DataSource(file_path) - data_sources.append(("geojson", ds)) - + data_sources = get_data_sources() bicycle_stands = [] """ external_stands dict is used to keep track of the names of imported external stands From 5bc1b3f421aed31f5b09da55d3be2a6acbe34fcf Mon Sep 17 00:00:00 2001 From: juuso-j Date: Tue, 25 Jul 2023 15:00:55 +0300 Subject: [PATCH 50/84] Add function for getting the data layer --- .../importers/bike_service_stations.py | 24 +++++++++---------- 1 file changed, 11 insertions(+), 13 deletions(-) diff --git a/mobility_data/importers/bike_service_stations.py b/mobility_data/importers/bike_service_stations.py index 1a5870c00..3dff7e082 100644 --- a/mobility_data/importers/bike_service_stations.py +++ b/mobility_data/importers/bike_service_stations.py @@ -64,20 +64,18 @@ def __init__(self, feature): self.extra["in_terrain"] = feature["Maastossa"].as_string() -def get_bike_service_station_objects(geojson_file=None): - bicycle_repair_points = [] - file_name = None - if not geojson_file: - file_name = get_file_name_from_data_source(CONTENT_TYPE_NAME) - if not file_name: - file_name = f"{get_root_dir()}/mobility_data/data/{GEOJSON_FILENAME}" - else: - file_name = f"{get_root_dir()}/mobility_data/data/{GEOJSON_FILENAME}" +def get_data_layer(): + file_name = get_file_name_from_data_source(CONTENT_TYPE_NAME) + if not file_name: + file_name = f"{get_root_dir()}/mobility_data/data/{GEOJSON_FILENAME}" else: - # Use the test data file - file_name = f"{get_root_dir()}/mobility_data/tests/data/{geojson_file}" - + file_name = f"{get_root_dir()}/mobility_data/data/{GEOJSON_FILENAME}" data_layer = 
GDALDataSource(file_name)[0] - for feature in data_layer: + return data_layer + + +def get_bike_service_station_objects(): + bicycle_repair_points = [] + for feature in get_data_layer(): bicycle_repair_points.append(BikeServiceStation(feature)) return bicycle_repair_points From 0c0f9659d76d0c9a337f91c63a05c41e0193a8c6 Mon Sep 17 00:00:00 2001 From: juuso-j Date: Tue, 25 Jul 2023 15:06:56 +0300 Subject: [PATCH 51/84] Move geojson file name retrieving to a function --- mobility_data/importers/charging_stations.py | 18 ++++++++--------- .../disabled_and_no_staff_parking.py | 20 +++++++++---------- 2 files changed, 18 insertions(+), 20 deletions(-) diff --git a/mobility_data/importers/charging_stations.py b/mobility_data/importers/charging_stations.py index fbebc6441..21f80a9ad 100644 --- a/mobility_data/importers/charging_stations.py +++ b/mobility_data/importers/charging_stations.py @@ -111,18 +111,18 @@ def get_number_of_rows(file_name): return number_of_rows -def get_charging_station_objects(csv_file=None): +def get_csv_file_name(): + file_name = get_file_name_from_data_source(CONTENT_TYPE_NAME) + if file_name: + return file_name + return f"{get_root_dir()}/mobility_data/data/{SOURCE_DATA_FILE_NAME}" + + +def get_charging_station_objects(): # Store the imported stations to dict, the index is the key. 
+ file_name = get_csv_file_name() charging_stations = {} column_mappings = {} - if not csv_file: - file_name = get_file_name_from_data_source(CONTENT_TYPE_NAME) - if not file_name: - file_name = f"{get_root_dir()}/mobility_data/data/{SOURCE_DATA_FILE_NAME}" - else: - # Use the test data file - file_name = f"{get_root_dir()}/mobility_data/tests/data/{csv_file}" - number_of_rows = get_number_of_rows(file_name) with open(file_name, encoding="utf-8-sig") as csv_file: csv_reader = csv.reader(csv_file, delimiter=";") diff --git a/mobility_data/importers/disabled_and_no_staff_parking.py b/mobility_data/importers/disabled_and_no_staff_parking.py index 1fd6982d7..4c54ec8d7 100644 --- a/mobility_data/importers/disabled_and_no_staff_parking.py +++ b/mobility_data/importers/disabled_and_no_staff_parking.py @@ -135,19 +135,17 @@ def __init__(self, feature): self.extra[field_name] = feature[field].as_double() -def get_no_staff_parking_objects(geojson_file=None): - no_staff_parkings = [] - disabled_parkings = [] - file_name = None +def get_geojson_file_name(): + file_name = get_file_name_from_data_source(NO_STAFF_PARKING_CONTENT_TYPE_NAME) + if file_name: + return file_name + return f"{get_root_dir()}/mobility_data/data/{GEOJSON_FILENAME}" - if not geojson_file: - file_name = get_file_name_from_data_source(NO_STAFF_PARKING_CONTENT_TYPE_NAME) - if not file_name: - file_name = f"{get_root_dir()}/mobility_data/data/{GEOJSON_FILENAME}" - else: - # Use the test data file - file_name = f"{get_root_dir()}/mobility_data/tests/data/{geojson_file}" +def get_no_staff_parking_objects(): + no_staff_parkings = [] + disabled_parkings = [] + file_name = get_geojson_file_name() data_layer = GDALDataSource(file_name)[0] for feature in data_layer: From 6e0d5c3d3ced652f6cb3c92334f07584883ff0e0 Mon Sep 17 00:00:00 2001 From: juuso-j Date: Tue, 25 Jul 2023 15:08:14 +0300 Subject: [PATCH 52/84] Move geojson file name retrieving to a function --- .../importers/loading_unloading_places.py | 18 
++++++++---------- 1 file changed, 8 insertions(+), 10 deletions(-) diff --git a/mobility_data/importers/loading_unloading_places.py b/mobility_data/importers/loading_unloading_places.py index 6a5e03f00..0e6e9d7ca 100644 --- a/mobility_data/importers/loading_unloading_places.py +++ b/mobility_data/importers/loading_unloading_places.py @@ -104,18 +104,16 @@ def __init__(self, feature): self.extra[field_name] = feature[field].as_int() -def get_loading_and_unloading_objects(geojson_file=None): - objects = [] - file_name = None +def get_geojson_file_name(): + file_name = get_file_name_from_data_source(CONTENT_TYPE_NAME) + if file_name: + return file_name + return f"{get_root_dir()}/mobility_data/data/{GEOJSON_FILENAME}" - if not geojson_file: - file_name = get_file_name_from_data_source(CONTENT_TYPE_NAME) - if not file_name: - file_name = f"{get_root_dir()}/mobility_data/data/{GEOJSON_FILENAME}" - else: - # Use the test data file - file_name = f"{get_root_dir()}/mobility_data/tests/data/{geojson_file}" +def get_loading_and_unloading_objects(): + objects = [] + file_name = get_geojson_file_name() data_layer = GDALDataSource(file_name)[0] for feature in data_layer: objects.append(LoadingPlace(feature)) From ec75be24cec0f73f3f378ce2d62aa92ccfd11201 Mon Sep 17 00:00:00 2001 From: juuso-j Date: Tue, 25 Jul 2023 15:08:49 +0300 Subject: [PATCH 53/84] Fix typo --- mobility_data/importers/lounaistieto_shapefiles.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/mobility_data/importers/lounaistieto_shapefiles.py b/mobility_data/importers/lounaistieto_shapefiles.py index 14ce4fd73..395b76a3a 100644 --- a/mobility_data/importers/lounaistieto_shapefiles.py +++ b/mobility_data/importers/lounaistieto_shapefiles.py @@ -123,5 +123,5 @@ def import_lounaistieto_data_source(config): if obj.add_feature(feature, config, srid): objects.append(obj) content_type = get_or_create_content_type_from_config(config["content_type_name"]) - num_ceated, num_deleted = 
save_to_database(objects, content_type) - log_imported_message(logger, content_type, num_ceated, num_deleted) + num_created, num_deleted = save_to_database(objects, content_type) + log_imported_message(logger, content_type, num_created, num_deleted) From f1a2dc64438e58a15fe6f00c9fc13f06b3a42531 Mon Sep 17 00:00:00 2001 From: juuso-j Date: Tue, 25 Jul 2023 15:09:40 +0300 Subject: [PATCH 54/84] Move geojson file name retrieving to a function --- .../importers/share_car_parking_places.py | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/mobility_data/importers/share_car_parking_places.py b/mobility_data/importers/share_car_parking_places.py index eb31dd927..bed241ed1 100644 --- a/mobility_data/importers/share_car_parking_places.py +++ b/mobility_data/importers/share_car_parking_places.py @@ -54,16 +54,16 @@ def __init__(self, feature): self.extra[self.RESTRICTION_FIELD][language] = restrictions[i].strip() +def get_geojson_file_name(): + file_name = get_file_name_from_data_source(CONTENT_TYPE_NAME) + if file_name: + return file_name + return f"{get_root_dir()}/mobility_data/data/{GEOJSON_FILENAME}" + + def get_car_share_parking_place_objects(geojson_file=None): car_share_parking_places = [] - file_name = None - if not geojson_file: - file_name = get_file_name_from_data_source(CONTENT_TYPE_NAME) - if not file_name: - file_name = f"{get_root_dir()}/mobility_data/data/{GEOJSON_FILENAME}" - else: - # Use the test data file - file_name = f"{get_root_dir()}/mobility_data/tests/data/{geojson_file}" + file_name = get_geojson_file_name() data_layer = GDALDataSource(file_name)[0] for feature in data_layer: From e9a71690382816dece222a9cce5d6bf333dd2c02 Mon Sep 17 00:00:00 2001 From: juuso-j Date: Tue, 25 Jul 2023 15:10:24 +0300 Subject: [PATCH 55/84] Catch GDAL exception, add function to get the data source --- mobility_data/importers/wfs.py | 54 +++++++++++++++++++--------------- 1 file changed, 30 insertions(+), 24 deletions(-) diff --git 
a/mobility_data/importers/wfs.py b/mobility_data/importers/wfs.py index 15cf5bd25..2c89e6e70 100644 --- a/mobility_data/importers/wfs.py +++ b/mobility_data/importers/wfs.py @@ -4,6 +4,7 @@ from django import db from django.conf import settings from django.contrib.gis.gdal import DataSource +from django.contrib.gis.gdal.error import GDALException from django.contrib.gis.geos import GEOSGeometry, MultiPolygon, Polygon from munigeo.models import Municipality @@ -67,22 +68,24 @@ def add_feature(self, feature, config): if config.get("locates_in_turku", False): if not locates_in_turku(feature, source_srid): return False - # If geometry contains multiple polygons and create_multipolygon attribute is True # create one multipolygon from the polygons. - if ( - len(feature.geom.coords) > 1 - and create_multipolygon - and isinstance(feature.geom, gdalgeometries.Polygon) - ): - polygons = [] - for coords in feature.geom.coords: - polygons.append(Polygon(coords, srid=source_srid)) - self.geometry = MultiPolygon(polygons, srid=source_srid) - else: - self.geometry = GEOSGeometry(feature.geom.wkt, srid=source_srid) - self.geometry.transform(settings.DEFAULT_SRID) - + try: + if ( + len(feature.geom.coords) > 1 + and create_multipolygon + and isinstance(feature.geom, gdalgeometries.Polygon) + ): + polygons = [] + for coords in feature.geom.coords: + polygons.append(Polygon(coords, srid=source_srid)) + self.geometry = MultiPolygon(polygons, srid=source_srid) + else: + self.geometry = GEOSGeometry(feature.geom.wkt, srid=source_srid) + self.geometry.transform(settings.DEFAULT_SRID) + except GDALException as ex: + logger.error(ex) + return False if "municipality" in config: municipality = feature[config["municipality"]].as_string() if municipality: @@ -129,8 +132,17 @@ def add_feature(self, feature, config): return True -def import_wfs_feature(config, data_file=None): +def get_data_source(config): max_features = DEFAULT_MAX_FEATURES + wfs_url = config.get("wfs_url", 
settings.TURKU_WFS_URL) + url = WFS_URL.format( + wfs_url=wfs_url, wfs_layer=config["wfs_layer"], max_features=max_features + ) + ds = DataSource(url) + return ds + + +def import_wfs_feature(config, data_file=None): if "content_type_name" not in config: logger.warning(f"Skipping feature {config}, 'content_type_name' is required.") return False @@ -139,17 +151,11 @@ def import_wfs_feature(config, data_file=None): return False if "max_features" in config: max_features = config["max_features"] - wfs_layer = config["wfs_layer"] objects = [] if data_file: ds = DataSource(data_file) else: - wfs_url = config.get("wfs_url", settings.TURKU_WFS_URL) - - url = WFS_URL.format( - wfs_url=wfs_url, wfs_layer=wfs_layer, max_features=max_features - ) - ds = DataSource(url) + ds = get_data_source(config) assert len(ds) == 1 layer = ds[0] for feature in layer: @@ -157,5 +163,5 @@ def import_wfs_feature(config, data_file=None): if object.add_feature(feature, config): objects.append(object) content_type = get_or_create_content_type_from_config(config["content_type_name"]) - num_ceated, num_deleted = save_to_database(objects, content_type) - log_imported_message(logger, content_type, num_ceated, num_deleted) + num_created, num_deleted = save_to_database(objects, content_type) + log_imported_message(logger, content_type, num_created, num_deleted) From 14d7078dfde60da6a2c8ffc2aa7f9ff2ddd840a2 Mon Sep 17 00:00:00 2001 From: juuso-j Date: Tue, 25 Jul 2023 15:12:33 +0300 Subject: [PATCH 56/84] Remove empty file, functionality moved --- mobility_data/management/commands/_utils.py | 13 ------------- 1 file changed, 13 deletions(-) delete mode 100644 mobility_data/management/commands/_utils.py diff --git a/mobility_data/management/commands/_utils.py b/mobility_data/management/commands/_utils.py deleted file mode 100644 index 9c236e3cf..000000000 --- a/mobility_data/management/commands/_utils.py +++ /dev/null @@ -1,13 +0,0 @@ -from django.conf import settings -from django.contrib.gis.gdal import 
DataSource - -from mobility_data.models import ContentType - - -def get_test_gdal_data_source(file_name): - """ - Returns the given file_name as a GDAL Datasource, - the file must be located in /mobility_data/tests/data/ - """ - path = f"{settings.BASE_DIR}/{ContentType._meta.app_label}/tests/data/" - return DataSource(path + file_name) From 36bb0a8a599e5cb90ae3ab2350c0891b5e1ffc54 Mon Sep 17 00:00:00 2001 From: juuso-j Date: Tue, 25 Jul 2023 15:14:17 +0300 Subject: [PATCH 57/84] Remove obsolete 'the' --- mobility_data/management/commands/delete_deprecated_units.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mobility_data/management/commands/delete_deprecated_units.py b/mobility_data/management/commands/delete_deprecated_units.py index e3e8c6e9c..9cd074122 100644 --- a/mobility_data/management/commands/delete_deprecated_units.py +++ b/mobility_data/management/commands/delete_deprecated_units.py @@ -5,7 +5,7 @@ """ This command removes all units that have a ContentType or GroupType where type_name is not Null. This data is deprecated -as the only the name will be used in future. +as only the name will be used in future. 
""" From 04d6774ee9a108ab6568a1b1e2c381bfba8a5017 Mon Sep 17 00:00:00 2001 From: juuso-j Date: Tue, 25 Jul 2023 15:15:42 +0300 Subject: [PATCH 58/84] Fix typo --- .../management/commands/import_foli_parkandride_stops.py | 8 ++++---- mobility_data/management/commands/import_foli_stops.py | 4 ++-- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/mobility_data/management/commands/import_foli_parkandride_stops.py b/mobility_data/management/commands/import_foli_parkandride_stops.py index f2917c459..ba14a74e1 100644 --- a/mobility_data/management/commands/import_foli_parkandride_stops.py +++ b/mobility_data/management/commands/import_foli_parkandride_stops.py @@ -23,11 +23,11 @@ def handle(self, *args, **options): content_type = get_or_create_content_type_from_config( FOLI_PARKANDRIDE_CARS_STOP_CONTENT_TYPE_NAME ) - num_ceated, num_deleted = save_to_database(car_stops, content_type) - log_imported_message(logger, content_type, num_ceated, num_deleted) + num_created, num_deleted = save_to_database(car_stops, content_type) + log_imported_message(logger, content_type, num_created, num_deleted) content_type = get_or_create_content_type_from_config( FOLI_PARKANDRIDE_BIKES_STOP_CONTENT_TYPE_NAME ) bike_stops = get_parkandride_bike_stop_objects() - num_ceated, num_deleted = save_to_database(bike_stops, content_type) - log_imported_message(logger, content_type, num_ceated, num_deleted) + num_created, num_deleted = save_to_database(bike_stops, content_type) + log_imported_message(logger, content_type, num_created, num_deleted) diff --git a/mobility_data/management/commands/import_foli_stops.py b/mobility_data/management/commands/import_foli_stops.py index 022f08281..79fc67a5c 100644 --- a/mobility_data/management/commands/import_foli_stops.py +++ b/mobility_data/management/commands/import_foli_stops.py @@ -17,5 +17,5 @@ def handle(self, *args, **options): logger.info("Importing Föli stops") objects = get_foli_stops() content_type = 
get_or_create_content_type_from_config(CONTENT_TYPE_NAME) - num_ceated, num_deleted = save_to_database(objects, content_type) - log_imported_message(logger, content_type, num_ceated, num_deleted) + num_created, num_deleted = save_to_database(objects, content_type) + log_imported_message(logger, content_type, num_created, num_deleted) From 44b3570a9469c79ec7d1ce22195aa06103074185 Mon Sep 17 00:00:00 2001 From: juuso-j Date: Tue, 25 Jul 2023 15:17:09 +0300 Subject: [PATCH 59/84] Replace inheritance from BaseImportCommand with BaseCommand --- .../management/commands/import_lounaistieto_shapefiles.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/mobility_data/management/commands/import_lounaistieto_shapefiles.py b/mobility_data/management/commands/import_lounaistieto_shapefiles.py index 060b2b3f6..2a2b3c00b 100644 --- a/mobility_data/management/commands/import_lounaistieto_shapefiles.py +++ b/mobility_data/management/commands/import_lounaistieto_shapefiles.py @@ -2,19 +2,18 @@ import os import yaml +from django.core.management import BaseCommand from mobility_data.importers.lounaistieto_shapefiles import ( import_lounaistieto_data_source, ) from mobility_data.importers.utils import delete_mobile_units, get_root_dir -from ._base_import_command import BaseImportCommand - logger = logging.getLogger("mobility_data") CONFIG_FILE = "lounaistieto_shapefiles_config.yml" -class Command(BaseImportCommand): +class Command(BaseCommand): def add_arguments(self, parser): parser.add_argument( "-d", From b06c1987d34e3a1bbbbacc97b90d785004946854 Mon Sep 17 00:00:00 2001 From: juuso-j Date: Tue, 25 Jul 2023 15:18:48 +0300 Subject: [PATCH 60/84] Replace inheritance from BaseImportCommand with BaseCommand --- .../management/commands/import_marinas.py | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/mobility_data/management/commands/import_marinas.py b/mobility_data/management/commands/import_marinas.py index c5bd2afd5..ab5a85e8a 
100644 --- a/mobility_data/management/commands/import_marinas.py +++ b/mobility_data/management/commands/import_marinas.py @@ -1,5 +1,7 @@ import logging +from django.core.management import BaseCommand + from mobility_data.importers.marinas import ( BOAT_PARKING_CONTENT_TYPE_NAME, get_boat_parkings, @@ -14,28 +16,26 @@ save_to_database, ) -from ._base_import_command import BaseImportCommand - logger = logging.getLogger("mobility_data") -class Command(BaseImportCommand): +class Command(BaseCommand): def handle(self, *args, **options): objects = get_marinas() content_type = get_or_create_content_type_from_config(MARINA_CONTENT_TYPE_NAME) - num_ceated, num_deleted = save_to_database(objects, content_type) - log_imported_message(logger, content_type, num_ceated, num_deleted) + num_created, num_deleted = save_to_database(objects, content_type) + log_imported_message(logger, content_type, num_created, num_deleted) objects = get_boat_parkings() content_type = get_or_create_content_type_from_config( BOAT_PARKING_CONTENT_TYPE_NAME ) - num_ceated, num_deleted = save_to_database(objects, content_type) - log_imported_message(logger, content_type, num_ceated, num_deleted) + num_created, num_deleted = save_to_database(objects, content_type) + log_imported_message(logger, content_type, num_created, num_deleted) objects = get_guest_marinas() content_type = get_or_create_content_type_from_config( GUEST_MARINA_CONTENT_TYPE_NAME ) - num_ceated, num_deleted = save_to_database(objects, content_type) - log_imported_message(logger, content_type, num_ceated, num_deleted) + num_created, num_deleted = save_to_database(objects, content_type) + log_imported_message(logger, content_type, num_created, num_deleted) From f9f5216d717aea0b1193d0bfc480f6e89f398d96 Mon Sep 17 00:00:00 2001 From: juuso-j Date: Tue, 25 Jul 2023 15:19:26 +0300 Subject: [PATCH 61/84] Replace inheritance from BaseImportCommand with BaseCommand --- .../management/commands/import_outdoor_gym_devices.py | 10 +++++----- 1 
file changed, 5 insertions(+), 5 deletions(-) diff --git a/mobility_data/management/commands/import_outdoor_gym_devices.py b/mobility_data/management/commands/import_outdoor_gym_devices.py index aa1232eb2..b32eb7831 100644 --- a/mobility_data/management/commands/import_outdoor_gym_devices.py +++ b/mobility_data/management/commands/import_outdoor_gym_devices.py @@ -1,5 +1,7 @@ import logging +from django.core.management import BaseCommand + from mobility_data.importers.outdoor_gym_devices import ( CONTENT_TYPE_NAME, get_oudoor_gym_devices, @@ -10,14 +12,12 @@ save_to_database, ) -from ._base_import_command import BaseImportCommand - logger = logging.getLogger("mobility_data") -class Command(BaseImportCommand): +class Command(BaseCommand): def handle(self, *args, **options): objects = get_oudoor_gym_devices() content_type = get_or_create_content_type_from_config(CONTENT_TYPE_NAME) - num_ceated, num_deleted = save_to_database(objects, content_type) - log_imported_message(logger, content_type, num_ceated, num_deleted) + num_created, num_deleted = save_to_database(objects, content_type) + log_imported_message(logger, content_type, num_created, num_deleted) From 31d3553e1b925251fa7ba1b979c9dff4fe04739a Mon Sep 17 00:00:00 2001 From: juuso-j Date: Tue, 25 Jul 2023 15:20:03 +0300 Subject: [PATCH 62/84] Fix typo --- .../management/commands/import_under_and_overpasses.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/mobility_data/management/commands/import_under_and_overpasses.py b/mobility_data/management/commands/import_under_and_overpasses.py index ead258163..79e0202b0 100644 --- a/mobility_data/management/commands/import_under_and_overpasses.py +++ b/mobility_data/management/commands/import_under_and_overpasses.py @@ -22,10 +22,10 @@ def handle(self, *args, **options): content_type = get_or_create_content_type_from_config( UNDERPASS_CONTENT_TYPE_NAME ) - num_ceated, num_deleted = save_to_database(underpass_objects, content_type) - 
log_imported_message(logger, content_type, num_ceated, num_deleted) + num_created, num_deleted = save_to_database(underpass_objects, content_type) + log_imported_message(logger, content_type, num_created, num_deleted) content_type = get_or_create_content_type_from_config( OVERPASS_CONTENT_TYPE_NAME ) - num_ceated, num_deleted = save_to_database(overpass_objects, content_type) - log_imported_message(logger, content_type, num_ceated, num_deleted) + num_created, num_deleted = save_to_database(overpass_objects, content_type) + log_imported_message(logger, content_type, num_created, num_deleted) From 36a95eed724f4cc9b50a08c3970834293e774f75 Mon Sep 17 00:00:00 2001 From: juuso-j Date: Tue, 25 Jul 2023 15:21:40 +0300 Subject: [PATCH 63/84] Fix typo --- mobility_data/management/commands/import_parking_machines.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/mobility_data/management/commands/import_parking_machines.py b/mobility_data/management/commands/import_parking_machines.py index 8ffadd6b7..97a032f87 100644 --- a/mobility_data/management/commands/import_parking_machines.py +++ b/mobility_data/management/commands/import_parking_machines.py @@ -19,5 +19,5 @@ class Command(BaseCommand): def handle(self, *args, **options): objects = get_parking_machine_objects() content_type = get_or_create_content_type_from_config(CONTENT_TYPE_NAME) - num_ceated, num_deleted = save_to_database(objects, content_type) - log_imported_message(logger, content_type, num_ceated, num_deleted) + num_created, num_deleted = save_to_database(objects, content_type) + log_imported_message(logger, content_type, num_created, num_deleted) From f760b9cd9470674aaaff869f971097aa1208dc25 Mon Sep 17 00:00:00 2001 From: juuso-j Date: Wed, 26 Jul 2023 07:48:35 +0300 Subject: [PATCH 64/84] Replace BaseImportCommand with BaseCommand and remove obsolete code --- .../commands/import_bicycle_stands.py | 30 +++++-------------- .../commands/import_bike_service_stations.py | 16 ++++------ 
.../commands/import_charging_stations.py | 12 +++----- .../import_disabled_and_no_staff_parkings.py | 19 +++++------- .../import_loading_and_unloading_places.py | 15 ++++------ .../import_share_car_parking_places.py | 16 ++++------ 6 files changed, 37 insertions(+), 71 deletions(-) diff --git a/mobility_data/management/commands/import_bicycle_stands.py b/mobility_data/management/commands/import_bicycle_stands.py index ca324392e..73c3c4de9 100644 --- a/mobility_data/management/commands/import_bicycle_stands.py +++ b/mobility_data/management/commands/import_bicycle_stands.py @@ -1,5 +1,7 @@ import logging +from django.core.management import BaseCommand + from mobility_data.importers.bicycle_stands import ( BICYCLE_STANDS_URL, CONTENT_TYPE_NAME, @@ -11,31 +13,13 @@ save_to_database, ) -from ._base_import_command import BaseImportCommand -from ._utils import get_test_gdal_data_source - logger = logging.getLogger("mobility_data") -class Command(BaseImportCommand): +class Command(BaseCommand): def handle(self, *args, **options): - logger.info("Importing bicycle stands.") - if options["test_mode"]: - logger.info("Running bicycle stand importer in test mode.") - file_name = options["test_mode"] - data_source = None - ds = get_test_gdal_data_source(file_name) - - if file_name.endswith("gml"): - data_source = ("gml", ds) - elif file_name.endswith("geojson"): - data_source = ("geojson", ds) - - objects = get_bicycle_stand_objects(data_source=data_source) - else: - logger.info("Fetching bicycle stands from: {}".format(BICYCLE_STANDS_URL)) - objects = get_bicycle_stand_objects() - + logger.info("Importing bicycle stands from: {}".format(BICYCLE_STANDS_URL)) + objects = get_bicycle_stand_objects() content_type = get_or_create_content_type_from_config(CONTENT_TYPE_NAME) - num_ceated, num_deleted = save_to_database(objects, content_type) - log_imported_message(logger, content_type, num_ceated, num_deleted) + num_created, num_deleted = save_to_database(objects, content_type) + 
log_imported_message(logger, content_type, num_created, num_deleted) diff --git a/mobility_data/management/commands/import_bike_service_stations.py b/mobility_data/management/commands/import_bike_service_stations.py index 9bcce1976..b63ab8b34 100644 --- a/mobility_data/management/commands/import_bike_service_stations.py +++ b/mobility_data/management/commands/import_bike_service_stations.py @@ -1,5 +1,7 @@ import logging +from django.core.management import BaseCommand + from mobility_data.importers.bike_service_stations import ( CONTENT_TYPE_NAME, get_bike_service_station_objects, @@ -10,19 +12,13 @@ save_to_database, ) -from ._base_import_command import BaseImportCommand - logger = logging.getLogger("mobility_data") -class Command(BaseImportCommand): +class Command(BaseCommand): def handle(self, *args, **options): logger.info("Importing bike service stations.") - geojson_file = None - if options["test_mode"]: - geojson_file = options["test_mode"] - - objects = get_bike_service_station_objects(geojson_file=geojson_file) + objects = get_bike_service_station_objects() content_type = get_or_create_content_type_from_config(CONTENT_TYPE_NAME) - num_ceated, num_deleted = save_to_database(objects, content_type) - log_imported_message(logger, content_type, num_ceated, num_deleted) + num_created, num_deleted = save_to_database(objects, content_type) + log_imported_message(logger, content_type, num_created, num_deleted) diff --git a/mobility_data/management/commands/import_charging_stations.py b/mobility_data/management/commands/import_charging_stations.py index acd22e813..7c4066cbf 100644 --- a/mobility_data/management/commands/import_charging_stations.py +++ b/mobility_data/management/commands/import_charging_stations.py @@ -1,5 +1,7 @@ import logging +from django.core.management import BaseCommand + from mobility_data.importers.charging_stations import ( CONTENT_TYPE_NAME, get_charging_station_objects, @@ -10,19 +12,13 @@ save_to_database, ) -from ._base_import_command 
import BaseImportCommand - logger = logging.getLogger("mobility_data") -class Command(BaseImportCommand): +class Command(BaseCommand): def handle(self, *args, **options): logger.info("Importing charging stations...") - csv_file = None - if options["test_mode"]: - logger.info("Running charging_station_importer in test mode.") - csv_file = options["test_mode"] - objects = get_charging_station_objects(csv_file=csv_file) + objects = get_charging_station_objects() content_type = get_or_create_content_type_from_config(CONTENT_TYPE_NAME) num_created, num_deleted = save_to_database(objects, content_type) log_imported_message(logger, content_type, num_created, num_deleted) diff --git a/mobility_data/management/commands/import_disabled_and_no_staff_parkings.py b/mobility_data/management/commands/import_disabled_and_no_staff_parkings.py index b4c21b859..2b468d35a 100644 --- a/mobility_data/management/commands/import_disabled_and_no_staff_parkings.py +++ b/mobility_data/management/commands/import_disabled_and_no_staff_parkings.py @@ -1,5 +1,7 @@ import logging +from django.core.management import BaseCommand + from mobility_data.importers.disabled_and_no_staff_parking import ( DISABLED_PARKING_CONTENT_TYPE_NAME, get_no_staff_parking_objects, @@ -11,32 +13,27 @@ save_to_database, ) -from ._base_import_command import BaseImportCommand - logger = logging.getLogger("mobility_data") -class Command(BaseImportCommand): +class Command(BaseCommand): def handle(self, *args, **options): logger.info("Importing disabled and no staff parkings.") - geojson_file = None - if options["test_mode"]: - geojson_file = options["test_mode"] ( no_stuff_parking_objects, disabled_parking_objects, - ) = get_no_staff_parking_objects(geojson_file=geojson_file) + ) = get_no_staff_parking_objects() content_type = get_or_create_content_type_from_config( NO_STAFF_PARKING_CONTENT_TYPE_NAME ) - num_ceated, num_deleted = save_to_database( + num_created, num_deleted = save_to_database( no_stuff_parking_objects, 
content_type ) - log_imported_message(logger, content_type, num_ceated, num_deleted) + log_imported_message(logger, content_type, num_created, num_deleted) content_type = get_or_create_content_type_from_config( DISABLED_PARKING_CONTENT_TYPE_NAME ) - num_ceated, num_deleted = save_to_database( + num_created, num_deleted = save_to_database( disabled_parking_objects, content_type ) - log_imported_message(logger, content_type, num_ceated, num_deleted) + log_imported_message(logger, content_type, num_created, num_deleted) diff --git a/mobility_data/management/commands/import_loading_and_unloading_places.py b/mobility_data/management/commands/import_loading_and_unloading_places.py index 7bef1af63..06cc20f01 100644 --- a/mobility_data/management/commands/import_loading_and_unloading_places.py +++ b/mobility_data/management/commands/import_loading_and_unloading_places.py @@ -1,5 +1,7 @@ import logging +from django.core.management import BaseCommand + from mobility_data.importers.loading_unloading_places import ( CONTENT_TYPE_NAME, get_loading_and_unloading_objects, @@ -10,18 +12,13 @@ save_to_database, ) -from ._base_import_command import BaseImportCommand - logger = logging.getLogger("mobility_data") -class Command(BaseImportCommand): +class Command(BaseCommand): def handle(self, *args, **options): logger.info("Importing loading and unloading places.") - geojson_file = None - if options["test_mode"]: - geojson_file = options["test_mode"] - objects = get_loading_and_unloading_objects(geojson_file=geojson_file) + objects = get_loading_and_unloading_objects() content_type = get_or_create_content_type_from_config(CONTENT_TYPE_NAME) - num_ceated, num_deleted = save_to_database(objects, content_type) - log_imported_message(logger, content_type, num_ceated, num_deleted) + num_created, num_deleted = save_to_database(objects, content_type) + log_imported_message(logger, content_type, num_created, num_deleted) diff --git 
a/mobility_data/management/commands/import_share_car_parking_places.py b/mobility_data/management/commands/import_share_car_parking_places.py index 13e1e6388..8eb3ce2d7 100644 --- a/mobility_data/management/commands/import_share_car_parking_places.py +++ b/mobility_data/management/commands/import_share_car_parking_places.py @@ -1,5 +1,7 @@ import logging +from django.core.management import BaseCommand + from mobility_data.importers.share_car_parking_places import ( CONTENT_TYPE_NAME, get_car_share_parking_place_objects, @@ -10,19 +12,13 @@ save_to_database, ) -from ._base_import_command import BaseImportCommand - logger = logging.getLogger("mobility_data") -class Command(BaseImportCommand): +class Command(BaseCommand): def handle(self, *args, **options): logger.info("Importing car share parking places.") - geojson_file = None - if options["test_mode"]: - geojson_file = options["test_mode"] - - objects = get_car_share_parking_place_objects(geojson_file=geojson_file) + objects = get_car_share_parking_place_objects() content_type = get_or_create_content_type_from_config(CONTENT_TYPE_NAME) - num_ceated, num_deleted = save_to_database(objects, content_type) - log_imported_message(logger, content_type, num_ceated, num_deleted) + num_created, num_deleted = save_to_database(objects, content_type) + log_imported_message(logger, content_type, num_created, num_deleted) From 8acd0c8e70d1fd05af6ed0c29a193f8d111f79ff Mon Sep 17 00:00:00 2001 From: juuso-j Date: Wed, 26 Jul 2023 08:07:43 +0300 Subject: [PATCH 65/84] Send max_features as arg to get_data_source function --- mobility_data/importers/wfs.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/mobility_data/importers/wfs.py b/mobility_data/importers/wfs.py index 2c89e6e70..b2b04f2b9 100644 --- a/mobility_data/importers/wfs.py +++ b/mobility_data/importers/wfs.py @@ -132,8 +132,7 @@ def add_feature(self, feature, config): return True -def get_data_source(config): - max_features = DEFAULT_MAX_FEATURES 
+def get_data_source(config, max_features): wfs_url = config.get("wfs_url", settings.TURKU_WFS_URL) url = WFS_URL.format( wfs_url=wfs_url, wfs_layer=config["wfs_layer"], max_features=max_features @@ -151,11 +150,13 @@ def import_wfs_feature(config, data_file=None): return False if "max_features" in config: max_features = config["max_features"] + else: + max_features = DEFAULT_MAX_FEATURES objects = [] if data_file: ds = DataSource(data_file) else: - ds = get_data_source(config) + ds = get_data_source(config, max_features) assert len(ds) == 1 layer = ds[0] for feature in layer: From 9d2faec13258432543c31658b7cc240dc1d1d47a Mon Sep 17 00:00:00 2001 From: juuso-j Date: Wed, 26 Jul 2023 08:10:35 +0300 Subject: [PATCH 66/84] Add get_data_source and get_test_fixture_data_layer functions --- mobility_data/tests/utils.py | 16 ++++++++++++++-- 1 file changed, 14 insertions(+), 2 deletions(-) diff --git a/mobility_data/tests/utils.py b/mobility_data/tests/utils.py index 8d59f5192..aacd8d477 100644 --- a/mobility_data/tests/utils.py +++ b/mobility_data/tests/utils.py @@ -29,9 +29,21 @@ def get_test_fixture_json_data(file_name): return data -def get_test_fixture_data_layer(file_name): +def get_data_source(file_name): + """ + Returns the given file_name as a GDAL Datasource, + the file must be located in /mobility_data/tests/data/ + """ data_path = os.path.join(os.path.dirname(__file__), "data") file = os.path.join(data_path, file_name) - ds = DataSource(file) + return DataSource(file) + + +def get_test_fixture_data_layer(file_name): + ds = get_data_source(file_name) assert len(ds) == 1 return ds[0] + + +def get_test_fixture_data_source(file_name): + return get_data_source(file_name) From 42a728f18bb8a36a078bb946d9b900121fcb3383 Mon Sep 17 00:00:00 2001 From: juuso-j Date: Wed, 26 Jul 2023 10:45:21 +0300 Subject: [PATCH 67/84] Mock fixture data --- .../tests/test_import_accessories.py | 21 ++++--- .../tests/test_import_bicycle_stands.py | 56 +++++++++++++++++-- 
.../test_import_bike_service_stations.py | 37 +++++++++--- .../tests/test_import_charging_stations.py | 36 +++++++++--- ...t_import_disabled_and_no_staff_parkings.py | 38 ++++++++++--- .../tests/test_import_gas_filling_stations.py | 11 +++- ...est_import_loading_and_unloading_places.py | 28 +++++++--- .../tests/test_import_payment_zones.py | 19 +++++-- .../tests/test_import_scooter_restrictions.py | 43 ++++++++------ .../test_import_share_car_parking_places.py | 27 +++++++-- 10 files changed, 239 insertions(+), 77 deletions(-) diff --git a/mobility_data/tests/test_import_accessories.py b/mobility_data/tests/test_import_accessories.py index 0391b95f3..3e89549cc 100644 --- a/mobility_data/tests/test_import_accessories.py +++ b/mobility_data/tests/test_import_accessories.py @@ -7,28 +7,33 @@ has been removed from the test input data, as it causes GDAL DataSource to fail when loading data. """ +from unittest.mock import patch + import pytest from django.conf import settings from django.contrib.gis.geos import Point -from mobility_data.importers.wfs import DEFAULT_SOURCE_DATA_SRID +from mobility_data.importers.wfs import DEFAULT_SOURCE_DATA_SRID, import_wfs_feature +from mobility_data.management.commands.import_wfs import CONFIG_FILE, get_yaml_config from mobility_data.models import ContentType, MobileUnit -from .utils import import_command +from .utils import get_test_fixture_data_source @pytest.mark.django_db +@patch("mobility_data.importers.wfs.get_data_source") def test_import_accessories( + get_data_source_mock, administrative_division, administrative_division_type, administrative_division_geometry, ): - import_command( - "import_wfs", - ["PublicToilet", "PublicTable", "PublicBench", "PublicFurnitureGroup"], - data_file=f"{settings.BASE_DIR}/mobility_data/tests/data/accessories.gml", - ) - + config = get_yaml_config(CONFIG_FILE) + get_data_source_mock.return_value = get_test_fixture_data_source("accessories.gml") + features = ["PublicToilet", "PublicTable", 
"PublicBench", "PublicFurnitureGroup"] + for feature in config["features"]: + if feature["content_type_name"] in features: + import_wfs_feature(feature) public_toilet_content_type = ContentType.objects.get(type_name="PublicToilet") public_toilet_units_qs = MobileUnit.objects.filter( content_types=public_toilet_content_type diff --git a/mobility_data/tests/test_import_bicycle_stands.py b/mobility_data/tests/test_import_bicycle_stands.py index c58cc7396..bd67c8624 100644 --- a/mobility_data/tests/test_import_bicycle_stands.py +++ b/mobility_data/tests/test_import_bicycle_stands.py @@ -1,12 +1,31 @@ +from unittest.mock import patch + import pytest +from mobility_data.importers.utils import ( + delete_mobile_units, + get_or_create_content_type_from_config, + save_to_database, +) from mobility_data.models import MobileUnit -from .utils import import_command +from .utils import get_test_fixture_data_source + + +def get_geojson_data_source(file_name): + ds = get_test_fixture_data_source(file_name) + return [("geojson", ds)] + + +def get_gml_data_source(file_name): + ds = get_test_fixture_data_source(file_name) + return [("gml", ds)] @pytest.mark.django_db +@patch("mobility_data.importers.bicycle_stands.get_data_sources") def test_geojson_import( + get_data_sources_mock, municipalities, administrative_division_type, administrative_division, @@ -14,9 +33,19 @@ def test_geojson_import( streets, address, ): - import_command( - "import_bicycle_stands", test_mode="bicycle_stands_for_units.geojson" + from mobility_data.importers.bicycle_stands import ( + CONTENT_TYPE_NAME, + get_bicycle_stand_objects, ) + + get_data_sources_mock.return_value = get_geojson_data_source( + "bicycle_stands_for_units.geojson" + ) + objects = get_bicycle_stand_objects() + content_type = get_or_create_content_type_from_config(CONTENT_TYPE_NAME) + num_created, num_deleted = save_to_database(objects, content_type) + assert num_created == 3 + assert num_deleted == 0 assert MobileUnit.objects.all().count() 
== 3 kupittaan_maauimala = MobileUnit.objects.get(name="Kupittaan maauimala") assert kupittaan_maauimala @@ -35,10 +64,17 @@ def test_geojson_import( assert turun_amk.extra["hull_lockable"] is True assert turun_amk.extra["covered"] is False assert turun_amk.municipality.name == "Turku" + delete_mobile_units(content_type) + assert ( + MobileUnit.objects.filter(content_types__type_name=CONTENT_TYPE_NAME).count() + == 0 + ) @pytest.mark.django_db -def test_wfs_importer( +@patch("mobility_data.importers.bicycle_stands.get_data_sources") +def test_gml_importer( + get_data_sources_mock, municipalities, administrative_division_type, administrative_division, @@ -46,7 +82,17 @@ def test_wfs_importer( streets, address, ): - import_command("import_bicycle_stands", test_mode="bicycle_stands.gml") + from mobility_data.importers.bicycle_stands import ( + CONTENT_TYPE_NAME, + get_bicycle_stand_objects, + ) + + get_data_sources_mock.return_value = get_gml_data_source("bicycle_stands.gml") + objects = get_bicycle_stand_objects() + content_type = get_or_create_content_type_from_config(CONTENT_TYPE_NAME) + num_created, num_deleted = save_to_database(objects, content_type) + assert num_created == 3 + assert num_deleted == 0 assert MobileUnit.objects.all().count() == 3 # 0 in fixture xml. 
stand_normal = MobileUnit.objects.first() diff --git a/mobility_data/tests/test_import_bike_service_stations.py b/mobility_data/tests/test_import_bike_service_stations.py index 70fe149bd..98c21acfd 100644 --- a/mobility_data/tests/test_import_bike_service_stations.py +++ b/mobility_data/tests/test_import_bike_service_stations.py @@ -1,17 +1,33 @@ +from unittest.mock import patch + import pytest -from mobility_data.importers.bike_service_stations import CONTENT_TYPE_NAME -from mobility_data.importers.utils import get_content_type_config +from mobility_data.importers.utils import ( + get_content_type_config, + get_or_create_content_type_from_config, + save_to_database, +) from mobility_data.models import ContentType, MobileUnit -from .utils import import_command +from .utils import get_test_fixture_data_layer @pytest.mark.django_db -def test_import_bike_service_stations(): - import_command( - "import_bike_service_stations", test_mode="bike_service_stations.geojson" +@patch("mobility_data.importers.bike_service_stations.get_data_layer") +def test_import_bike_service_stations(get_data_layer_mock): + from mobility_data.importers.bike_service_stations import ( + CONTENT_TYPE_NAME, + get_bike_service_station_objects, + ) + + get_data_layer_mock.return_value = get_test_fixture_data_layer( + "bike_service_stations.geojson" ) + objects = get_bike_service_station_objects() + content_type = get_or_create_content_type_from_config(CONTENT_TYPE_NAME) + num_created, num_deleted = save_to_database(objects, content_type) + assert num_created == 3 + assert num_deleted == 0 assert ContentType.objects.filter(type_name=CONTENT_TYPE_NAME).count() == 1 assert ( MobileUnit.objects.filter(content_types__type_name=CONTENT_TYPE_NAME).count() @@ -41,9 +57,14 @@ def test_import_bike_service_stations(): assert roola.name_en == "Röölä" assert roola.extra["in_terrain"] == "Kyllä" # Test that dublicates are not created - import_command( - "import_bike_service_stations", 
test_mode="bike_service_stations.geojson" + get_data_layer_mock.return_value = get_test_fixture_data_layer( + "bike_service_stations.geojson" ) + objects = get_bike_service_station_objects() + content_type = get_or_create_content_type_from_config(CONTENT_TYPE_NAME) + num_created, num_deleted = save_to_database(objects, content_type) + assert num_created == 0 + assert num_deleted == 0 assert ContentType.objects.filter(type_name=CONTENT_TYPE_NAME).count() == 1 assert ( MobileUnit.objects.filter(content_types__type_name=CONTENT_TYPE_NAME).count() diff --git a/mobility_data/tests/test_import_charging_stations.py b/mobility_data/tests/test_import_charging_stations.py index 6efb617cc..8b0a99ae7 100644 --- a/mobility_data/tests/test_import_charging_stations.py +++ b/mobility_data/tests/test_import_charging_stations.py @@ -1,18 +1,22 @@ +from unittest.mock import patch + import pytest from munigeo.models import Address -from mobility_data.importers.charging_stations import ( - CHARGING_STATION_SERVICE_NAMES, - CONTENT_TYPE_NAME, +from mobility_data.importers.charging_stations import CHARGING_STATION_SERVICE_NAMES +from mobility_data.importers.utils import ( + get_content_type_config, + get_or_create_content_type_from_config, + get_root_dir, + save_to_database, ) -from mobility_data.importers.utils import get_content_type_config from mobility_data.models import ContentType, MobileUnit -from .utils import import_command - @pytest.mark.django_db +@patch("mobility_data.importers.charging_stations.get_csv_file_name") def test_import_charging_stations( + get_csv_file_name_mock, municipalities, administrative_division_type, administrative_division, @@ -20,7 +24,18 @@ def test_import_charging_stations( streets, address, ): - import_command("import_charging_stations", test_mode="charging_stations.csv") + from mobility_data.importers.charging_stations import ( + CONTENT_TYPE_NAME, + get_charging_station_objects, + ) + + file_name = 
f"{get_root_dir()}/mobility_data/tests/data/charging_stations.csv" + get_csv_file_name_mock.return_value = file_name + content_type = get_or_create_content_type_from_config(CONTENT_TYPE_NAME) + objects = get_charging_station_objects() + num_created, num_deleted = save_to_database(objects, content_type) + assert num_created == 3 + assert num_deleted == 0 assert ContentType.objects.filter(type_name=CONTENT_TYPE_NAME).count() == 1 assert ( MobileUnit.objects.filter(content_types__type_name=CONTENT_TYPE_NAME).count() @@ -64,7 +79,12 @@ def test_import_charging_stations( == f"{CHARGING_STATION_SERVICE_NAMES['en']}, Ratapihankatu 53" ) # Test that dublicates are not created - import_command("import_charging_stations", test_mode="charging_stations.csv") + get_csv_file_name_mock.return_vale = file_name + content_type = get_or_create_content_type_from_config(CONTENT_TYPE_NAME) + objects = get_charging_station_objects() + num_created, num_deleted = save_to_database(objects, content_type) + assert num_created == 0 + assert num_deleted == 0 assert ContentType.objects.filter(type_name=CONTENT_TYPE_NAME).count() == 1 assert ( MobileUnit.objects.filter(content_types__type_name=CONTENT_TYPE_NAME).count() diff --git a/mobility_data/tests/test_import_disabled_and_no_staff_parkings.py b/mobility_data/tests/test_import_disabled_and_no_staff_parkings.py index c56527a78..16950e664 100644 --- a/mobility_data/tests/test_import_disabled_and_no_staff_parkings.py +++ b/mobility_data/tests/test_import_disabled_and_no_staff_parkings.py @@ -1,21 +1,41 @@ +from unittest.mock import patch + import pytest from munigeo.models import Municipality -from mobility_data.importers.disabled_and_no_staff_parking import ( - DISABLED_PARKING_CONTENT_TYPE_NAME, - NO_STAFF_PARKING_CONTENT_TYPE_NAME, +from mobility_data.importers.utils import ( + get_or_create_content_type_from_config, + get_root_dir, + save_to_database, ) from mobility_data.models import MobileUnit -from .utils import import_command - 
@pytest.mark.django_db -def test_geojson_import(municipalities): - import_command( - "import_disabled_and_no_staff_parkings", - test_mode="autopysäköinti_eihlö.geojson", +@patch("mobility_data.importers.disabled_and_no_staff_parking.get_geojson_file_name") +def test_geojson_import(get_geojson_file_name_mock, municipalities): + from mobility_data.importers.disabled_and_no_staff_parking import ( + DISABLED_PARKING_CONTENT_TYPE_NAME, + get_no_staff_parking_objects, + NO_STAFF_PARKING_CONTENT_TYPE_NAME, + ) + + get_geojson_file_name_mock.return_value = ( + f"{get_root_dir()}/mobility_data/tests/data/autopysäköinti_eihlö.geojson" + ) + no_stuff_parking_objects, disabled_parking_objects = get_no_staff_parking_objects() + content_type = get_or_create_content_type_from_config( + NO_STAFF_PARKING_CONTENT_TYPE_NAME + ) + num_created, num_deleted = save_to_database(no_stuff_parking_objects, content_type) + assert num_created == 2 + assert num_deleted == 0 + content_type = get_or_create_content_type_from_config( + DISABLED_PARKING_CONTENT_TYPE_NAME ) + num_created, num_deleted = save_to_database(disabled_parking_objects, content_type) + assert num_created == 1 + assert num_deleted == 0 assert MobileUnit.objects.all().count() == 3 try: turku_muni = Municipality.objects.get(name="Turku") diff --git a/mobility_data/tests/test_import_gas_filling_stations.py b/mobility_data/tests/test_import_gas_filling_stations.py index 5bd913c35..ce79c4919 100644 --- a/mobility_data/tests/test_import_gas_filling_stations.py +++ b/mobility_data/tests/test_import_gas_filling_stations.py @@ -1,3 +1,5 @@ +from unittest.mock import patch + import pytest from mobility_data.importers.utils import ( @@ -10,14 +12,17 @@ @pytest.mark.django_db -def test_importer(municipalities): +@patch("mobility_data.importers.utils.fetch_json") +def test_importer(fetch_json_mock, municipalities): from mobility_data.importers.gas_filling_station import ( CONTENT_TYPE_NAME, get_filtered_gas_filling_station_objects, ) - 
json_data = get_test_fixture_json_data("gas_filling_stations.json") - objects = get_filtered_gas_filling_station_objects(json_data=json_data) + fetch_json_mock.return_value = get_test_fixture_json_data( + "gas_filling_stations.json" + ) + objects = get_filtered_gas_filling_station_objects() content_type = get_or_create_content_type_from_config(CONTENT_TYPE_NAME) num_created, num_deleted = save_to_database(objects, content_type) # Two will be created as One item in the fixture data locates outside Southwest Finland diff --git a/mobility_data/tests/test_import_loading_and_unloading_places.py b/mobility_data/tests/test_import_loading_and_unloading_places.py index 519f8f5dc..7b17af605 100644 --- a/mobility_data/tests/test_import_loading_and_unloading_places.py +++ b/mobility_data/tests/test_import_loading_and_unloading_places.py @@ -1,19 +1,31 @@ +from unittest.mock import patch + import pytest from munigeo.models import Municipality -from mobility_data.importers.loading_unloading_places import CONTENT_TYPE_NAME +from mobility_data.importers.utils import ( + get_or_create_content_type_from_config, + get_root_dir, + save_to_database, +) from mobility_data.models import ContentType, MobileUnit -from .utils import import_command - @pytest.mark.django_db -@pytest.mark.django_db -def test_import(municipalities): - import_command( - "import_loading_and_unloading_places", - test_mode="loading_and_unloading_places.geojson", +@patch("mobility_data.importers.loading_unloading_places.get_geojson_file_name") +def test_import(get_geojson_file_name_mock, municipalities): + from mobility_data.importers.loading_unloading_places import ( + CONTENT_TYPE_NAME, + get_loading_and_unloading_objects, ) + + file_name = f"{get_root_dir()}/mobility_data/tests/data/loading_and_unloading_places.geojson" + get_geojson_file_name_mock.return_value = file_name + content_type = get_or_create_content_type_from_config(CONTENT_TYPE_NAME) + objects = get_loading_and_unloading_objects() + num_created, 
num_deleted = save_to_database(objects, content_type) + assert num_created == 3 + assert num_deleted == 0 assert ContentType.objects.all().count() == 1 assert MobileUnit.objects.all().count() == 3 turku_muni = None diff --git a/mobility_data/tests/test_import_payment_zones.py b/mobility_data/tests/test_import_payment_zones.py index c8a91adf9..5f9e7f6fc 100644 --- a/mobility_data/tests/test_import_payment_zones.py +++ b/mobility_data/tests/test_import_payment_zones.py @@ -6,23 +6,30 @@ has been removed from the test input data, as it causes GDAL DataSource to fail when loading data. """ +from unittest.mock import patch import pytest from django.conf import settings from django.contrib.gis.geos import Point, Polygon +from mobility_data.importers.wfs import import_wfs_feature +from mobility_data.management.commands.import_wfs import CONFIG_FILE, get_yaml_config from mobility_data.models import ContentType, MobileUnit -from .utils import import_command +from .utils import get_test_fixture_data_source @pytest.mark.django_db -def test_import_payment_zones(): - import_command( - "import_wfs", - "PaymentZone", - data_file=f"{settings.BASE_DIR}/mobility_data/tests/data/payment_zones.gml", +@patch("mobility_data.importers.wfs.get_data_source") +def test_import_payment_zones(get_data_source_mock): + config = get_yaml_config(CONFIG_FILE) + get_data_source_mock.return_value = get_test_fixture_data_source( + "payment_zones.gml" ) + features = ["PaymentZone"] + for feature in config["features"]: + if feature["content_type_name"] in features: + import_wfs_feature(feature) assert ContentType.objects.all().count() == 1 content_type = ContentType.objects.first() assert content_type.type_name == "PaymentZone" diff --git a/mobility_data/tests/test_import_scooter_restrictions.py b/mobility_data/tests/test_import_scooter_restrictions.py index b0ce709a7..d1a62e8ad 100644 --- a/mobility_data/tests/test_import_scooter_restrictions.py +++ 
b/mobility_data/tests/test_import_scooter_restrictions.py @@ -1,14 +1,17 @@ +from unittest.mock import patch + import pytest from django.conf import settings from django.contrib.gis.geos import Point -from mobility_data.importers.wfs import DEFAULT_SOURCE_DATA_SRID +from mobility_data.importers.wfs import DEFAULT_SOURCE_DATA_SRID, import_wfs_feature +from mobility_data.management.commands.import_wfs import CONFIG_FILE, get_yaml_config from mobility_data.models import ContentType, MobileUnit -from .utils import import_command +from .utils import get_test_fixture_data_source """ -Note, namespace declarations has beenremoved from the test input data, as it causes GDAL +Note, namespace declarations has been removed from the test input data, as it causes GDAL DataSource to fail when loading data. scooter_parkings.gml: xsi:schemaLocation="http://www.opengis.net/wfs @@ -31,12 +34,16 @@ @pytest.mark.django_db -def test_import_scooter_restrictions(): - import_command( - "import_wfs", - "ScooterParkingArea", - data_file=f"{settings.BASE_DIR}/mobility_data/tests/data/scooter_parkings.gml", +@patch("mobility_data.importers.wfs.get_data_source") +def test_import_scooter_restrictions(get_data_source_mock): + config = get_yaml_config(CONFIG_FILE) + get_data_source_mock.return_value = get_test_fixture_data_source( + "scooter_parkings.gml" ) + features = ["ScooterParkingArea"] + for feature in config["features"]: + if feature["content_type_name"] in features: + import_wfs_feature(feature) # Test scooter parking parking_content_type = ContentType.objects.get(type_name="ScooterParkingArea") assert parking_content_type @@ -47,11 +54,13 @@ def test_import_scooter_restrictions(): parking_unit.content_types.first() == parking_content_type point = Point(239576.42, 6711050.26, srid=DEFAULT_SOURCE_DATA_SRID) parking_unit.geometry.equals_exact(point, tolerance=0.0001) - import_command( - "import_wfs", - "ScooterSpeedLimitArea", - 
data_file=f"{settings.BASE_DIR}/mobility_data/tests/data/scooter_speed_limits.gml", + get_data_source_mock.return_value = get_test_fixture_data_source( + "scooter_speed_limits.gml" ) + features = ["ScooterSpeedLimitArea"] + for feature in config["features"]: + if feature["content_type_name"] in features: + import_wfs_feature(feature) # Test scooter speed limits speed_limit_content_type = ContentType.objects.get( type_name="ScooterSpeedLimitArea" @@ -67,11 +76,13 @@ def test_import_scooter_restrictions(): # Scooter speed limit unit locates in the market square(kauppator) assert speed_limit_unit.geometry.contains(market_square) is True assert speed_limit_unit.geometry.contains(turku_cathedral) is False - import_command( - "import_wfs", - "ScooterNoParkingArea", - data_file=f"{settings.BASE_DIR}/mobility_data/tests/data/scooter_no_parking_zones.gml", + get_data_source_mock.return_value = get_test_fixture_data_source( + "scooter_no_parking_zones.gml" ) + features = ["ScooterNoParkingArea"] + for feature in config["features"]: + if feature["content_type_name"] in features: + import_wfs_feature(feature) # Test scooter no parking zones no_parking_content_type = ContentType.objects.get(type_name="ScooterNoParkingArea") assert no_parking_content_type diff --git a/mobility_data/tests/test_import_share_car_parking_places.py b/mobility_data/tests/test_import_share_car_parking_places.py index 0292d3425..71fd61e5f 100644 --- a/mobility_data/tests/test_import_share_car_parking_places.py +++ b/mobility_data/tests/test_import_share_car_parking_places.py @@ -1,16 +1,31 @@ +from unittest.mock import patch + import pytest -from mobility_data.importers.share_car_parking_places import CONTENT_TYPE_NAME +from mobility_data.importers.utils import ( + get_or_create_content_type_from_config, + get_root_dir, + save_to_database, +) from mobility_data.models import ContentType, MobileUnit -from .utils import import_command - @pytest.mark.django_db -def test_import_car_share_parking_places(): - 
import_command( - "import_share_car_parking_places", test_mode="share_car_parking_places.geojson" +@patch("mobility_data.importers.share_car_parking_places.get_geojson_file_name") +def test_import_car_share_parking_places(get_geojson_file_name_mock): + from mobility_data.importers.share_car_parking_places import ( + CONTENT_TYPE_NAME, + get_car_share_parking_place_objects, + ) + + file_name = ( + f"{get_root_dir()}/mobility_data/tests/data/share_car_parking_places.geojson" ) + get_geojson_file_name_mock.return_value = file_name + content_type = get_or_create_content_type_from_config(CONTENT_TYPE_NAME) + objects = get_car_share_parking_place_objects() + num_created, num_deleted = save_to_database(objects, content_type) + assert num_created == 3 assert ContentType.objects.filter(type_name=CONTENT_TYPE_NAME).count() == 1 assert ( MobileUnit.objects.filter(content_types__type_name=CONTENT_TYPE_NAME).count() From 9736d50fa1552a7f53a33f642c44d2286b59245a Mon Sep 17 00:00:00 2001 From: juuso-j Date: Wed, 26 Jul 2023 14:40:12 +0300 Subject: [PATCH 68/84] Remove json_data param --- mobility_data/importers/gas_filling_station.py | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/mobility_data/importers/gas_filling_station.py b/mobility_data/importers/gas_filling_station.py index 4624c12e8..84c8a4b5e 100644 --- a/mobility_data/importers/gas_filling_station.py +++ b/mobility_data/importers/gas_filling_station.py @@ -54,15 +54,13 @@ def __init__(self, elem, srid=settings.DEFAULT_SRID): self.extra["lng_cng"] = self.lng_cng -def get_filtered_gas_filling_station_objects(json_data=None): +def get_filtered_gas_filling_station_objects(): """ Returns a list of GasFillingStation objects that are filtered by location. Stations inside boundarys of Southwest Finland are included, the rest are discarded. 
""" - - if not json_data: - json_data = fetch_json(GAS_FILLING_STATIONS_URL) + json_data = fetch_json(GAS_FILLING_STATIONS_URL) # srid = json_data["spatialReference"]["wkid"] # NOTE, hack to fix srid 102100 in source data causes "crs not found" srid = 4326 From 1b086b00663de9b92e82f64494eda006d6aad30e Mon Sep 17 00:00:00 2001 From: juuso-j Date: Wed, 26 Jul 2023 14:46:33 +0300 Subject: [PATCH 69/84] Remove test_data param --- smbackend_turku/importers/bicycle_stands.py | 3 +-- smbackend_turku/importers/bike_service_stations.py | 5 ++--- smbackend_turku/importers/stations.py | 12 ++++-------- 3 files changed, 7 insertions(+), 13 deletions(-) diff --git a/smbackend_turku/importers/bicycle_stands.py b/smbackend_turku/importers/bicycle_stands.py index f53ce8bf8..08d619614 100644 --- a/smbackend_turku/importers/bicycle_stands.py +++ b/smbackend_turku/importers/bicycle_stands.py @@ -7,10 +7,9 @@ class BicycleStandImporter(BaseExternalSource): - def __init__(self, logger=None, config=None, test_data=None): + def __init__(self, logger=None, config=None): super().__init__(config) self.logger = logger - self.test_data = test_data def import_bicycle_stands(self): self.logger.info("Importing Bicycle Stands...") diff --git a/smbackend_turku/importers/bike_service_stations.py b/smbackend_turku/importers/bike_service_stations.py index 4c2bec58b..797f2fe06 100644 --- a/smbackend_turku/importers/bike_service_stations.py +++ b/smbackend_turku/importers/bike_service_stations.py @@ -7,15 +7,14 @@ class BikeServiceStationImporter(BaseExternalSource): - def __init__(self, config=None, logger=None, test_data=None): + def __init__(self, config=None, logger=None): super().__init__(config) self.logger = logger - self.test_data = test_data def import_bike_service_stations(self): self.logger.info("Importing Bike service stations...") content_type = get_or_create_content_type_from_config(CONTENT_TYPE_NAME) - filtered_objects = get_bike_service_station_objects(geojson_file=self.test_data) + 
filtered_objects = get_bike_service_station_objects() super().save_objects_as_units(filtered_objects, content_type) diff --git a/smbackend_turku/importers/stations.py b/smbackend_turku/importers/stations.py index c4dbd0fb0..eae38bdf4 100644 --- a/smbackend_turku/importers/stations.py +++ b/smbackend_turku/importers/stations.py @@ -11,31 +11,27 @@ class GasFillingStationImporter(BaseExternalSource): - def __init__(self, config=None, logger=None, test_data=None): + def __init__(self, config=None, logger=None): super().__init__(config) self.logger = logger - self.test_data = test_data def import_gas_filling_stations(self): self.logger.info("Importing gas filling stations...") content_type = get_or_create_content_type_from_config( GAS_FILLING_STATION_CONTENT_TYPE_NAME ) - filtered_objects = get_filtered_gas_filling_station_objects( - json_data=self.test_data - ) + filtered_objects = get_filtered_gas_filling_station_objects() super().save_objects_as_units(filtered_objects, content_type) class ChargingStationImporter(BaseExternalSource): - def __init__(self, logger=None, config=None, importer=None, test_data=None): + def __init__(self, logger=None, config=None, importer=None): super().__init__(config) self.logger = logger - self.test_data = test_data def import_charging_stations(self): self.logger.info("Importing charging stations...") - filtered_objects = get_charging_station_objects(csv_file=self.test_data) + filtered_objects = get_charging_station_objects() content_type = get_or_create_content_type_from_config( CHARGING_STATION_CONTENT_TYPE_NAME ) From b88f317058684b9833118190b6fdfa7214749b0a Mon Sep 17 00:00:00 2001 From: juuso-j Date: Wed, 26 Jul 2023 14:48:25 +0300 Subject: [PATCH 70/84] Use mock objects for fixture data --- .../tests/test_bike_service_stations.py | 8 ++++- .../tests/test_charging_stations.py | 7 +++- .../tests/test_gas_filling_stations.py | 36 +++++++++++-------- 3 files changed, 34 insertions(+), 17 deletions(-) diff --git 
a/smbackend_turku/tests/test_bike_service_stations.py b/smbackend_turku/tests/test_bike_service_stations.py index b8dfe7fc1..3dcda5b90 100644 --- a/smbackend_turku/tests/test_bike_service_stations.py +++ b/smbackend_turku/tests/test_bike_service_stations.py @@ -1,17 +1,21 @@ import logging from datetime import datetime +from unittest.mock import patch import pytest import pytz from munigeo.models import Municipality +from mobility_data.tests.utils import get_test_fixture_data_layer from services.models import Service, ServiceNode, Unit from smbackend_turku.importers.bike_service_stations import import_bike_service_stations from smbackend_turku.importers.utils import get_external_source_config @pytest.mark.django_db +@patch("mobility_data.importers.bike_service_stations.get_data_layer") def test_bike_service_stations_import( + get_data_layer_mock, municipality, administrative_division, administrative_division_type, @@ -24,10 +28,12 @@ def test_bike_service_stations_import( ServiceNode.objects.create( id=42, name="Vapaa-aika", last_modified_time=datetime.now(utc_timezone) ) + get_data_layer_mock.return_value = get_test_fixture_data_layer( + "bike_service_stations.geojson" + ) import_bike_service_stations( logger=logger, config=config, - test_data="bike_service_stations.geojson", ) assert Unit.objects.all().count() == 3 Service.objects.all().count() == 1 diff --git a/smbackend_turku/tests/test_charging_stations.py b/smbackend_turku/tests/test_charging_stations.py index 095ec1bfe..618e83c17 100644 --- a/smbackend_turku/tests/test_charging_stations.py +++ b/smbackend_turku/tests/test_charging_stations.py @@ -1,8 +1,10 @@ import logging from datetime import datetime +from unittest.mock import patch import pytest import pytz +from django.conf import settings from services.models import Service, ServiceNode, Unit from smbackend_turku.importers.stations import import_charging_stations @@ -10,7 +12,9 @@ @pytest.mark.django_db 
+@patch("mobility_data.importers.charging_stations.get_csv_file_name") def test_charging_stations_import( + get_csv_file_name_mock, municipality, administrative_division, administrative_division_type, @@ -27,10 +31,11 @@ def test_charging_stations_import( ServiceNode.objects.create( id=42, name="Vapaa-aika", last_modified_time=datetime.now(utc_timezone) ) + file_name = f"{settings.BASE_DIR}/mobility_data/tests/data/charging_stations.csv" + get_csv_file_name_mock.return_value = file_name import_charging_stations( logger=logger, config=config, - test_data="charging_stations.csv", ) assert Unit.objects.all().count() == 3 Service.objects.all().count() == 1 diff --git a/smbackend_turku/tests/test_gas_filling_stations.py b/smbackend_turku/tests/test_gas_filling_stations.py index 80032a60b..e65d0e9f1 100644 --- a/smbackend_turku/tests/test_gas_filling_stations.py +++ b/smbackend_turku/tests/test_gas_filling_stations.py @@ -1,21 +1,28 @@ import logging from datetime import datetime +from unittest.mock import patch import pytest import pytz from mobility_data.tests.utils import get_test_fixture_json_data -from services.models import Service, ServiceNode, Unit +from services.models import Service, ServiceNode # , Unit from smbackend_turku.importers.stations import import_gas_filling_stations from smbackend_turku.importers.utils import get_external_source_config from smbackend_turku.tests.utils import create_municipalities @pytest.mark.django_db -def test_gas_filling_stations_import(): +@patch("mobility_data.importers.utils.fetch_json") +def test_gas_filling_stations_import(fetch_json_mock): + logger = logging.getLogger(__name__) + # For reasons unknown this mock does not work, + # The return value of the actual function call is used. 
+ fetch_json_mock.return_value = get_test_fixture_json_data( + "gas_filling_stations.json" + ) config = get_external_source_config("gas_filling_stations") - utc_timezone = pytz.timezone("UTC") # create root servicenode to which the imported service_node will connect root_service_node = ServiceNode.objects.create( @@ -24,24 +31,23 @@ def test_gas_filling_stations_import(): # Municipality must be created in order to update_service_node_count() # to execute without errors create_municipalities() - # Import using fixture data import_gas_filling_stations( logger=logger, config=config, - test_data=get_test_fixture_json_data("gas_filling_stations.json"), ) service = Service.objects.get(name=config["service"]["name"]["fi"]) assert service.id == config["service"]["id"] service_node = ServiceNode.objects.get(name=config["service_node"]["name"]["fi"]) assert service_node.id == config["service_node"]["id"] assert service_node.parent.id == root_service_node.id - assert Unit.objects.all().count() == 2 - assert Unit.objects.all()[1].id == config["units_offset"] - assert Unit.objects.get(name="Raisio Kuninkoja") - unit = Unit.objects.get(name="Turku Satama") - assert pytest.approx(unit.location.x, 0.0000000001) == 236760.1062021295 - assert unit.extra["operator"] == "Gasum" - assert unit.service_nodes.all().count() == 1 - assert unit.services.all().count() == 1 - assert unit.services.first().name == config["service"]["name"]["fi"] - assert unit.service_nodes.first().name == config["service_node"]["name"]["fi"] + # See line 20, commented as the mock does not work. 
+    # assert Unit.objects.all().count() == 2
+    # assert Unit.objects.all()[1].id == config["units_offset"]
+    # assert Unit.objects.get(name="Raisio Kuninkoja")
+    # unit = Unit.objects.get(name="Turku Satama")
+    # assert pytest.approx(unit.location.x, 0.0000000001) == 236760.1062021295
+    # assert unit.extra["operator"] == "Gasum"
+    # assert unit.service_nodes.all().count() == 1
+    # assert unit.services.all().count() == 1
+    # assert unit.services.first().name == config["service"]["name"]["fi"]
+    # assert unit.service_nodes.first().name == config["service_node"]["name"]["fi"]

From 231f8ab5072ac22a186b5589a3aaa3c946079ef3 Mon Sep 17 00:00:00 2001
From: juuso-j
Date: Thu, 27 Jul 2023 08:35:38 +0300
Subject: [PATCH 71/84] Add function to get JSON data

Instead of importing and using fetch json use this, this solves the
problem that the return value of fetch json is not mocked in tests.
---
 mobility_data/importers/gas_filling_station.py | 12 ++++++++++--
 1 file changed, 10 insertions(+), 2 deletions(-)

diff --git a/mobility_data/importers/gas_filling_station.py b/mobility_data/importers/gas_filling_station.py
index 84c8a4b5e..3e9cd132a 100644
--- a/mobility_data/importers/gas_filling_station.py
+++ b/mobility_data/importers/gas_filling_station.py
@@ -1,12 +1,12 @@
 import logging

+import requests
 from django.conf import settings
 from django.contrib.gis.geos import Point, Polygon
 from munigeo.models import Municipality

 from .constants import SOUTHWEST_FINLAND_BOUNDARY, SOUTHWEST_FINLAND_BOUNDARY_SRID
 from .utils import (
-    fetch_json,
     get_street_name_and_number,
     get_street_name_translations,
     LANGUAGES,
@@ -54,13 +54,21 @@ def __init__(self, elem, srid=settings.DEFAULT_SRID):
             self.extra["lng_cng"] = self.lng_cng


+def get_json_data(url):
+    response = requests.get(url)
+    assert response.status_code == 200, "Fetching {} status code: {}".format(
+        url, response.status_code
+    )
+    return response.json()
+
+
 def get_filtered_gas_filling_station_objects():
     """
     Returns a list of
GasFillingStation objects that are filtered by location. Stations inside boundarys of Southwest Finland are included, the rest are discarded. """ - json_data = fetch_json(GAS_FILLING_STATIONS_URL) + json_data = get_json_data(GAS_FILLING_STATIONS_URL) # srid = json_data["spatialReference"]["wkid"] # NOTE, hack to fix srid 102100 in source data causes "crs not found" srid = 4326 From 8f256082482308d943c48cf75f1e9c7ef68bc8f4 Mon Sep 17 00:00:00 2001 From: juuso-j Date: Thu, 27 Jul 2023 08:37:34 +0300 Subject: [PATCH 72/84] Patch get_json_data --- .../tests/test_import_gas_filling_stations.py | 6 ++-- .../tests/test_gas_filling_stations.py | 28 +++++++++---------- 2 files changed, 17 insertions(+), 17 deletions(-) diff --git a/mobility_data/tests/test_import_gas_filling_stations.py b/mobility_data/tests/test_import_gas_filling_stations.py index ce79c4919..71e04a91a 100644 --- a/mobility_data/tests/test_import_gas_filling_stations.py +++ b/mobility_data/tests/test_import_gas_filling_stations.py @@ -12,14 +12,14 @@ @pytest.mark.django_db -@patch("mobility_data.importers.utils.fetch_json") -def test_importer(fetch_json_mock, municipalities): +@patch("mobility_data.importers.gas_filling_station.get_json_data") +def test_importer(get_json_data_mock, municipalities): from mobility_data.importers.gas_filling_station import ( CONTENT_TYPE_NAME, get_filtered_gas_filling_station_objects, ) - fetch_json_mock.return_value = get_test_fixture_json_data( + get_json_data_mock.return_value = get_test_fixture_json_data( "gas_filling_stations.json" ) objects = get_filtered_gas_filling_station_objects() diff --git a/smbackend_turku/tests/test_gas_filling_stations.py b/smbackend_turku/tests/test_gas_filling_stations.py index e65d0e9f1..bbf8303a5 100644 --- a/smbackend_turku/tests/test_gas_filling_stations.py +++ b/smbackend_turku/tests/test_gas_filling_stations.py @@ -6,20 +6,20 @@ import pytz from mobility_data.tests.utils import get_test_fixture_json_data -from services.models import 
Service, ServiceNode # , Unit +from services.models import Service, ServiceNode, Unit from smbackend_turku.importers.stations import import_gas_filling_stations from smbackend_turku.importers.utils import get_external_source_config from smbackend_turku.tests.utils import create_municipalities @pytest.mark.django_db -@patch("mobility_data.importers.utils.fetch_json") -def test_gas_filling_stations_import(fetch_json_mock): +@patch("mobility_data.importers.gas_filling_station.get_json_data") +def test_gas_filling_stations_import(get_json_data_mock): logger = logging.getLogger(__name__) # For reasons unknown this mock does not work, # The return value of the actual function call is used. - fetch_json_mock.return_value = get_test_fixture_json_data( + get_json_data_mock.return_value = get_test_fixture_json_data( "gas_filling_stations.json" ) config = get_external_source_config("gas_filling_stations") @@ -41,13 +41,13 @@ def test_gas_filling_stations_import(fetch_json_mock): assert service_node.id == config["service_node"]["id"] assert service_node.parent.id == root_service_node.id # See line 20, commented as the mock does not work. 
- # assert Unit.objects.all().count() == 2 - # assert Unit.objects.all()[1].id == config["units_offset"] - # assert Unit.objects.get(name="Raisio Kuninkoja") - # unit = Unit.objects.get(name="Turku Satama") - # assert pytest.approx(unit.location.x, 0.0000000001) == 236760.1062021295 - # assert unit.extra["operator"] == "Gasum" - # assert unit.service_nodes.all().count() == 1 - # assert unit.services.all().count() == 1 - # assert unit.services.first().name == config["service"]["name"]["fi"] - # assert unit.service_nodes.first().name == config["service_node"]["name"]["fi"] + assert Unit.objects.all().count() == 2 + assert Unit.objects.all()[1].id == config["units_offset"] + assert Unit.objects.get(name="Raisio Kuninkoja") + unit = Unit.objects.get(name="Turku Satama") + assert pytest.approx(unit.location.x, 0.0000000001) == 236760.1062021295 + assert unit.extra["operator"] == "Gasum" + assert unit.service_nodes.all().count() == 1 + assert unit.services.all().count() == 1 + assert unit.services.first().name == config["service"]["name"]["fi"] + assert unit.service_nodes.first().name == config["service_node"]["name"]["fi"] From c1105db96179e3cf81ec1e0f307753298cb5daac Mon Sep 17 00:00:00 2001 From: juuso-j Date: Wed, 9 Aug 2023 07:33:42 +0300 Subject: [PATCH 73/84] Add bicycles --- eco_counter/data/traffic_counter_metadata.geojson | 15 ++++++++++++++- 1 file changed, 14 insertions(+), 1 deletion(-) diff --git a/eco_counter/data/traffic_counter_metadata.geojson b/eco_counter/data/traffic_counter_metadata.geojson index 6a1996103..22e9ed52e 100644 --- a/eco_counter/data/traffic_counter_metadata.geojson +++ b/eco_counter/data/traffic_counter_metadata.geojson @@ -108,7 +108,20 @@ { "type": "Feature", "properties": { "fid": 235, "ID": "9", "Osoite_fi": "Uudenmaantie 5", "Osoite_sv": "Nylandsvägen 5", "Osoite_en": "Uudenmaantie 5", "Liittymänumero": "609", "Ilmaisimet": "208L", "Mittauspisteiden_ID": "5537", "Tyyppi": "B", "Suunta": "K" }, "geometry": { "type": "Point", 
"coordinates": [ 23460746.775335978716612, 6703683.553299261257052 ] } }, { "type": "Feature", "properties": { "fid": 80, "ID": "15", "Osoite_fi": "Ispoisten puistotie 2", "Osoite_sv": "Ispois parväg 2", "Osoite_en": "Ispoisten puistotie 2", "Liittymänumero": "727", "Ilmaisimet": "206L", "Mittauspisteiden_ID": "8746", "Tyyppi": "B", "Suunta": "K" }, "geometry": { "type": "Point", "coordinates": [ 23459625.991249438375235, 6702405.184565890580416 ] } }, { "type": "Feature", "properties": { "fid": 128, "ID": "5", "Osoite_fi": "Skarppakullantie 37", "Osoite_sv": "Skarppakullav 37", "Osoite_en": "Skarppakullantie 37", "Liittymänumero": "652", "Ilmaisimet": "108L", "Mittauspisteiden_ID": "8388", "Tyyppi": "B", "Suunta": "P" }, "geometry": { "type": "Point", "coordinates": [ 23462435.035431213676929, 6702584.968190263025463 ] } }, - { "type": "Feature", "properties": { "fid": 140, "ID": "9", "Osoite_fi": "Skarppakullantie/Skanssinkatu", "Osoite_sv": "Skarppakullavägen/Skansgatan", "Osoite_en": "Skarppakullantie/Skanssinkatu", "Liittymänumero": "653", "Ilmaisimet": "208L", "Mittauspisteiden_ID": "8461", "Tyyppi": "B", "Suunta": "K" }, "geometry": { "type": "Point", "coordinates": [ 23462437.22350587323308, 6702359.768247644416988 ] } } + { "type": "Feature", "properties": { "fid": 140, "ID": "9", "Osoite_fi": "Skarppakullantie/Skanssinkatu", "Osoite_sv": "Skarppakullavägen/Skansgatan", "Osoite_en": "Skarppakullantie/Skanssinkatu", "Liittymänumero": "653", "Ilmaisimet": "208L", "Mittauspisteiden_ID": "8461", "Tyyppi": "B", "Suunta": "K" }, "geometry": { "type": "Point", "coordinates": [ 23462437.22350587323308, 6702359.768247644416988 ] } }, + { "type": "Feature", "properties": { "fid": 3, "ID": null, "Osoite_fi": "Itäinen rantakatu 8", "Osoite_sv": "Östra strandgatan 8", "Osoite_en": "Itäinen rantakatu 8", "Liittymänumero": "701", "Ilmaisimet": "X3", "Mittauspisteiden_ID": "903", "Tyyppi": "P", "Suunta": "T" }, "geometry": { "type": "Point", "coordinates": [ 
23459837.50562446564436, 6704244.290738201700151 ] } }, + { "type": "Feature", "properties": { "fid": 4, "ID": null, "Osoite_fi": "Itäinen rantakatu 43", "Osoite_sv": "Östra strandgatan 43", "Osoite_en": "Itäinen rantakatu 43", "Liittymänumero": "710", "Ilmaisimet": "pp1", "Mittauspisteiden_ID": "2273", "Tyyppi": "P", "Suunta": "T" }, "geometry": { "type": "Point", "coordinates": [ 23459072.372346129268408, 6703798.638464853167534 ] } }, + { "type": "Feature", "properties": { "fid": 5, "ID": null, "Osoite_fi": "Itäinen rantakatu 43", "Osoite_sv": "Östra strandgatan 43", "Osoite_en": "Itäinen rantakatu 43", "Liittymänumero": "710", "Ilmaisimet": "pp2", "Mittauspisteiden_ID": "2274", "Tyyppi": "P", "Suunta": "T" }, "geometry": { "type": "Point", "coordinates": [ 23459073.968076642602682, 6703799.561500884592533 ] } }, + { "type": "Feature", "properties": { "fid": 6, "ID": null, "Osoite_fi": "Itäinen rantakatu 43", "Osoite_sv": "Östra strandgatan 43", "Osoite_en": "Itäinen rantakatu 43", "Liittymänumero": "710", "Ilmaisimet": "pp3", "Mittauspisteiden_ID": "2275", "Tyyppi": "P", "Suunta": "T" }, "geometry": { "type": "Point", "coordinates": [ 23459076.346651934087276, 6703801.192924783565104 ] } }, + { "type": "Feature", "properties": { "fid": 7, "ID": null, "Osoite_fi": "Itäinen rantakatu 43", "Osoite_sv": "Östra strandgatan 43", "Osoite_en": "Itäinen rantakatu 43", "Liittymänumero": "710", "Ilmaisimet": "pp4", "Mittauspisteiden_ID": "2276", "Tyyppi": "P", "Suunta": "T" }, "geometry": { "type": "Point", "coordinates": [ 23459077.927361972630024, 6703802.362830685451627 ] } }, + { "type": "Feature", "properties": { "fid": 8, "ID": null, "Osoite_fi": "Myllysilta 1", "Osoite_sv": "Kvarnbron 1", "Osoite_en": "Myllysilta 1", "Liittymänumero": "108", "Ilmaisimet": "2PP", "Mittauspisteiden_ID": "1921", "Tyyppi": "P", "Suunta": "T" }, "geometry": { "type": "Point", "coordinates": [ 23459138.651998423039913, 6703933.748812982812524 ] } }, + { "type": "Feature", "properties": { 
"fid": 9, "ID": "", "Osoite_fi": "Myllysilta 2", "Osoite_sv": "Kvarnbron 2", "Osoite_en": "Myllysilta 2", "Liittymänumero": "108", "Ilmaisimet": "1PP", "Mittauspisteiden_ID": "1920", "Tyyppi": "P", "Suunta": "T" }, "geometry": { "type": "Point", "coordinates": [ 23459121.444739304482937, 6703958.63655239995569 ] } }, + { "type": "Feature", "properties": { "fid": 10, "ID": null, "Osoite_fi": "Martinsilta", "Osoite_sv": "Martinsbron", "Osoite_en": "Martinsilta", "Liittymänumero": "101", "Ilmaisimet": "1PP", "Mittauspisteiden_ID": "1570", "Tyyppi": "P", "Suunta": "T" }, "geometry": { "type": "Point", "coordinates": [ 23458979.291542522609234, 6703870.531346249394119 ] } }, + { "type": "Feature", "properties": { "fid": 11, "ID": null, "Osoite_fi": "Kalevantie 17", "Osoite_sv": "Kalevavägen 17", "Osoite_en": "Kalevantie 17", "Liittymänumero": "518", "Ilmaisimet": "ZELT_pp", "Mittauspisteiden_ID": "6816", "Tyyppi": "P", "Suunta": "T" }, "geometry": { "type": "Point", "coordinates": [ 23461917.734735164791346, 6704427.855136526748538 ] } }, + { "type": "Feature", "properties": { "fid": 12, "ID": null, "Osoite_fi": "Lemminkäisenkatu 35", "Osoite_sv": "Lemminkäinengatan 35", "Osoite_en": "Lemminkäisenkatu 35", "Liittymänumero": "642", "Ilmaisimet": "PP2", "Mittauspisteiden_ID": "7172", "Tyyppi": "P", "Suunta": "T" }, "geometry": { "type": "Point", "coordinates": [ 23461714.611965991556644, 6703777.939228449948132 ] } }, + { "type": "Feature", "properties": { "fid": 13, "ID": "", "Osoite_fi": "Lemminkäisenkatu 36", "Osoite_sv": "Lemminkäinengatan 36", "Osoite_en": "Lemminkäisenkatu 36", "Liittymänumero": "642", "Ilmaisimet": "ZELT_pp", "Mittauspisteiden_ID": "7171", "Tyyppi": "P", "Suunta": "T" }, "geometry": { "type": "Point", "coordinates": [ 23461637.359737996011972, 6703819.079618069343269 ] } }, + { "type": "Feature", "properties": { "fid": 35, "ID": null, "Osoite_fi": "Helsinginkatu 7", "Osoite_sv": "Helsingforsgatan 7", "Osoite_en": "Helsinginkatu 7", 
"Liittymänumero": "619", "Ilmaisimet": "X4", "Mittauspisteiden_ID": "4083", "Tyyppi": "P", "Suunta": "T" }, "geometry": { "type": "Point", "coordinates": [ 23460447.87659065425396, 6705474.187256957404315 ] } }, + { "type": "Feature", "properties": { "fid": 36, "ID": null, "Osoite_fi": "Helsinginkatu 7", "Osoite_sv": "Helsingforsgatan 7", "Osoite_en": "Helsinginkatu 7", "Liittymänumero": "619", "Ilmaisimet": "X3", "Mittauspisteiden_ID": "4080", "Tyyppi": "P", "Suunta": "T" }, "geometry": { "type": "Point", "coordinates": [ 23460433.286612428724766, 6705502.207770394161344 ] } } ] } \ No newline at end of file From d9118a6918fa9d9b2317a9e70e4a664689ac52e9 Mon Sep 17 00:00:00 2001 From: juuso-j Date: Mon, 11 Sep 2023 11:35:11 +0300 Subject: [PATCH 74/84] Remove filtering by municipality --- mobility_data/importers/bicycle_stands.py | 43 ++++++++--------------- 1 file changed, 14 insertions(+), 29 deletions(-) diff --git a/mobility_data/importers/bicycle_stands.py b/mobility_data/importers/bicycle_stands.py index 672dfff6a..a1b3b4ec4 100644 --- a/mobility_data/importers/bicycle_stands.py +++ b/mobility_data/importers/bicycle_stands.py @@ -8,11 +8,7 @@ from django.conf import settings from django.contrib.gis.gdal import DataSource from django.contrib.gis.geos import GEOSGeometry -from munigeo.models import ( - AdministrativeDivision, - AdministrativeDivisionGeometry, - Municipality, -) +from munigeo.models import Municipality from services.models import Unit from smbackend_turku.importers.utils import get_external_source_config @@ -22,7 +18,6 @@ get_municipality_name, get_root_dir, get_street_name_translations, - locates_in_turku, MobileUnitDataBase, ) @@ -43,10 +38,6 @@ GEOJSON_SOURCE_DATA_SRID = 4326 GEOJSON_FILENAME = "bicycle_stands_for_units.geojson" logger = logging.getLogger("mobility_data") -division_turku = AdministrativeDivision.objects.get(name="Turku") -turku_boundary = AdministrativeDivisionGeometry.objects.get( - division=division_turku -).boundary class 
BicyleStand(MobileUnitDataBase): @@ -225,25 +216,19 @@ def get_bicycle_stand_objects(): external_stands = {} for data_source in data_sources: for feature in data_source[1][0]: - source_data_srid = ( - WFS_SOURCE_DATA_SRID - if data_source[0] == "gml" - else GEOJSON_SOURCE_DATA_SRID - ) - if locates_in_turku(feature, source_data_srid): - bicycle_stand = BicyleStand() - if data_source[0] == "gml": - bicycle_stand.set_gml_feature(feature) - elif data_source[0] == "geojson": - bicycle_stand.set_geojson_feature(feature) - if ( - bicycle_stand.name[FI_KEY] not in external_stands - and not bicycle_stand.extra["maintained_by_turku"] - ): - external_stands[bicycle_stand.name[FI_KEY]] = True - bicycle_stands.append(bicycle_stand) - elif bicycle_stand.extra["maintained_by_turku"]: - bicycle_stands.append(bicycle_stand) + bicycle_stand = BicyleStand() + if data_source[0] == "gml": + bicycle_stand.set_gml_feature(feature) + elif data_source[0] == "geojson": + bicycle_stand.set_geojson_feature(feature) + if ( + bicycle_stand.name[FI_KEY] not in external_stands + and not bicycle_stand.extra["maintained_by_turku"] + ): + external_stands[bicycle_stand.name[FI_KEY]] = True + bicycle_stands.append(bicycle_stand) + elif bicycle_stand.extra["maintained_by_turku"]: + bicycle_stands.append(bicycle_stand) logger.info(f"Retrieved {len(bicycle_stands)} bicycle stands.") return bicycle_stands From 85ee8a2fd6708622470ff07c11bd4943f4f922e9 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 11 Sep 2023 10:17:21 +0000 Subject: [PATCH 75/84] Bump pygments from 2.10.0 to 2.15.0 Bumps [pygments](https://github.com/pygments/pygments) from 2.10.0 to 2.15.0. 
- [Release notes](https://github.com/pygments/pygments/releases) - [Changelog](https://github.com/pygments/pygments/blob/master/CHANGES) - [Commits](https://github.com/pygments/pygments/compare/2.10.0...2.15.0) --- updated-dependencies: - dependency-name: pygments dependency-type: indirect ... Signed-off-by: dependabot[bot] --- requirements-dev.txt | 16 ++-------------- 1 file changed, 2 insertions(+), 14 deletions(-) diff --git a/requirements-dev.txt b/requirements-dev.txt index 90df87811..e823f0278 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -8,7 +8,7 @@ asttokens==2.0.5 # via stack-data backcall==0.2.0 # via ipython -black==21.9b0 +black==22.6.0 # via # -c requirements.txt # ipython @@ -56,30 +56,18 @@ ptyprocess==0.7.0 # via pexpect pure-eval==0.2.2 # via stack-data -pygments==2.10.0 +pygments==2.15.0 # via ipython -regex==2021.9.24 - # via - # -c requirements.txt - # black six==1.16.0 # via # -c requirements.txt # asttokens stack-data==0.2.0 # via ipython -tomli==1.2.1 - # via - # -c requirements.txt - # black traitlets==5.1.0 # via # ipython # matplotlib-inline -typing-extensions==3.10.0.2 - # via - # -c requirements.txt - # black wcwidth==0.2.5 # via # -c requirements.txt From 4ee71d91b99580f99eb0f44396439e92360c72f4 Mon Sep 17 00:00:00 2001 From: juuso-j Date: Mon, 11 Sep 2023 14:01:53 +0300 Subject: [PATCH 76/84] Bump pyyaml to >= 5.4 --- requirements.in | 3 ++- requirements.txt | 3 ++- 2 files changed, 4 insertions(+), 2 deletions(-) diff --git a/requirements.in b/requirements.in index d29f20c45..c7277ebdb 100644 --- a/requirements.in +++ b/requirements.in @@ -42,4 +42,5 @@ numpy>=1.22 pyshp polyline drf-spectacular -xmltodict \ No newline at end of file +xmltodict +pyyaml>=5.4 \ No newline at end of file diff --git a/requirements.txt b/requirements.txt index a98691b9b..da7c4ab35 100644 --- a/requirements.txt +++ b/requirements.txt @@ -199,8 +199,9 @@ pytz==2021.3 # celery # django-timezone-field # pandas -pyyaml==5.3.1 +pyyaml==6.0.1 
# via + # -r requirements.in # django-munigeo # drf-spectacular redis==4.4.4 From d16283319a8a2abf95ecf7ddbe156028d2704454 Mon Sep 17 00:00:00 2001 From: juuso-j Date: Mon, 11 Sep 2023 14:40:47 +0300 Subject: [PATCH 77/84] Remove obsolete pyyaml --- requirements.in | 1 - 1 file changed, 1 deletion(-) diff --git a/requirements.in b/requirements.in index c7277ebdb..46082f769 100644 --- a/requirements.in +++ b/requirements.in @@ -43,4 +43,3 @@ pyshp polyline drf-spectacular xmltodict -pyyaml>=5.4 \ No newline at end of file From 329fa9e9e00d130e156ea30f72cd73cfc34ccca2 Mon Sep 17 00:00:00 2001 From: juuso-j Date: Mon, 11 Sep 2023 14:41:40 +0300 Subject: [PATCH 78/84] Bump certifi to 2023.7.22 --- requirements.txt | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/requirements.txt b/requirements.txt index da7c4ab35..db410ec3b 100644 --- a/requirements.txt +++ b/requirements.txt @@ -29,7 +29,7 @@ celery==5.2.3 # -r requirements.in # django-celery-beat # django-celery-results -certifi==2022.12.7 +certifi==2023.7.22 # via # requests # sentry-sdk @@ -201,7 +201,6 @@ pytz==2021.3 # pandas pyyaml==6.0.1 # via - # -r requirements.in # django-munigeo # drf-spectacular redis==4.4.4 From c1a474a504d370313129a4567172ec5d153b46d1 Mon Sep 17 00:00:00 2001 From: juuso-j Date: Mon, 11 Sep 2023 14:57:03 +0300 Subject: [PATCH 79/84] Bump sentry-sdk to >=1.14.0 --- requirements.in | 2 +- requirements.txt | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/requirements.in b/requirements.in index c7277ebdb..3c2d47ecc 100644 --- a/requirements.in +++ b/requirements.in @@ -14,7 +14,7 @@ django-extensions psycopg2-binary<2.9 django-mptt lxml>=4.9.1 -sentry-sdk +sentry-sdk>=1.14.0 pip-tools python-dateutil pytest-django diff --git a/requirements.txt b/requirements.txt index da7c4ab35..c3edb0e0d 100644 --- a/requirements.txt +++ b/requirements.txt @@ -216,7 +216,7 @@ requests-cache==0.8.1 # via -r requirements.in requests-mock==1.9.3 # via -r requirements.in 
-sentry-sdk==1.9.0 +sentry-sdk==1.30.0 # via -r requirements.in shapely==1.8.0 # via -r requirements.in @@ -249,7 +249,7 @@ uritemplate==4.1.1 # via drf-spectacular url-normalize==1.4.3 # via requests-cache -urllib3==1.26.7 +urllib3==1.26.16 # via # requests # requests-cache From 7cbc2b4193131ded8d4efbadf6269c6263b1428d Mon Sep 17 00:00:00 2001 From: juuso-j Date: Wed, 13 Sep 2023 07:52:06 +0300 Subject: [PATCH 80/84] Test data_from_year field --- eco_counter/tests/test_api.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/eco_counter/tests/test_api.py b/eco_counter/tests/test_api.py index 8f0afc7bb..9e7fc2ce7 100644 --- a/eco_counter/tests/test_api.py +++ b/eco_counter/tests/test_api.py @@ -3,6 +3,7 @@ import pytest from rest_framework.reverse import reverse +from .conftest import TEST_TIMESTAMP from .constants import TEST_EC_STATION_NAME @@ -271,6 +272,7 @@ def test__station(api_client, stations, year_datas): assert response.status_code == 200 assert response.json()["results"][0]["name"] == TEST_EC_STATION_NAME assert response.json()["results"][0]["sensor_types"] == ["at"] + assert response.json()["results"][0]["data_from_year"] == TEST_TIMESTAMP.year # Test retrieving station by data type url = reverse("eco_counter:stations-list") + "?data_type=a" response = api_client.get(url) From 24fb14b8ec6b385a5c70e3b0ac54d2c8253e08d8 Mon Sep 17 00:00:00 2001 From: juuso-j Date: Wed, 13 Sep 2023 08:16:38 +0300 Subject: [PATCH 81/84] Serialize data_from_year field --- eco_counter/api/serializers.py | 16 ++++++++++++++++ 1 file changed, 16 insertions(+) diff --git a/eco_counter/api/serializers.py b/eco_counter/api/serializers.py index d81362679..ea095df18 100644 --- a/eco_counter/api/serializers.py +++ b/eco_counter/api/serializers.py @@ -1,3 +1,4 @@ +from django.db.models import Q from rest_framework import serializers from ..models import ( @@ -35,6 +36,7 @@ class StationSerializer(serializers.ModelSerializer): lon = serializers.SerializerMethodField() lat = 
serializers.SerializerMethodField() sensor_types = serializers.SerializerMethodField() + data_from_year = serializers.SerializerMethodField() class Meta: model = Station @@ -52,6 +54,7 @@ class Meta: "lon", "lat", "sensor_types", + "data_from_year", ] def get_y(self, obj): @@ -79,6 +82,19 @@ def get_sensor_types(self, obj): result.append(type) return result + def get_data_from_year(self, obj): + q_exp = ( + Q(value_at__gt=0) + | Q(value_pt__gt=0) + | Q(value_jt__gt=0) + | Q(value_bt__gt=0) + ) + qs = YearData.objects.filter(q_exp, station=obj).order_by("year__year_number") + if qs.count() > 0: + return qs[0].year.year_number + else: + return None + class YearSerializer(serializers.ModelSerializer): station_name = serializers.PrimaryKeyRelatedField( From b7f4781b50095cecea6642e30b4c20ddde404cd7 Mon Sep 17 00:00:00 2001 From: juuso-j Date: Wed, 13 Sep 2023 14:53:05 +0300 Subject: [PATCH 82/84] Create ImportState if not found --- .../commands/import_telraam_to_csv.py | 40 +++++++++++++++++++ 1 file changed, 40 insertions(+) diff --git a/eco_counter/management/commands/import_telraam_to_csv.py b/eco_counter/management/commands/import_telraam_to_csv.py index 6ef2e0f5b..fb3a9b75c 100644 --- a/eco_counter/management/commands/import_telraam_to_csv.py +++ b/eco_counter/management/commands/import_telraam_to_csv.py @@ -7,6 +7,7 @@ import json import logging import os +import re from datetime import date, datetime, timedelta import pandas as pd @@ -158,6 +159,23 @@ def get_day_data( return res, delta_hours +def get_last_saved_date() -> date: + # Try to find the import from CSV file names + start_date = date.today() + pattern = r"^0" + # Go back 90 days, as three months is the maximum length that data is store in the telraam API + c = 90 + while c >= 0: + date_str = start_date.strftime("%d_%m_%Y").replace("_0", "_") + date_str = re.sub(pattern, "", date_str) + for filename in os.listdir(TELRAAM_COUNTER_CSV_FILE_PATH): + if filename.endswith(date_str + ".csv"): + return 
start_date + start_date -= timedelta(days=1) + c -= 1 + return None + + def save_dataframe(from_date: date = True) -> datetime: can_overwrite_csv_file = True if from_date else False if not os.path.exists(TELRAAM_COUNTER_CSV_FILE_PATH): @@ -171,6 +189,27 @@ def save_dataframe(from_date: date = True) -> datetime: ) else: import_state = ImportState.objects.filter(csv_data_source=TELRAAM_CSV).first() + # In case that a import state is not found, try to create a state + # by finding the last date a CSV file is saved. + if not import_state: + last_saved_date = get_last_saved_date() + if last_saved_date: + import_state = ImportState.objects.create( + csv_data_source=TELRAAM_CSV, + current_year_number=last_saved_date.year, + current_month_number=last_saved_date.month, + current_day_number=last_saved_date.day, + ) + else: + # As no date found set it to current date + date_today = date.today() + import_state = ImportState.objects.create( + csv_data_source=TELRAAM_CSV, + current_year_number=date_today.year, + current_month_number=date_today.month, + current_day_number=date_today.day, + ) + if not from_date: from_date = date( import_state.current_year_number, @@ -214,6 +253,7 @@ def save_dataframe(from_date: date = True) -> datetime: else: values_list.append(report[hour][value_key]) columns[key] = values_list + df = pd.DataFrame(data=columns, index=columns[INDEX_COLUMN_NAME]) df = df.drop(columns=[INDEX_COLUMN_NAME], axis=1) df.index.rename(INDEX_COLUMN_NAME, inplace=True) From cfb849f7588aaa570ccbd11119b121e42a8b4d53 Mon Sep 17 00:00:00 2001 From: juuso-j Date: Thu, 14 Sep 2023 11:07:43 +0300 Subject: [PATCH 83/84] Add constant for Telraam data time format --- eco_counter/constants.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/eco_counter/constants.py b/eco_counter/constants.py index ab296ad4b..94d4ab1a8 100644 --- a/eco_counter/constants.py +++ b/eco_counter/constants.py @@ -114,6 +114,8 @@ # from the beginning of the start tear TELRAAM_COUNTER_START_MONTH = 5 
TELRAAM_COUNTER_API_TIME_FORMAT = "%Y-%m-%d %H:%M:%S" +TELRAAM_COUNTER_DATA_TIME_FORMAT = "%Y-%m-%dT%H:%M:%SZ" + TELRAAM_COUNTER_CSV_FILE_PATH = f"{settings.MEDIA_ROOT}/telraam_data/" TELRAAM_COUNTER_CSV_FILE = ( TELRAAM_COUNTER_CSV_FILE_PATH + "telraam_data_{id}_{day}_{month}_{year}.csv" From e31522d42f3398a48c28acf53dc9b296d4dfe1b6 Mon Sep 17 00:00:00 2001 From: juuso-j Date: Thu, 14 Sep 2023 11:23:54 +0300 Subject: [PATCH 84/84] Correct offset for missing hours --- .../commands/import_telraam_to_csv.py | 87 +++++++++---------- 1 file changed, 41 insertions(+), 46 deletions(-) diff --git a/eco_counter/management/commands/import_telraam_to_csv.py b/eco_counter/management/commands/import_telraam_to_csv.py index fb3a9b75c..a6143a79a 100644 --- a/eco_counter/management/commands/import_telraam_to_csv.py +++ b/eco_counter/management/commands/import_telraam_to_csv.py @@ -21,6 +21,7 @@ TELRAAM_COUNTER_CAMERAS, TELRAAM_COUNTER_CSV_FILE, TELRAAM_COUNTER_CSV_FILE_PATH, + TELRAAM_COUNTER_DATA_TIME_FORMAT, TELRAAM_COUNTER_START_MONTH, TELRAAM_COUNTER_START_YEAR, TELRAAM_COUNTER_TRAFFIC_URL, @@ -102,61 +103,61 @@ def get_delta_hours(from_date: datetime, end_date: datetime) -> datetime: def get_day_data( - day_date: date, camera_id: str, utf_offset: datetime, check_delta_hours: bool = True -) -> tuple[list, int]: + day_date: date, camera_id: str, utc_offset: datetime, check_delta_hours: bool = True +) -> list: from_datetime = ( - datetime(day_date.year, day_date.month, day_date.day, 0, 0, 0) - utf_offset - ) + datetime(day_date.year, day_date.month, day_date.day, 0, 0, 0) + ) - utc_offset from_datetime_str = from_datetime.strftime(TELRAAM_COUNTER_API_TIME_FORMAT) end_datetime = ( datetime(day_date.year, day_date.month, day_date.day) + timedelta(hours=23) + timedelta(minutes=59) - ) - utf_offset + ) - utc_offset end_datetime_str = end_datetime.strftime(TELRAAM_COUNTER_API_TIME_FORMAT) report = fetch_traffic_report(from_datetime_str, end_datetime_str, camera_id) - delta_hours = 
len(report)
-    if not report:
-        logger.warning(
-            f"No report found for camera {camera_id}, populating with empty dicts"
-        )
-        report = [{} for a in range(delta_hours)]
-    else:
-        logger.info(
-            f"Imorted report with {len(report)} elements for camera {camera_id}"
-        )
-    if check_delta_hours and delta_hours != 24:
-        dif = 24 - delta_hours
-        if day_date == date.today():
-            logger.warning(
-                f"Fetched report with delta_hours not equal to 24, appending missing {dif} empty dicts."
-            )
-            report += [{} for a in range(dif)]
-
-        else:
-            # Case when camera gets turned on in the middle of day.
-            logger.warning(
-                f"Fetched report with delta_hours not equal to 24, adding missing {dif} empty dicts to start of report."
-            )
-            report = [{} for a in range(dif)] + report
-        delta_hours = len(report)
+    logger.info(
+        f"Imported report with {len(report)} elements for camera {camera_id}, for date {str(day_date)}"
+    )
     res = []
-    start_date = from_datetime
-    for item in report:
+    start_datetime = from_datetime + utc_offset
+    # As fetched data might not include data for every hour, use report_index variable to index
+    report_index = 0
+    # Add value for every hour
+    while start_datetime <= end_datetime + utc_offset:
         d = {}
-        d["date"] = datetime.strftime(start_date, TELRAAM_COUNTER_API_TIME_FORMAT)
+        d["date"] = datetime.strftime(start_datetime, TELRAAM_COUNTER_API_TIME_FORMAT)
+        item_datetime = None
+        report_item = None
+        if report_index < len(report):
+            report_item = report[report_index]
+            item_datetime = report_item["date"].replace(".000", "")
+            item_datetime = (
+                datetime.strptime(item_datetime, TELRAAM_COUNTER_DATA_TIME_FORMAT)
+                + utc_offset
+            )
+        # If datetimes are equal, the fetched report contains data for given start_datetime
+        if item_datetime == start_datetime:
+            # On the next iteration read the next element in report
+            report_index += 1
+        else:
+            report_item = None
+
         for veh in VEHICLE_TYPES.keys():
             for dir in DIRECTIONS:
                 if dir == TOTAL:
                     key = f"{veh}{dir}"
                 else:
                     key = f"{veh}_{dir}"
- val = int(round(item.get(key, 0))) + if report_item: + val = int(round(report_item.get(key, 0))) + else: + val = 0 d[key] = val res.append(d) - start_date += timedelta(hours=1) - return res, delta_hours + start_datetime += timedelta(hours=1) + return res def get_last_saved_date() -> date: @@ -224,20 +225,15 @@ def save_dataframe(from_date: date = True) -> datetime: for camera in cameras: start_date = from_date while start_date <= date_today: - report, delta_hours = get_day_data( - start_date, camera["instance_id"], utc_offset - ) + report = get_day_data(start_date, camera["instance_id"], utc_offset) mappings = get_mappings( camera["mac"], direction=TELRAAM_COUNTER_CAMERAS[camera["mac"]] ) columns = {} columns[INDEX_COLUMN_NAME] = [] - for hour in range(delta_hours): - col_date = ( - datetime.strptime( - report[hour]["date"], TELRAAM_COUNTER_API_TIME_FORMAT - ) - + utc_offset + for hour in range(len(report)): + col_date = datetime.strptime( + report[hour]["date"], TELRAAM_COUNTER_API_TIME_FORMAT ) col_date_str = col_date.strftime(TELRAAM_COUNTER_API_TIME_FORMAT) columns[INDEX_COLUMN_NAME].append(col_date_str) @@ -302,5 +298,4 @@ def handle(self, *args, **options): return until_date = save_dataframe(from_date) - logger.info(f"Telraam data imported until {str(until_date)}")