diff --git a/eco_counter/admin.py b/eco_counter/admin.py
index 4a8bd6c82..452689ba5 100644
--- a/eco_counter/admin.py
+++ b/eco_counter/admin.py
@@ -55,8 +55,16 @@ def get_date(self, obj):
 
 
 class ImportStateAdmin(admin.ModelAdmin):
+    list_display = (
+        "id",
+        "csv_data_source",
+        "current_year_number",
+        "current_month_number",
+        "current_day_number",
+    )
+
     def get_readonly_fields(self, request, obj=None):
-        return [f.name for f in self.model._meta.fields]
+        return ["csv_data_source"]
 
 
 class StationAdmin(admin.ModelAdmin):
diff --git a/eco_counter/constants.py b/eco_counter/constants.py
index c41829e77..12c9b2f54 100644
--- a/eco_counter/constants.py
+++ b/eco_counter/constants.py
@@ -1,5 +1,6 @@
 import platform
 import types
+from datetime import datetime
 
 import requests
 from django.conf import settings
@@ -12,7 +13,7 @@
 # Manually define the end year, as the source data comes from the page
 # defined in env variable TRAFFIC_COUNTER_OBSERVATIONS_BASE_URL.
 # Change end year when data for the next year is available.
-TRAFFIC_COUNTER_END_YEAR = 2023
+TRAFFIC_COUNTER_END_YEAR = datetime.today().year
 ECO_COUNTER_START_YEAR = 2020
 LAM_COUNTER_START_YEAR = 2010
 TELRAAM_COUNTER_START_YEAR = 2023
diff --git a/eco_counter/management/commands/utils.py b/eco_counter/management/commands/utils.py
index 5a9169129..e358db319 100644
--- a/eco_counter/management/commands/utils.py
+++ b/eco_counter/management/commands/utils.py
@@ -206,7 +206,10 @@ def get_traffic_counter_csv(start_year=2015):
         # data from years before the start year.
         if key <= start_year:
             continue
-        concat_df = get_dataframe(TRAFFIC_COUNTER_CSV_URLS[key])
+        try:
+            concat_df = get_dataframe(TRAFFIC_COUNTER_CSV_URLS[key])
+        except AssertionError:
+            continue
         # ignore_index=True, do not use the index values along the concatenation axis.
         # The resulting axis will be labeled 0, …, n - 1.
         df = pd.concat([df, concat_df], ignore_index=True)
diff --git a/exceptional_situations/README.md b/exceptional_situations/README.md
new file mode 100644
index 000000000..59d54b69f
--- /dev/null
+++ b/exceptional_situations/README.md
@@ -0,0 +1,15 @@
+# Exceptional Situations App
+App for importing, storing and serving exceptional situations, such as road works and traffic announcements.
+
+## Importing data
+### Traffic Announcements
+Imports road works and traffic announcements in Southwest Finland from digitraffic.fi.
+To import, run:
+`./manage.py import_traffic_situations`
+
+### Delete inactive situations
+`./manage.py delete_inactive_situations`
+Also deletes the related announcements and locations.
+
+## API Documentation
+See the online Swagger documentation.
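As a quick illustration of how the imported situations can be consumed, the sketch below queries the `situation` endpoint registered in `exceptional_situations/api/urls.py`, using the `is_active` and `start_time__gt` filters defined in the API's filter set. The host, port and prefix under which the router is mounted are assumptions that depend on the project's root URL configuration, so adjust `BASE_URL` to match the deployment; the timestamp is only an example value.

```python
# Minimal sketch of querying the exceptional situations API (assumed local dev setup).
import requests

# Assumption: the app's router (path "api/v1/...") is reachable under this prefix.
BASE_URL = "http://localhost:8000/exceptional_situations/api/v1"

# Fetch only currently active situations that started after the given time.
params = {
    "is_active": "true",
    "start_time__gt": "2024-03-01T00:00:00",
}
response = requests.get(f"{BASE_URL}/situation/", params=params, timeout=10)
response.raise_for_status()

# The list endpoint is paginated; each result carries its serialized announcements inline.
for situation in response.json()["results"]:
    print(situation["situation_id"], situation["situation_type_str"])
    for announcement in situation["announcements"]:
        print("  -", announcement["title"], announcement["start_time"])
```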
diff --git a/exceptional_situations/__init__.py b/exceptional_situations/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/exceptional_situations/admin.py b/exceptional_situations/admin.py new file mode 100644 index 000000000..46cb8a8bb --- /dev/null +++ b/exceptional_situations/admin.py @@ -0,0 +1,33 @@ +from django.contrib.gis import admin + +from exceptional_situations.models import ( + Situation, + SituationAnnouncement, + SituationLocation, + SituationType, +) + + +class SituationAdmin(admin.ModelAdmin): + list_display = ("is_active", "start_time", "end_time") + + +class SituationTypeAdmin(admin.ModelAdmin): + list_display = ("type_name", "sub_type_name") + + +class SituationAnnouncementAdmin(admin.ModelAdmin): + list_display = ("title", "start_time", "end_time") + + +class SituationLocationAdmin(admin.OSMGeoAdmin): + list_display = ("id", "title", "geometry") + + def title(self, obj): + return obj.announcement.title + + +admin.site.register(Situation, SituationAdmin) +admin.site.register(SituationType, SituationTypeAdmin) +admin.site.register(SituationAnnouncement, SituationAnnouncementAdmin) +admin.site.register(SituationLocation, SituationLocationAdmin) diff --git a/exceptional_situations/api/serializers.py b/exceptional_situations/api/serializers.py new file mode 100644 index 000000000..983cde86a --- /dev/null +++ b/exceptional_situations/api/serializers.py @@ -0,0 +1,58 @@ +from rest_framework import serializers + +from exceptional_situations.models import ( + Situation, + SituationAnnouncement, + SituationLocation, + SituationType, +) + + +class SituationLocationSerializer(serializers.ModelSerializer): + class Meta: + model = SituationLocation + fields = ["id", "location", "geometry", "details"] + + +class SituationAnnouncementSerializer(serializers.ModelSerializer): + location = SituationLocationSerializer() + + class Meta: + model = SituationAnnouncement + fields = [ + "id", + "title", + "description", + "start_time", + "end_time", + "additional_info", + "location", + ] + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + + +class SituationTypeSerializer(serializers.ModelSerializer): + class Meta: + model = SituationType + fields = "__all__" + + +class SituationSerializer(serializers.ModelSerializer): + announcements = SituationAnnouncementSerializer(many=True, read_only=True) + + class Meta: + model = Situation + fields = [ + "id", + "is_active", + "start_time", + "end_time", + "situation_id", + "release_time", + "situation_type", + "situation_type_str", + "situation_sub_type_str", + "announcements", + ] diff --git a/exceptional_situations/api/urls.py b/exceptional_situations/api/urls.py new file mode 100644 index 000000000..13ae06a4e --- /dev/null +++ b/exceptional_situations/api/urls.py @@ -0,0 +1,24 @@ +from django.urls import include, path +from rest_framework import routers + +from exceptional_situations.api import views + +app_name = "exceptional_situations" + + +router = routers.DefaultRouter() + +router.register("situation", views.SituationViewSet, basename="situation") +router.register("situation_type", views.SituationTypeViewSet, basename="situation_type") +router.register( + "situation_location", views.SituationLocationViewSet, basename="situation_location" +) +router.register( + "situation_announcement", + views.SituationAnnouncementViewSet, + basename="situation_announcement", +) + +urlpatterns = [ + path("api/v1/", include(router.urls), name="exceptional_situations"), +] diff --git 
a/exceptional_situations/api/views.py b/exceptional_situations/api/views.py
new file mode 100644
index 000000000..f14488ba1
--- /dev/null
+++ b/exceptional_situations/api/views.py
@@ -0,0 +1,87 @@
+import django_filters
+from django_filters.rest_framework import DjangoFilterBackend
+from rest_framework import viewsets
+
+from exceptional_situations.api.serializers import (
+    SituationAnnouncementSerializer,
+    SituationLocationSerializer,
+    SituationSerializer,
+    SituationTypeSerializer,
+)
+from exceptional_situations.models import (
+    Situation,
+    SituationAnnouncement,
+    SituationLocation,
+    SituationType,
+)
+
+
+class SituationFilter(django_filters.FilterSet):
+    is_active = django_filters.BooleanFilter(method="filter_is_active")
+    situation_type_str = django_filters.CharFilter(method="filter_situation_type_str")
+    start_time__gt = django_filters.DateTimeFilter(method="filter_start_time__gt")
+    start_time__lt = django_filters.DateTimeFilter(method="filter_start_time__lt")
+    end_time__gt = django_filters.DateTimeFilter(method="filter_end_time__gt")
+    end_time__lt = django_filters.DateTimeFilter(method="filter_end_time__lt")
+
+    class Meta:
+        model = Situation
+        fields = {
+            "situation_type": ["exact"],
+            "situation_id": ["exact"],
+            "release_time": ["lt", "gt"],
+        }
+
+    def filter_situation_type_str(self, queryset, fields, situation_type_str):
+        ids = [
+            obj.id for obj in queryset if obj.situation_type_str == situation_type_str
+        ]
+        return queryset.filter(id__in=ids)
+
+    def filter_is_active(self, queryset, fields, active):
+        ids = [obj.id for obj in queryset if obj.is_active == bool(active)]
+        return queryset.filter(id__in=ids)
+
+    def filter_start_time__gt(self, queryset, fields, start_time):
+        ids = [obj.id for obj in queryset if obj.start_time > start_time]
+        return queryset.filter(id__in=ids)
+
+    def filter_start_time__lt(self, queryset, fields, start_time):
+        ids = [obj.id for obj in queryset if obj.start_time < start_time]
+        return queryset.filter(id__in=ids)
+
+    def filter_end_time__gt(self, queryset, fields, end_time):
+        ids = [obj.id for obj in queryset if obj.end_time and obj.end_time > end_time]
+        return queryset.filter(id__in=ids)
+
+    def filter_end_time__lt(self, queryset, fields, end_time):
+        ids = [obj.id for obj in queryset if obj.end_time and obj.end_time < end_time]
+        return queryset.filter(id__in=ids)
+
+
+class SituationViewSet(viewsets.ReadOnlyModelViewSet):
+    queryset = Situation.objects.all()
+    serializer_class = SituationSerializer
+    filter_backends = [DjangoFilterBackend]
+    filterset_class = SituationFilter
+
+    def list(self, request, *args, **kwargs):
+        queryset = self.filter_queryset(self.queryset)
+        page = self.paginate_queryset(queryset)
+        serializer = self.serializer_class(page, many=True)
+        return self.get_paginated_response(serializer.data)
+
+
+class SituationLocationViewSet(viewsets.ReadOnlyModelViewSet):
+    queryset = SituationLocation.objects.all()
+    serializer_class = SituationLocationSerializer
+
+
+class SituationAnnouncementViewSet(viewsets.ReadOnlyModelViewSet):
+    queryset = SituationAnnouncement.objects.all()
+    serializer_class = SituationAnnouncementSerializer
+
+
+class SituationTypeViewSet(viewsets.ReadOnlyModelViewSet):
+    queryset = SituationType.objects.all()
+    serializer_class = SituationTypeSerializer
diff --git a/exceptional_situations/apps.py b/exceptional_situations/apps.py
new file mode 100644
index 000000000..c1addb764
--- /dev/null
+++ b/exceptional_situations/apps.py
@@ -0,0 +1,6 @@
+from django.apps import AppConfig
+
+
+class 
ExceptionalSituationsConfig(AppConfig): + default_auto_field = "django.db.models.BigAutoField" + name = "exceptional_situations" diff --git a/exceptional_situations/management/commands/delete_inactive_situations.py b/exceptional_situations/management/commands/delete_inactive_situations.py new file mode 100644 index 000000000..629497b1d --- /dev/null +++ b/exceptional_situations/management/commands/delete_inactive_situations.py @@ -0,0 +1,18 @@ +import logging + +from django.core.management import BaseCommand + +from exceptional_situations.models import Situation, SituationAnnouncement + +logger = logging.getLogger(__name__) + + +class Command(BaseCommand): + def handle(self, *args, **options): + num_deleted = 0 + for situation in Situation.objects.all(): + if situation.is_active is False: + SituationAnnouncement.objects.filter(situation=situation).delete() + situation.delete() + num_deleted += 1 + logger.info(f"Deleted {num_deleted} inactive situations.") diff --git a/exceptional_situations/management/commands/import_traffic_situations.py b/exceptional_situations/management/commands/import_traffic_situations.py new file mode 100644 index 000000000..80e23c37b --- /dev/null +++ b/exceptional_situations/management/commands/import_traffic_situations.py @@ -0,0 +1,145 @@ +""" +Imports road works and traffic announcements in Southwest Finland from digitraffic.fi. +""" + +import logging +from copy import deepcopy +from datetime import datetime + +import requests +from dateutil import parser +from django.contrib.gis.geos import GEOSGeometry, Polygon +from django.core.management import BaseCommand +from django.utils import timezone + +from exceptional_situations.models import ( + PROJECTION_SRID, + Situation, + SituationAnnouncement, + SituationLocation, + SituationType, +) +from mobility_data.importers.constants import ( + SOUTHWEST_FINLAND_BOUNDARY, + SOUTHWEST_FINLAND_BOUNDARY_SRID, +) + +logger = logging.getLogger(__name__) +ROAD_WORK_URL = ( + "https://tie.digitraffic.fi/api/traffic-message/v1/messages" + "?inactiveHours=0&includeAreaGeometry=true&situationType=ROAD_WORK" +) +TRAFFIC_ANNOUNCEMENT_URL = ( + "https://tie.digitraffic.fi/api/traffic-message/v1/messages" + "?inactiveHours=0&includeAreaGeometry=true&situationType=TRAFFIC_ANNOUNCEMENT" +) +URLS = [ROAD_WORK_URL, TRAFFIC_ANNOUNCEMENT_URL] +DATETIME_FORMAT = "%Y-%m-%dT%H:%M:%S.%fZ" +SOUTHWEST_FINLAND_POLYGON = Polygon( + SOUTHWEST_FINLAND_BOUNDARY, srid=SOUTHWEST_FINLAND_BOUNDARY_SRID +) + + +class Command(BaseCommand): + def get_geos_geometry(self, feature_data): + return GEOSGeometry(str(feature_data["geometry"]), srid=PROJECTION_SRID) + + def create_location(self, geometry, announcement_data): + location = None + details = announcement_data["locationDetails"].get("roadAddressLocation", None) + details.update(announcement_data.get("location", None)) + filter = { + "geometry": geometry, + "location": location, + "details": details, + } + situation_location = SituationLocation.objects.create(**filter) + return situation_location + + def create_announcement(self, announcement_data, situation_location): + title = announcement_data.get("title", "") + description = announcement_data["location"].get("description", "") + additional_info = {} + for road_work_phase in announcement_data.get("roadWorkPhases", []): + del road_work_phase["locationDetails"] + del road_work_phase["location"] + additional_info.update(road_work_phase) + + additional_info.update( + { + "additionalInformation": announcement_data.get( + "additionalInformation", None + ) 
+ } + ) + additional_info.update({"sender": announcement_data.get("sender", None)}) + start_time = parser.parse( + announcement_data["timeAndDuration"].get("startTime", None) + ) + end_time = announcement_data["timeAndDuration"].get("endTime", None) + # Note, endTime can be None (unknown) + if end_time: + end_time = parser.parse(end_time) + filter = { + "location": situation_location, + "title": title, + "description": description, + "additional_info": additional_info, + "start_time": start_time, + "end_time": end_time, + } + situation_announcement = SituationAnnouncement.objects.create(**filter) + return situation_announcement + + def handle(self, *args, **options): + num_imported = 0 + for url in URLS: + try: + response = requests.get(url) + assert response.status_code == 200 + except AssertionError: + continue + features = response.json()["features"] + + for feature_data in features: + geometry = self.get_geos_geometry(feature_data) + if not SOUTHWEST_FINLAND_POLYGON.intersects(geometry): + continue + + properties = feature_data.get("properties", None) + if not properties: + continue + situation_id = properties.get("situationId", None) + release_time = properties.get("releaseTime", None) + release_time = datetime.strptime(release_time, DATETIME_FORMAT).replace( + microsecond=0 + ) + release_time = timezone.make_aware(release_time, timezone.utc) + + type_name = properties.get("situationType", None) + sub_type_name = properties.get("trafficAnnouncementType", None) + + situation_type, _ = SituationType.objects.get_or_create( + type_name=type_name, sub_type_name=sub_type_name + ) + + filter = { + "situation_id": situation_id, + "situation_type": situation_type, + } + situation, _ = Situation.objects.get_or_create(**filter) + situation.release_time = release_time + situation.save() + + SituationAnnouncement.objects.filter(situation=situation).delete() + situation.announcements.clear() + for announcement_data in properties.get("announcements", []): + situation_location = self.create_location( + geometry, announcement_data + ) + situation_announcement = self.create_announcement( + deepcopy(announcement_data), situation_location + ) + situation.announcements.add(situation_announcement) + num_imported += 1 + logger.info(f"Imported/updated {num_imported} traffic situations.") diff --git a/exceptional_situations/migrations/0001_initial.py b/exceptional_situations/migrations/0001_initial.py new file mode 100644 index 000000000..7ae959154 --- /dev/null +++ b/exceptional_situations/migrations/0001_initial.py @@ -0,0 +1,137 @@ +# Generated by Django 4.1.13 on 2024-03-27 11:12 + +import django.contrib.gis.db.models.fields +import django.db.models.deletion +from django.db import migrations, models + + +class Migration(migrations.Migration): + + initial = True + + dependencies = [] + + operations = [ + migrations.CreateModel( + name="SituationLocation", + fields=[ + ( + "id", + models.BigAutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), + ( + "location", + django.contrib.gis.db.models.fields.PointField( + blank=True, null=True, srid=4326 + ), + ), + ( + "geometry", + django.contrib.gis.db.models.fields.GeometryField( + blank=True, null=True, srid=4326 + ), + ), + ("details", models.JSONField(blank=True, null=True)), + ], + options={ + "ordering": ["id"], + }, + ), + migrations.CreateModel( + name="SituationType", + fields=[ + ( + "id", + models.BigAutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), + 
("type_name", models.CharField(max_length=64)), + ( + "sub_type_name", + models.CharField(blank=True, max_length=64, null=True), + ), + ], + options={ + "ordering": ["id"], + }, + ), + migrations.CreateModel( + name="SituationAnnouncement", + fields=[ + ( + "id", + models.BigAutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), + ("title", models.CharField(max_length=128)), + ("title_fi", models.CharField(max_length=128, null=True)), + ("title_sv", models.CharField(max_length=128, null=True)), + ("title_en", models.CharField(max_length=128, null=True)), + ("description", models.TextField(blank=True, null=True)), + ("description_fi", models.TextField(blank=True, null=True)), + ("description_sv", models.TextField(blank=True, null=True)), + ("description_en", models.TextField(blank=True, null=True)), + ("start_time", models.DateTimeField()), + ("end_time", models.DateTimeField(blank=True, null=True)), + ("additional_info", models.JSONField(blank=True, null=True)), + ( + "location", + models.OneToOneField( + blank=True, + null=True, + on_delete=django.db.models.deletion.CASCADE, + related_name="announcement", + to="exceptional_situations.situationlocation", + ), + ), + ], + options={ + "ordering": ["start_time"], + }, + ), + migrations.CreateModel( + name="Situation", + fields=[ + ( + "id", + models.BigAutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), + ("situation_id", models.CharField(max_length=64)), + ("release_time", models.DateTimeField(blank=True, null=True)), + ( + "announcements", + models.ManyToManyField( + to="exceptional_situations.situationannouncement" + ), + ), + ( + "situation_type", + models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, + to="exceptional_situations.situationtype", + ), + ), + ], + options={ + "ordering": ["id"], + }, + ), + ] diff --git a/exceptional_situations/migrations/__init__.py b/exceptional_situations/migrations/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/exceptional_situations/models.py b/exceptional_situations/models.py new file mode 100644 index 000000000..7b138ec46 --- /dev/null +++ b/exceptional_situations/models.py @@ -0,0 +1,115 @@ +from datetime import datetime + +from django.contrib.gis.db import models +from django.utils import timezone + +PROJECTION_SRID = 4326 + + +class SituationType(models.Model): + type_name = models.CharField(max_length=64) + sub_type_name = models.CharField(max_length=64, null=True, blank=True) + + class Meta: + ordering = ["id"] + + def __str__(self): + return "%s (%s)" % (self.type_name, self.id) + + +class SituationLocation(models.Model): + location = models.PointField(null=True, blank=True, srid=PROJECTION_SRID) + geometry = models.GeometryField(null=True, blank=True, srid=PROJECTION_SRID) + details = models.JSONField(null=True, blank=True) + + class Meta: + ordering = ["id"] + + +class SituationAnnouncement(models.Model): + title = models.CharField(max_length=128) + description = models.TextField(null=True, blank=True) + start_time = models.DateTimeField() + end_time = models.DateTimeField(null=True, blank=True) + additional_info = models.JSONField(null=True, blank=True) + location = models.OneToOneField( + SituationLocation, + on_delete=models.CASCADE, + null=True, + blank=True, + related_name="announcement", + ) + + class Meta: + ordering = ["start_time"] + + def __str__(self): + return "%s (%s)" % (self.title, self.id) + + +class Situation(models.Model): + situation_id = 
models.CharField(max_length=64)
+    situation_type = models.ForeignKey(SituationType, on_delete=models.CASCADE)
+    release_time = models.DateTimeField(null=True, blank=True)
+    announcements = models.ManyToManyField(SituationAnnouncement)
+
+    class Meta:
+        ordering = ["id"]
+
+    @property
+    def situation_type_str(self) -> str:
+        return self.situation_type.type_name
+
+    @property
+    def situation_sub_type_str(self) -> str:
+        return self.situation_type.sub_type_name
+
+    @property
+    def is_active(self) -> bool:
+        if not self.announcements.exists():
+            return False
+
+        start_times_in_future = all(
+            {a.start_time > timezone.now() for a in self.announcements.all()}
+        )
+        # If all start times are in the future, return False
+        if start_times_in_future:
+            return False
+        # If one or more end_time is null (unknown), the situation is active
+        if self.announcements.filter(end_time__isnull=True).exists():
+            return True
+
+        # Return True if at least one announcement's end_time is still in the future
+        return any(
+            {
+                a.end_time > timezone.now()
+                for a in self.announcements.filter(end_time__isnull=False)
+            }
+        )
+
+    @property
+    def start_time(self) -> datetime:
+        """
+        Return the start_time that is furthest in history.
+        """
+        start_time = None
+        for announcement in self.announcements.all():
+            if not start_time:
+                start_time = announcement.start_time
+            if announcement.start_time < start_time:
+                start_time = announcement.start_time
+        return start_time
+
+    @property
+    def end_time(self) -> datetime:
+        """
+        Return the end_time that is furthest in the future.
+        """
+        end_time = None
+        for announcement in self.announcements.filter(end_time__isnull=False):
+            if not end_time:
+                end_time = announcement.end_time
+
+            if announcement.end_time > end_time:
+                end_time = announcement.end_time
+        return end_time
diff --git a/exceptional_situations/tasks.py b/exceptional_situations/tasks.py
new file mode 100644
index 000000000..3dcc90208
--- /dev/null
+++ b/exceptional_situations/tasks.py
@@ -0,0 +1,13 @@
+from django.core import management
+
+from smbackend.utils import shared_task_email
+
+
+@shared_task_email
+def import_traffic_situations(name="import_traffic_situations"):
+    management.call_command("import_traffic_situations")
+
+
+@shared_task_email
+def delete_inactive_situations(name="delete_inactive_situations"):
+    management.call_command("delete_inactive_situations")
diff --git a/exceptional_situations/tests/__init__.py b/exceptional_situations/tests/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/exceptional_situations/tests/conftest.py b/exceptional_situations/tests/conftest.py
new file mode 100644
index 000000000..a91ed5434
--- /dev/null
+++ b/exceptional_situations/tests/conftest.py
@@ -0,0 +1,115 @@
+from datetime import timedelta
+
+import pytest
+from django.contrib.gis.geos import GEOSGeometry
+from django.utils import timezone
+from rest_framework.test import APIClient
+
+from exceptional_situations.models import (
+    Situation,
+    SituationAnnouncement,
+    SituationLocation,
+    SituationType,
+)
+
+NOW = timezone.now()
+
+
+@pytest.fixture
+def api_client():
+    return APIClient()
+
+
+@pytest.mark.django_db
+@pytest.fixture
+def situation_types():
+    SituationType.objects.create(
+        type_name="test type name", sub_type_name="test sub type name"
+    )
+    return SituationType.objects.all()
+
+
+@pytest.mark.django_db
+@pytest.fixture
+def locations():
+    json_data = {"test_key": "test_value"}
+    SituationLocation.objects.create(
+        details=json_data, geometry=GEOSGeometry("POINT(0 0)")
+    )
+    
SituationLocation.objects.create( + details=json_data, geometry=GEOSGeometry("POINT(1 0)") + ) + SituationLocation.objects.create( + details=json_data, geometry=GEOSGeometry("POINT(0 1)") + ) + + return SituationLocation.objects.all() + + +@pytest.mark.django_db +@pytest.fixture +def announcements(locations): + json_data = {"test_key": "test_value"} + SituationAnnouncement.objects.create( + title="two hours", + description="two hours long situation", + additional_info=json_data, + location=locations[0], + start_time=NOW - timedelta(hours=1), + end_time=NOW + timedelta(hours=1), + ) + SituationAnnouncement.objects.create( + title="two days", + description="two days long situation", + additional_info=json_data, + location=locations[1], + start_time=NOW - timedelta(days=1), + end_time=NOW + timedelta(days=1), + ) + + return SituationAnnouncement.objects.all() + + +@pytest.mark.django_db +@pytest.fixture +def inactive_announcements(locations): + json_data = {"test_key": "test_value"} + SituationAnnouncement.objects.create( + title="in past", + description="inactive announcement", + additional_info=json_data, + location=locations[2], + start_time=NOW - timedelta(days=2), + end_time=NOW - timedelta(days=1), + ) + return SituationAnnouncement.objects.all() + + +@pytest.mark.django_db +@pytest.fixture +def inactive_situations(situation_types, inactive_announcements): + situation = Situation.objects.create( + release_time=NOW, + situation_id="inactive", + situation_type=situation_types.first(), + ) + situation.announcements.add(inactive_announcements.first()) + return Situation.objects.all() + + +@pytest.mark.django_db +@pytest.fixture +def situations(situation_types, announcements): + situation = Situation.objects.create( + release_time=NOW, + situation_id="TwoHoursLong", + situation_type=situation_types.first(), + ) + situation.announcements.add(announcements[0]) + situation = Situation.objects.create( + release_time=NOW - timedelta(days=1), + situation_id="TwoDaysLong", + situation_type=situation_types.first(), + ) + situation.announcements.add(announcements[1]) + return Situation.objects.all() diff --git a/exceptional_situations/tests/test_api.py b/exceptional_situations/tests/test_api.py new file mode 100644 index 000000000..12c78b227 --- /dev/null +++ b/exceptional_situations/tests/test_api.py @@ -0,0 +1,215 @@ +from datetime import datetime, timedelta + +import pytest +from django.utils import timezone +from rest_framework.reverse import reverse + +SITUATION_LIST_URL = reverse("exceptional_situations:situation-list") +DATETIME_FORMAT = "%Y-%m-%dT%H:%M:%S" + + +@pytest.mark.django_db +def test_situations_list(api_client, situations, inactive_situations): + response = api_client.get(SITUATION_LIST_URL) + assert response.status_code == 200 + json_data = response.json() + assert json_data.keys() == {"count", "next", "previous", "results"} + assert json_data["count"] == 3 + result_data = json_data["results"][0] + assert result_data.keys() == { + "id", + "is_active", + "start_time", + "end_time", + "situation_id", + "release_time", + "situation_type", + "situation_type_str", + "situation_sub_type_str", + "announcements", + } + assert len(result_data["announcements"]) == 1 + announcement = result_data["announcements"][0] + assert announcement.keys() == { + "id", + "title", + "description", + "start_time", + "end_time", + "additional_info", + "location", + } + location = announcement["location"] + assert location.keys() == {"id", "location", "geometry", "details"} + + +@pytest.mark.django_db +def 
test_situation_retrieve(api_client, situations): + response = api_client.get( + reverse( + "exceptional_situations:situation-detail", kwargs={"pk": situations[0].pk} + ) + ) + assert response.status_code == 200 + json_data = response.json() + assert json_data.keys() == { + "id", + "is_active", + "start_time", + "end_time", + "situation_id", + "release_time", + "situation_type", + "situation_type_str", + "situation_sub_type_str", + "announcements", + } + assert json_data["id"] == situations[0].pk + assert json_data["is_active"] is True + + +@pytest.mark.django_db +def test_situation_filter_by_start_time(api_client, situations): + start_time = timezone.now() + response = api_client.get( + SITUATION_LIST_URL + + f"?start_time__gt={datetime.strftime(start_time, DATETIME_FORMAT)}" + ) + assert response.json()["count"] == 1 + response = api_client.get( + SITUATION_LIST_URL + + f"?start_time__lt={datetime.strftime(start_time, DATETIME_FORMAT)}" + ) + assert response.json()["count"] == 1 + + start_time = timezone.now() - timedelta(days=2) + response = api_client.get( + SITUATION_LIST_URL + + f"?start_time__gt={datetime.strftime(start_time, DATETIME_FORMAT)}" + ) + assert response.json()["count"] == 2 + response = api_client.get( + SITUATION_LIST_URL + + f"?start_time__lt={datetime.strftime(start_time, DATETIME_FORMAT)}" + ) + assert response.json()["count"] == 0 + + +@pytest.mark.django_db +def test_situation_filter_by_end_time(api_client, situations): + end_time = timezone.now() + response = api_client.get( + SITUATION_LIST_URL + + f"?end_time__gt={datetime.strftime(end_time, DATETIME_FORMAT)}" + ) + assert response.json()["count"] == 1 + response = api_client.get( + SITUATION_LIST_URL + + f"?end_time__lt={datetime.strftime(end_time, DATETIME_FORMAT)}" + ) + assert response.json()["count"] == 1 + + end_time = timezone.now() - timedelta(days=2) + response = api_client.get( + SITUATION_LIST_URL + + f"?end_time__gt={datetime.strftime(end_time, DATETIME_FORMAT)}" + ) + assert response.json()["count"] == 2 + response = api_client.get( + SITUATION_LIST_URL + + f"?end_time__lt={datetime.strftime(end_time, DATETIME_FORMAT)}" + ) + assert response.json()["count"] == 0 + + +@pytest.mark.django_db +def test_situation_types_list(api_client, situation_types): + response = api_client.get(reverse("exceptional_situations:situation_type-list")) + assert response.status_code == 200 + json_data = response.json() + assert json_data.keys() == {"count", "next", "previous", "results"} + assert json_data["count"] == situation_types.count() + + +@pytest.mark.django_db +def test_situation_types_retrieve(api_client, situation_types): + response = api_client.get( + reverse( + "exceptional_situations:situation_type-detail", + kwargs={"pk": situation_types[0].pk}, + ) + ) + assert response.status_code == 200 + json_data = response.json() + assert json_data.keys() == {"id", "type_name", "sub_type_name"} + assert json_data["id"] == situation_types[0].pk + + +@pytest.mark.django_db +def test_announcement_list(api_client, announcements): + response = api_client.get( + reverse("exceptional_situations:situation_announcement-list") + ) + assert response.status_code == 200 + json_data = response.json() + assert json_data.keys() == {"count", "next", "previous", "results"} + assert json_data["count"] == announcements.count() + result_data = json_data["results"][0] + assert result_data.keys() == { + "id", + "title", + "description", + "start_time", + "end_time", + "additional_info", + "location", + } + location = result_data["location"] 
+ assert location.keys() == {"id", "location", "geometry", "details"} + + +@pytest.mark.django_db +def test_announcement_retrieve(api_client, announcements): + response = api_client.get( + reverse( + "exceptional_situations:situation_announcement-detail", + kwargs={"pk": announcements[0].pk}, + ) + ) + assert response.status_code == 200 + json_data = response.json() + assert json_data.keys() == { + "id", + "title", + "description", + "start_time", + "end_time", + "additional_info", + "location", + } + assert json_data["id"] == announcements[0].pk + + +@pytest.mark.django_db +def test_location_list(api_client, locations): + response = api_client.get(reverse("exceptional_situations:situation_location-list")) + assert response.status_code == 200 + json_data = response.json() + assert json_data.keys() == {"count", "next", "previous", "results"} + assert json_data["count"] == locations.count() + result_data = json_data["results"][0] + assert result_data.keys() == {"id", "location", "geometry", "details"} + + +@pytest.mark.django_db +def test_location_retrieve(api_client, locations): + response = api_client.get( + reverse( + "exceptional_situations:situation_location-detail", + kwargs={"pk": locations[0].pk}, + ) + ) + assert response.status_code == 200 + json_data = response.json() + assert json_data.keys() == {"id", "location", "geometry", "details"} + assert json_data["id"] == locations[0].pk diff --git a/exceptional_situations/tests/test_delete_inactive_situations.py b/exceptional_situations/tests/test_delete_inactive_situations.py new file mode 100644 index 000000000..f017e1391 --- /dev/null +++ b/exceptional_situations/tests/test_delete_inactive_situations.py @@ -0,0 +1,13 @@ +import pytest +from django.core.management import call_command + +from exceptional_situations.models import Situation, SituationAnnouncement + + +@pytest.mark.django_db +def test_delete_inactive_situations(inactive_situations, inactive_announcements): + assert Situation.objects.count() == 1 + assert SituationAnnouncement.objects.count() == 1 + call_command("delete_inactive_situations") + assert Situation.objects.count() == 0 + assert SituationAnnouncement.objects.count() == 0 diff --git a/exceptional_situations/tests/test_models.py b/exceptional_situations/tests/test_models.py new file mode 100644 index 000000000..db1c38fcd --- /dev/null +++ b/exceptional_situations/tests/test_models.py @@ -0,0 +1,78 @@ +from datetime import timedelta + +import pytest +from django.utils import timezone + +from exceptional_situations.models import Situation, SituationAnnouncement + +NOW = timezone.now() + + +@pytest.mark.django_db +def test_situation_is_active(situation_types): + announcement_1 = SituationAnnouncement.objects.create(start_time=NOW, title="test1") + announcement_2 = SituationAnnouncement.objects.create(start_time=NOW, title="test2") + situation = Situation.objects.create( + release_time=NOW, situation_type=situation_types.first(), situation_id="TestID" + ) + assert situation.is_active is False + + situation.announcements.add(announcement_1) + situation.announcements.add(announcement_2) + assert situation.is_active is True + + announcement_1.start_time = NOW - timedelta(days=2) + announcement_1.end_time = NOW - timedelta(days=1) + announcement_1.save() + announcement_2.start_time = NOW - timedelta(hours=2) + announcement_2.end_time = NOW - timedelta(hours=1) + announcement_2.save() + assert situation.is_active is False + + announcement_2.start_time = NOW - timedelta(hours=2) + announcement_2.end_time = NOW + 
timedelta(hours=1)
+    announcement_2.save()
+    assert situation.is_active is True
+    # Test that returns False if all start times are in future
+    announcement_1.start_time = NOW + timedelta(days=2)
+    announcement_1.end_time = NOW + timedelta(days=3)
+    announcement_1.save()
+    announcement_2.start_time = NOW + timedelta(hours=2)
+    announcement_2.end_time = NOW + timedelta(hours=3)
+    announcement_2.save()
+    assert situation.is_active is False
+
+
+@pytest.mark.django_db
+def test_situation_start_time(situation_types):
+    announcement_1 = SituationAnnouncement.objects.create(
+        start_time=NOW, title="starts now"
+    )
+    announcement_2 = SituationAnnouncement.objects.create(
+        start_time=NOW - timedelta(hours=1), title="started an hour ago"
+    )
+    situation = Situation.objects.create(
+        release_time=NOW, situation_type=situation_types.first(), situation_id="TestID"
+    )
+    situation.announcements.add(announcement_1)
+    situation.announcements.add(announcement_2)
+    assert situation.start_time == announcement_2.start_time
+
+
+@pytest.mark.django_db
+def test_situation_end_time(situation_types):
+    announcement_1 = SituationAnnouncement.objects.create(
+        start_time=NOW - timedelta(hours=1),
+        end_time=NOW + timedelta(hours=2),
+        title="ends after two hours",
+    )
+    announcement_2 = SituationAnnouncement.objects.create(
+        start_time=NOW, end_time=NOW + timedelta(days=2), title="ends after two days"
+    )
+    situation = Situation.objects.create(
+        release_time=NOW, situation_type=situation_types.first(), situation_id="TestID"
+    )
+    situation.announcements.add(announcement_1)
+    situation.announcements.add(announcement_2)
+    assert situation.end_time == announcement_2.end_time
+    assert situation.start_time == announcement_1.start_time
diff --git a/exceptional_situations/translation.py b/exceptional_situations/translation.py
new file mode 100644
index 000000000..08f98800d
--- /dev/null
+++ b/exceptional_situations/translation.py
@@ -0,0 +1,13 @@
+from modeltranslation.translator import TranslationOptions, translator
+
+from exceptional_situations.models import SituationAnnouncement
+
+
+class SituationAnnouncementTranslationOptions(TranslationOptions):
+    fields = (
+        "title",
+        "description",
+    )
+
+
+translator.register(SituationAnnouncement, SituationAnnouncementTranslationOptions)
diff --git a/iot/README.md b/iot/README.md
index e7935c21d..d40de5a32 100644
--- a/iot/README.md
+++ b/iot/README.md
@@ -1,22 +1,25 @@
 ## About
 The purpose of the IoT app is to store temporarly data from various IoT-data sources, that do not allow frequent fetching of their data.
-The data is stored as it is in JSON to a JSONField and served as JSON. The app uses caching to cache all its queries and serialized data. The Cache is cleared for the source when importing the data source or when a data source is added. The cache is populated if empty when serving data.
+The data is stored as is, in JSON format, in a JSONField and served as JSON. The app uses caching to cache all its queries and serialized data. The cache is cleared for the source when importing the data or when a data source is added. The cache is populated if empty when serving data.
 
 ## Adding IoT-data source from the Admin
-* Give a tree letter long source name, this name will be the name for the source. Used for example when requesting the data.
+* Give a three letter long identifier; it will be used to identify the data
+to be imported in the Celery task and when requesting data in the API.
 * Add the full name of the source
 * Add the Url to the JSON data.
+* Set is_xml to True if the data is in XML format; the data will be converted to JSON.
+* Add the optional headers for the request.
 
 ## Setting periodic importing using Celery from the Admin
 * Create a periodic task, give a descrpitive name.
 * Select *iot.tasks.import_iot_data* as the Task (registered)
 * Choose the *Interval Schedule*
 * Set the Start DateTime
-* Add the source name as *Positional Arguments*, e.g. ["R24"] would import the source_name R24.
+* Add the identifier as *Positional Arguments*, e.g. ["R24"] would import the source with the identifier R24.
 
 ## Manual import
 To manually import source:
-`./manage.py import_iot_data source_name`
+`./manage.py import_iot_data identifier`
 Or by running the perioc task from the admin.
 
 ## Retriving data
diff --git a/iot/management/commands/import_iot_data.py b/iot/management/commands/import_iot_data.py
index c1d90866e..eae05d817 100644
--- a/iot/management/commands/import_iot_data.py
+++ b/iot/management/commands/import_iot_data.py
@@ -1,7 +1,9 @@
 import json
 import logging
+from xml.parsers.expat import ExpatError
 
 import requests
+import xmltodict
 from django.core.cache import cache
 from django.core.management.base import BaseCommand
@@ -13,17 +15,25 @@ def save_data_to_db(source):
     IoTData.objects.filter(data_source=source).delete()
-
     try:
-        response = requests.get(source.url)
+        response = requests.get(source.url, headers=source.headers)
     except requests.exceptions.ConnectionError:
         logger.error(f"Could not fetch data from: {source.url}")
         return
 
-    try:
-        json_data = response.json()
-    except json.decoder.JSONDecodeError:
-        logger.error(f"Could not decode data to json from: {source.url}")
-        return
+    if source.is_xml:
+        try:
+            json_data = xmltodict.parse(response.text)
+        except ExpatError as err:
+            logger.error(
+                f"Could not parse XML data from the given url {source.url}. {err}"
+            )
+            return
+    else:
+        try:
+            json_data = response.json()
+        except json.decoder.JSONDecodeError as err:
+            logger.error(f"Could not decode data to JSON from: {source.url}. 
{err}") + return IoTData.objects.create(data_source=source, data=json_data) diff --git a/iot/migrations/0003_iotdatasource_headers.py b/iot/migrations/0003_iotdatasource_headers.py new file mode 100644 index 000000000..d70dd8d84 --- /dev/null +++ b/iot/migrations/0003_iotdatasource_headers.py @@ -0,0 +1,20 @@ +# Generated by Django 4.1.13 on 2024-03-13 06:59 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("iot", "0002_add_ordering"), + ] + + operations = [ + migrations.AddField( + model_name="iotdatasource", + name="headers", + field=models.JSONField( + blank=True, null=True, verbose_name="request headers" + ), + ), + ] diff --git a/iot/migrations/0004_iotdatasource_is_xml_alter_iotdatasource_headers_and_more.py b/iot/migrations/0004_iotdatasource_is_xml_alter_iotdatasource_headers_and_more.py new file mode 100644 index 000000000..d8bdb9ddb --- /dev/null +++ b/iot/migrations/0004_iotdatasource_is_xml_alter_iotdatasource_headers_and_more.py @@ -0,0 +1,39 @@ +# Generated by Django 4.1.13 on 2024-03-13 08:49 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("iot", "0003_iotdatasource_headers"), + ] + + operations = [ + migrations.AddField( + model_name="iotdatasource", + name="is_xml", + field=models.BooleanField( + default=False, + verbose_name="If True, XML data will be converted to JSON.", + ), + ), + migrations.AlterField( + model_name="iotdatasource", + name="headers", + field=models.JSONField( + blank=True, + null=True, + verbose_name='request headers in JSON format, e.g., {"key1": "value1", "key2": "value2"}', + ), + ), + migrations.AlterField( + model_name="iotdatasource", + name="source_name", + field=models.CharField( + max_length=3, + unique=True, + verbose_name="Three letter long identifier for the source. Set the identifier as an argument to the Celery task that fetches the data.", + ), + ), + ] diff --git a/iot/models.py b/iot/models.py index d2b4c7b22..a60d5cd1f 100644 --- a/iot/models.py +++ b/iot/models.py @@ -1,16 +1,29 @@ import json +from xml.parsers.expat import ExpatError import requests +import xmltodict from django.core.exceptions import ValidationError from django.db import models class IoTDataSource(models.Model): source_name = models.CharField( - max_length=3, unique=True, verbose_name="Three letter long name for the source" + max_length=3, + unique=True, + verbose_name="Three letter long identifier for the source. " + "Set the identifier as an argument to the Celery task that fetches the data.", ) source_full_name = models.CharField(max_length=64, null=True) + is_xml = models.BooleanField( + default=False, verbose_name="If True, XML data will be converted to JSON." 
+ ) url = models.URLField() + headers = models.JSONField( + null=True, + blank=True, + verbose_name='request headers in JSON format, e.g., {"key1": "value1", "key2": "value2"}', + ) def __str__(self): return self.source_name @@ -18,16 +31,26 @@ def __str__(self): def clean(self): # Test if url exists try: - response = requests.get(self.url) + response = requests.get(self.url, headers=self.headers) except requests.exceptions.ConnectionError: raise ValidationError(f"The given url {self.url} does not exist.") - # Test if valid json - try: - response.json() - except json.decoder.JSONDecodeError: - raise ValidationError( - f"Could not parse the JSON data for the given url {self.url}" - ) + + # Test if XML data can be parsed into JSON + if self.is_xml: + try: + xmltodict.parse(response.text) + except ExpatError as err: + raise ExpatError( + f"Could not parse XML data from the give url {self.url}. {err}" + ) + else: + # Test if valid JSON + try: + response.json() + except json.decoder.JSONDecodeError as err: + raise ValidationError( + f"Could not parse the JSON data from the given url {self.url}. {err}" + ) class IoTData(models.Model): diff --git a/mobility_data/data/Pyorienkorjauspisteet_2022.geojson b/mobility_data/data/Pyorienkorjauspisteet_2022.geojson index 1cd597a4b..bf166f41a 100755 --- a/mobility_data/data/Pyorienkorjauspisteet_2022.geojson +++ b/mobility_data/data/Pyorienkorjauspisteet_2022.geojson @@ -12,8 +12,10 @@ { "type": "Feature", "properties": { "id": null, "Kohde": "Pääkirjaston sisäpiha / Huvudbiblioteks gård / Main librarys inner court", "Osoite": "Läntinen rantakatu 1, 20100 Turku / Västra Strandgatan 1, 20100 Turku", "Varustelaji": "Polkupyörän pumppu / cykelpump / Bicycle pump", "Pvm": "6.5.2022", "Kuvaus": "Kasikayttoinen pyoranpumppu kahdella eri suukappaleella.\nCykelpump med två olika munstycken.\nBicycle pump with two different mouthpiece options.", "Maastossa": "Kyllä", "Lisätieto": "Epakunnossa", "x": 23459938.338352039, "y": 6704503.6369999349 }, "geometry": { "type": "Point", "coordinates": [ 23459938.338352039456367, 6704503.636999934911728 ] } }, { "type": "Feature", "properties": { "id": null, "Kohde": "Brankkiksenaukio / Brankisplan / Brankis plaza", "Osoite": "Kauppiaskatu 4, 21600 Parainen / Köpmansgatan 4, 21600 Pargas", "Varustelaji": "Pyöränkorjauspiste / Cykelservicestation / Bike service station", "Pvm": "16.05.2022", "Kuvaus": "Katoksellinen pyöränkorjauspiste sisältää ilmapumpun ja työkaluja. Ilmapumpussa on moniventtiilisuulake ja painemittari. Korjauspiste sisältää kolme ruuvimeisseliä, (ristipää- ura- ja TORX T25 -ruuvimeisselin), jakoavaimen, Gedore kärkipihdit, kaksi kiintoavainta (8x10mm ja 13x15mm), kuusiokoloavainsarjan (2-8mm) ja rengasraudat. Pyöränkorjauspisteessä on kannatinkoukut, joihin pyörän voi nostaa huollon ajaksi.\nCykelservicestation med gapskjul innehåller en pump och verktygs. I pumpen finns multi-munstycke och en manometer. Servicestation innehåller tre skruvmejsel (krysspårmejsel, spårskruvmejsel och TORX T25 skruvmejsel), skiftnyckeln, Gedora spetstång, två blocknycklar (8x10mm och 13x15mm), sexkantnyckelsats (2-8mm) och däckjärn. Cykelservicestation har också två pelaren som cykeln kan hänga upp.\nBike service station with roof includes air pump and tools. There are different mouthpiece options and a manometer. 
Service station tools include three different screwdrivers (a Philips screwdriver, a slotted screwdriver and a TORX T25 screwdriver), adjustable wrench, Gedora nippers, two wrench (8x10mm and 13x15mm), a series of hex head wrench (2-8mm) and tyre irons. Bike service station also include hooks where the bicycle can be lifted during the service. \n", "Maastossa": "Kyllä", "Lisätieto": "Merkki iBOMBO PRS SCANDIC", "x": 23461398.542189036, "y": 6687555.9403193807 }, "geometry": { "type": "Point", "coordinates": [ 23461398.542189035564661, 6687555.94031938072294 ] } }, { "type": "Feature", "properties": { "id": null, "Kohde": "Lillmälö", "Osoite": "Saaristotie 3107, 21600 Parainen / Skärgårdsvägen 3107, 21600 Pargas", "Varustelaji": "Pyöränkorjauspiste / Cykelservicestation / Bike service station", "Pvm": "16.05.2022", "Kuvaus": "Katoksellinen pyöränkorjauspiste sisältää ilmapumpun ja työkaluja. Ilmapumpussa on moniventtiilisuulake ja painemittari. Korjauspiste sisältää kolme ruuvimeisseliä, (ristipää- ura- ja TORX T25 -ruuvimeisselin), jakoavaimen, Gedore kärkipihdit, kaksi kiintoavainta (8x10mm ja 13x15mm), kuusiokoloavainsarjan (2-8mm) ja rengasraudat. Pyöränkorjauspisteessä on kannatinkoukut, joihin pyörän voi nostaa huollon ajaksi.\nCykelservicestation med gapskjul innehåller en pump och verktygs. I pumpen finns multi-munstycke och en manometer. Servicestation innehåller tre skruvmejsel (krysspårmejsel, spårskruvmejsel och TORX T25 skruvmejsel), skiftnyckeln, Gedora spetstång, två blocknycklar (8x10mm och 13x15mm), sexkantnyckelsats (2-8mm) och däckjärn. Cykelservicestation har också två pelaren som cykeln kan hänga upp.\nBike service station with roof includes air pump and tools. There are different mouthpiece options and a manometer. Service station tools include three different screwdrivers (a Philips screwdriver, a slotted screwdriver and a TORX T25 screwdriver), adjustable wrench, Gedora nippers, two wrench (8x10mm and 13x15mm), a series of hex head wrench (2-8mm) and tyre irons. Bike service station also include hooks where the bicycle can be lifted during the service. \n", "Maastossa": "Kyllä", "Lisätieto": "Merkki iBOMBO PRS SCANDIC", "x": 23450878.808870975, "y": 6680753.3015155382 }, "geometry": { "type": "Point", "coordinates": [ 23450878.808870974928141, 6680753.30151553824544 ] } }, -{ "type": "Feature", "properties": { "id": null, "Kohde": "Hanka", "Osoite": "Luotojentie 1092, 21150 Naantali", "Varustelaji": "Pyöränkorjauspiste / Cykelservicestation / Bike service station", "Pvm": "16.05.2022", "Kuvaus": "Pyöränkorjauspiste sisältää ilmapumpun ja työkaluja. Ilmapumpussa on moniventtiilisuulake ja painemittari. Korjauspiste sisältää kolme ruuvimeisseliä, (ristipää- ura- ja TORX T25 -ruuvimeisselin), jakoavaimen, Gedore kärkipihdit, kaksi kiintoavainta (8x10mm ja 13x15mm), kuusiokoloavainsarjan (2-8mm) ja rengasraudat. Pyöränkorjauspisteessä on kannatinkoukut, joihin pyörän voi nostaa huollon ajaksi.\nCykelservicestation innehåller en pump och verktygs. I pumpen finns multi-munstycke och en manometer. Servicestation innehåller tre skruvmejsel (krysspårmejsel, spårskruvmejsel och TORX T25 skruvmejsel), skiftnyckeln, Gedora spetstång, två blocknycklar (8x10mm och 13x15mm), sexkantnyckelsats (2-8mm) och däckjärn. Cykelservicestation har också två pelaren som cykeln kan hänga upp.\nBike service station includes air pump and tools. There are different mouthpiece options and a manometer. 
Service station tools include three different screwdrivers (a Philips screwdriver, a slotted screwdriver and a TORX T25 screwdriver), adjustable wrench, Gedora nippers, two wrench (8x10mm and 13x15mm), a series of hex head wrench (2-8mm) and tyre irons. Bike service station also include hooks where the bicycle can be lifted during the service. \n", "Maastossa": "Kyllä", "Lisätieto": "Merkki iBOMBO PRS SCANDIC", "x": 23442968.857040703, "y": 6686362.781391114 }, "geometry": { "type": "Point", "coordinates": [ 23442968.857040703296661, 6686362.781391113996506 ] } }, -{ "type": "Feature", "properties": { "id": null, "Kohde": "Röölä", "Osoite": "Rööläntie 402, 21150 Naantali", "Varustelaji": "Pyöränkorjauspiste / Cykelservicestation / Bike service station", "Pvm": "16.05.2022", "Kuvaus": "Pyöränkorjauspiste sisältää ilmapumpun ja työkaluja. Ilmapumpussa on moniventtiilisuulake ja painemittari. Korjauspiste sisältää kolme ruuvimeisseliä, (ristipää- ura- ja TORX T25 -ruuvimeisselin), jakoavaimen, Gedore kärkipihdit, kaksi kiintoavainta (8x10mm ja 13x15mm), kuusiokoloavainsarjan (2-8mm) ja rengasraudat. Pyöränkorjauspisteessä on kannatinkoukut, joihin pyörän voi nostaa huollon ajaksi.\nCykelservicestation innehåller en pump och verktygs. I pumpen finns multi-munstycke och en manometer. Servicestation innehåller tre skruvmejsel (krysspårmejsel, spårskruvmejsel och TORX T25 skruvmejsel), skiftnyckeln, Gedora spetstång, två blocknycklar (8x10mm och 13x15mm), sexkantnyckelsats (2-8mm) och däckjärn. Cykelservicestation har också två pelaren som cykeln kan hänga upp.\nBike service station includes air pump and tools. There are different mouthpiece options and a manometer. Service station tools include three different screwdrivers (a Philips screwdriver, a slotted screwdriver and a TORX T25 screwdriver), adjustable wrench, Gedora nippers, two wrench (8x10mm and 13x15mm), a series of hex head wrench (2-8mm) and tyre irons. Bike service station also include hooks where the bicycle can be lifted during the service. \n", "Maastossa": "Kyllä", "Lisätieto": "Merkki iBOMBO PRS SCANDIC", "x": 23442390.872016005, "y": 6693030.3863375364 }, "geometry": { "type": "Point", "coordinates": [ 23442390.872016005218029, 6693030.386337536387146 ] } }, -{ "type": "Feature", "properties": { "id": null, "Kohde": "Nauvo / Nagu", "Osoite": "Nauvon ranta 6, 21660 Parainen / Nagu Strand 6, 21660 Pargas", "Varustelaji": "Pyöränkorjauspiste / Cykelservicestation / Bike service station", "Pvm": "16.05.2022", "Kuvaus": "Pyöränkorjauspiste sisältää ilmapumpun ja työkaluja. Ilmapumpussa on moniventtiilisuulake ja painemittari. Korjauspiste sisältää kolme ruuvimeisseliä, (ristipää- ura- ja TORX T25 -ruuvimeisselin), jakoavaimen, Gedore kärkipihdit, kaksi kiintoavainta (8x10mm ja 13x15mm), kuusiokoloavainsarjan (2-8mm) ja rengasraudat. Pyöränkorjauspisteessä on kannatinkoukut, joihin pyörän voi nostaa huollon ajaksi.\nCykelservicestation innehåller en pump och verktygs. I pumpen finns multi-munstycke och en manometer. Servicestation innehåller tre skruvmejsel (krysspårmejsel, spårskruvmejsel och TORX T25 skruvmejsel), skiftnyckeln, Gedora spetstång, två blocknycklar (8x10mm och 13x15mm), sexkantnyckelsats (2-8mm) och däckjärn. Cykelservicestation har också två pelaren som cykeln kan hänga upp.\nBike service station includes air pump and tools. There are different mouthpiece options and a manometer. 
Service station tools include three different screwdrivers (a Philips screwdriver, a slotted screwdriver and a TORX T25 screwdriver), adjustable wrench, Gedora nippers, two wrench (8x10mm and 13x15mm), a series of hex head wrench (2-8mm) and tyre irons. Bike service station also include hooks where the bicycle can be lifted during the service. \n", "Maastossa": "Kyllä", "Lisätieto": "Merkki iBOMBO PRS SCANDIC", "x": 23439620.678092718, "y": 6676188.3548369883 }, "geometry": { "type": "Point", "coordinates": [ 23439620.67809271812439, 6676188.354836988262832 ] } } +{ "type": "Feature", "properties": { "id": null, "Kohde": "Hanka", "Osoite": "Luotojentie 1092, 21150 Naantali / Luotojentie 1092, 21150 Naantali", "Varustelaji": "Pyöränkorjauspiste / Cykelservicestation / Bike service station", "Pvm": "16.05.2022", "Kuvaus": "Pyöränkorjauspiste sisältää ilmapumpun ja työkaluja. Ilmapumpussa on moniventtiilisuulake ja painemittari. Korjauspiste sisältää kolme ruuvimeisseliä, (ristipää- ura- ja TORX T25 -ruuvimeisselin), jakoavaimen, Gedore kärkipihdit, kaksi kiintoavainta (8x10mm ja 13x15mm), kuusiokoloavainsarjan (2-8mm) ja rengasraudat. Pyöränkorjauspisteessä on kannatinkoukut, joihin pyörän voi nostaa huollon ajaksi.\nCykelservicestation innehåller en pump och verktygs. I pumpen finns multi-munstycke och en manometer. Servicestation innehåller tre skruvmejsel (krysspårmejsel, spårskruvmejsel och TORX T25 skruvmejsel), skiftnyckeln, Gedora spetstång, två blocknycklar (8x10mm och 13x15mm), sexkantnyckelsats (2-8mm) och däckjärn. Cykelservicestation har också två pelaren som cykeln kan hänga upp.\nBike service station includes air pump and tools. There are different mouthpiece options and a manometer. Service station tools include three different screwdrivers (a Philips screwdriver, a slotted screwdriver and a TORX T25 screwdriver), adjustable wrench, Gedora nippers, two wrench (8x10mm and 13x15mm), a series of hex head wrench (2-8mm) and tyre irons. Bike service station also include hooks where the bicycle can be lifted during the service. \n", "Maastossa": "Kyllä", "Lisätieto": "Merkki iBOMBO PRS SCANDIC", "x": 23442968.857040703, "y": 6686362.781391114 }, "geometry": { "type": "Point", "coordinates": [ 23442968.857040703296661, 6686362.781391113996506 ] } }, +{ "type": "Feature", "properties": { "id": null, "Kohde": "Röölä", "Osoite": "Rööläntie 402, 21150 Naantali / Rööläntie 402, 21150 Naantali", "Varustelaji": "Pyöränkorjauspiste / Cykelservicestation / Bike service station", "Pvm": "16.05.2022", "Kuvaus": "Pyöränkorjauspiste sisältää ilmapumpun ja työkaluja. Ilmapumpussa on moniventtiilisuulake ja painemittari. Korjauspiste sisältää kolme ruuvimeisseliä, (ristipää- ura- ja TORX T25 -ruuvimeisselin), jakoavaimen, Gedore kärkipihdit, kaksi kiintoavainta (8x10mm ja 13x15mm), kuusiokoloavainsarjan (2-8mm) ja rengasraudat. Pyöränkorjauspisteessä on kannatinkoukut, joihin pyörän voi nostaa huollon ajaksi.\nCykelservicestation innehåller en pump och verktygs. I pumpen finns multi-munstycke och en manometer. Servicestation innehåller tre skruvmejsel (krysspårmejsel, spårskruvmejsel och TORX T25 skruvmejsel), skiftnyckeln, Gedora spetstång, två blocknycklar (8x10mm och 13x15mm), sexkantnyckelsats (2-8mm) och däckjärn. Cykelservicestation har också två pelaren som cykeln kan hänga upp.\nBike service station includes air pump and tools. There are different mouthpiece options and a manometer. 
Service station tools include three different screwdrivers (a Philips screwdriver, a slotted screwdriver and a TORX T25 screwdriver), adjustable wrench, Gedora nippers, two wrench (8x10mm and 13x15mm), a series of hex head wrench (2-8mm) and tyre irons. Bike service station also include hooks where the bicycle can be lifted during the service. \n", "Maastossa": "Kyllä", "Lisätieto": "Merkki iBOMBO PRS SCANDIC", "x": 23442390.872016005, "y": 6693030.3863375364 }, "geometry": { "type": "Point", "coordinates": [ 23442390.872016005218029, 6693030.386337536387146 ] } }, +{ "type": "Feature", "properties": { "id": null, "Kohde": "Nauvo / Nagu", "Osoite": "Nauvon ranta 6, 21660 Parainen / Nagu Strand 6, 21660 Pargas", "Varustelaji": "Pyöränkorjauspiste / Cykelservicestation / Bike service station", "Pvm": "16.05.2022", "Kuvaus": "Pyöränkorjauspiste sisältää ilmapumpun ja työkaluja. Ilmapumpussa on moniventtiilisuulake ja painemittari. Korjauspiste sisältää kolme ruuvimeisseliä, (ristipää- ura- ja TORX T25 -ruuvimeisselin), jakoavaimen, Gedore kärkipihdit, kaksi kiintoavainta (8x10mm ja 13x15mm), kuusiokoloavainsarjan (2-8mm) ja rengasraudat. Pyöränkorjauspisteessä on kannatinkoukut, joihin pyörän voi nostaa huollon ajaksi.\nCykelservicestation innehåller en pump och verktygs. I pumpen finns multi-munstycke och en manometer. Servicestation innehåller tre skruvmejsel (krysspårmejsel, spårskruvmejsel och TORX T25 skruvmejsel), skiftnyckeln, Gedora spetstång, två blocknycklar (8x10mm och 13x15mm), sexkantnyckelsats (2-8mm) och däckjärn. Cykelservicestation har också två pelaren som cykeln kan hänga upp.\nBike service station includes air pump and tools. There are different mouthpiece options and a manometer. Service station tools include three different screwdrivers (a Philips screwdriver, a slotted screwdriver and a TORX T25 screwdriver), adjustable wrench, Gedora nippers, two wrench (8x10mm and 13x15mm), a series of hex head wrench (2-8mm) and tyre irons. Bike service station also include hooks where the bicycle can be lifted during the service. \n", "Maastossa": "Kyllä", "Lisätieto": "Merkki iBOMBO PRS SCANDIC", "x": 23439620.678092718, "y": 6676188.3548369883 }, "geometry": { "type": "Point", "coordinates": [ 23439620.67809271812439, 6676188.354836988262832 ] } }, +{ "type": "Feature", "properties": { "id": null, "Kohde": "Kupittaa / Kuppis / Kupittaa", "Osoite": "Joukahaisenkatu 6, 20520 Turku / Joukahainengatan 6, 20520 Turku", "Varustelaji": "Pyöränkorjauspiste / Cykelservicestation / Bike service station", "Pvm": "1.3.2024", "Kuvaus": "Pyöränkorjauspiste sisältää pyöränpumpun ja kaksi monitoimityökalua. Pyöränpumppuun on valittavissa erilaisia suukappaleita, jotka sopivat tavallisiin venttiilityyppeihin. Monitoimityökalut sisältävät kiintoavaimen (8,9,10 ja 15mm), kuusiokoloavaimen (3,4,5,6 ja 8mm) ja ruuvimeisselin (0,8x4mm). Pyöränkorjauspisteessä on myös kaksi kannatinkoukkua eri korkeudella, joihin pyörän voi nostaa huollon ajaksi.\nCykelservicestation innehåller cykelpump och två multiverktyg. I cykelpump det finns multi-munstycke som passar alla gängse ventiltyper. Multiverktyg innehåller blocknyckel (8,9,10 och 15mm), sexkantnyckel (3,4,5,6 och 8mm) och skruvmejsel (0,8x4mm). Cykelservicestation har också två pelaren i olika nivåer som cykeln kan hänga upp. \nBike service station includes bicycle pump and two multifunctional tools. There are different mouthpiece options which are compatible to usual valve types. 
Multifunctional tools include a wrench (8,9,10 and 15mm), a hex head wrench (3,4,5,6 and 8mm) and a screwdriver (0,8x4mm). Bike service station also include two hooks in different heights where the bicycles can be lifted during the service.", "Maastossa": "Ei", "Lisätieto": "Merkki Care4bikes", "x": 23461183.976663336, "y": 6704468.582275727 }, "geometry": { "type": "Point", "coordinates": [ 23461183.976663336, 6704468.582275727 ] } } + ] } diff --git a/mobility_data/importers/culture_routes.py b/mobility_data/importers/culture_routes.py index 2b8eee4d9..9427a3518 100644 --- a/mobility_data/importers/culture_routes.py +++ b/mobility_data/importers/culture_routes.py @@ -26,6 +26,11 @@ SOURCE_DATA_SRID = 4326 # Routes are from https://citynomadi.com/route/?keywords=turku URLS = { + "Tapion Polku": { + "fi": "https://www.citynomadi.com/api/route/5b6669fa989c1b8c2fc552b2b2afdbd1/kml?lang=fi", + "sv": "https://www.citynomadi.com/api/route/5b6669fa989c1b8c2fc552b2b2afdbd1/kml?lang=sv", + "en": "https://www.citynomadi.com/api/route/5b6669fa989c1b8c2fc552b2b2afdbd1/kml?lang=en", + }, "Sotiemme Turku": { "fi": "https://citynomadi.com/api/route/fb656ce4fc31868f4b90168ecc3fabdb/kml?lang=fi", "sv": "https://citynomadi.com/api/route/fb656ce4fc31868f4b90168ecc3fabdb/kml?lang=sv", diff --git a/mobility_data/importers/data/content_types.yml b/mobility_data/importers/data/content_types.yml index 9584a2aed..c3b76eb5f 100644 --- a/mobility_data/importers/data/content_types.yml +++ b/mobility_data/importers/data/content_types.yml @@ -195,6 +195,12 @@ content_types: sv: Grillplats en: Barbecue place + - content_type_name: LeanTo + name: + fi: Laavu + sv: Vindskydd + en: LeanTo + - content_type_name: TicketMachineSign # Liikennemerkki: 990 Lippuautomaatti name: @@ -413,6 +419,11 @@ content_types: sv: Tillgänglighetsområde för skola och daghem en: School and kindergarten accessibility area + - content_type_name: StreetAreaInformation + name: + fi: KatuAlueTieto + sv: GatuOmrådeInformation + en: StreetAreaInformation # End of content types importer from opaskarta.turku.fi - content_type_name: Underpass diff --git a/mobility_data/importers/data/wfs_importer_config.yml b/mobility_data/importers/data/wfs_importer_config.yml index a5a71403f..9a0bf5f76 100644 --- a/mobility_data/importers/data/wfs_importer_config.yml +++ b/mobility_data/importers/data/wfs_importer_config.yml @@ -1,4 +1,32 @@ features: + - content_type_name: StreetAreaInformation + wfs_layer: GIS:Katualueet + max_features: 100000 + fields: + name: + fi: Kadunnimi + extra_fields: + omistaja: + wfs_field: Omistaja + omistaja_koodi: + wfs_field: Omistaja_koodi + kunnossapitaja: + wfs_field: Kunnossapitaja + kunnossapitoluokka: + wfs_field: Kunnossapitoluokka + kunnossapitoluokka_koodi: + wfs_field: Kunnossapitoluokka_koodi + talvikunnossapito: + wfs_field: Talvikunnossapito + talvikunnossapito_koodi: + wfs_field: Talvikunnossapito_koodi + pintamateriaaliryhma: + wfs_field: Pintamateriaaliryhma + pintamateriaali: + wfs_field: Pintamateriaali + pintamateriaali_koodi: + wfs_field: Pintamateriaali_koodi + - content_type_name: PlayGround wfs_layer: GIS:Viheralueet max_features: 50000 @@ -38,7 +66,7 @@ features: wfs_layer: GIS:Varusteet max_features: 100000 include: - Tyyppi: Grillipaikka + Varustelaji: Grillauspaikka extra_fields: valmistaja: wfs_field: Valmistaja @@ -70,6 +98,12 @@ features: asennus: wfs_field: Asennus + - content_type_name: LeanTo + wfs_layer: GIS:Varusteet + max_features: 100000 + include: + Tyyppi: Laavu + - content_type_name: 
TicketMachineSign wfs_layer: GIS:Liikennemerkit include: @@ -335,18 +369,22 @@ features: - content_type_name: ScooterParkingArea wfs_layer: GIS:Sahkopotkulautaparkki + locates_in_turku: False - content_type_name: ScooterSpeedLimitArea wfs_layer: GIS:Sahkopotkulauta_nopeusrajoitus + locates_in_turku: False - content_type_name: ScooterNoParkingArea wfs_layer: GIS:Sahkopotkulauta_pysakointikielto + locates_in_turku: False + - content_type_name: PublicToilet wfs_layer: GIS:Varusteet max_features: 10000 - # Default is False, if True include only if geometry locates in Turku. locates_in_turku: True + # Default is False, if True include only if geometry locates in Turku. # Include feature if field 'Tyyppi' has value 'WC' include: Tyyppi: WC @@ -522,9 +560,12 @@ features: name: fi: Kohde extra_fields: - Minuutit: + kohde_ID: + wfs_field: Kohde_ID + wfs_type: int + minuutit: wfs_field: Minuutit wfs_type: int - Kulkumuoto: + kulkumuoto: wfs_field: Kulkumuoto diff --git a/mobility_data/importers/wfs.py b/mobility_data/importers/wfs.py index 137f36116..666a46f4a 100644 --- a/mobility_data/importers/wfs.py +++ b/mobility_data/importers/wfs.py @@ -151,10 +151,10 @@ def get_data_source(config, max_features): def import_wfs_feature(config, data_file=None): if "content_type_name" not in config: - logger.warning(f"Skipping feature {config}, 'content_type_name' is required.") + logger.warning(f"Discarding feature {config}, 'content_type_name' is required.") return False if "wfs_layer" not in config: - logger.warning(f"Skipping feature {config}, no wfs_layer defined.") + logger.warning(f"Discarding feature {config}, no wfs_layer defined.") return False if "max_features" in config: max_features = config["max_features"] diff --git a/mobility_data/management/commands/import_wfs.py b/mobility_data/management/commands/import_wfs.py index 68d033fdd..1d750da45 100644 --- a/mobility_data/management/commands/import_wfs.py +++ b/mobility_data/management/commands/import_wfs.py @@ -65,4 +65,4 @@ def handle(self, *args, **options): try: import_wfs_feature(feature, data_file) except Exception as e: - logger.warning(f"Skipping content_type {feature} : {e}") + logger.warning(f"Discarding content_type {feature} : {e}") diff --git a/mobility_data/tasks.py b/mobility_data/tasks.py index 6539ba71c..8d4fef6fa 100644 --- a/mobility_data/tasks.py +++ b/mobility_data/tasks.py @@ -44,8 +44,8 @@ def import_accessories(name="import_accessories"): @shared_task_email -def import_barbecue_places(name="import_barbecue_places"): - management.call_command("import_wfs", ["BarbecuePlace"]) +def import_outdoor_places(name="import_outdoor_places"): + management.call_command("import_wfs", ["BarbecuePlace", "LeanTo"]) @shared_task_email @@ -163,6 +163,11 @@ def import_under_and_overpasses(name="import_under_and_overpasses"): management.call_command("import_under_and_overpasses") +@shared_task_email +def import_street_area_information(name="import_street_area_information"): + management.call_command("import_wfs", "StreetAreaInformation") + + @shared_task_email def delete_obsolete_data(name="delete_obsolete_data"): MobileUnit.objects.filter(content_types__isnull=True).delete() diff --git a/smbackend/settings.py b/smbackend/settings.py index 6e72fcc9f..59bfb9b55 100644 --- a/smbackend/settings.py +++ b/smbackend/settings.py @@ -79,6 +79,7 @@ BICYCLE_NETWORK_LOG_LEVEL=(str, "INFO"), STREET_MAINTENANCE_LOG_LEVEL=(str, "INFO"), ENVIRONMENT_DATA_LOG_LEVEL=(str, "INFO"), + EXCEPTIONAL_SITUATIONS_LOG_LEVEL=(str, "INFO"), ) @@ -103,6 +104,7 @@
BICYCLE_NETWORK_LOG_LEVEL = env("BICYCLE_NETWORK_LOG_LEVEL") STREET_MAINTENANCE_LOG_LEVEL = env("STREET_MAINTENANCE_LOG_LEVEL") ENVIRONMENT_DATA_LOG_LEVEL = env("ENVIRONMENT_DATA_LOG_LEVEL") +EXCEPTIONAL_SITUATIONS_LOG_LEVEL = env("EXCEPTIONAL_SITUATIONS_LOG_LEVEL") # Application definition INSTALLED_APPS = [ @@ -133,6 +135,7 @@ "iot.apps.IotConfig", "street_maintenance.apps.StreetMaintenanceConfig", "environment_data.apps.EnvironmentDataConfig", + "exceptional_situations.apps.ExceptionalSituationsConfig", ] if env("ADDITIONAL_INSTALLED_APPS"): @@ -336,6 +339,10 @@ def gettext(s): "handlers": ["console"], "level": ENVIRONMENT_DATA_LOG_LEVEL, }, + "exceptional_situations": { + "handlers": ["console"], + "level": EXCEPTIONAL_SITUATIONS_LOG_LEVEL, + }, }, } logging.config.dictConfig(LOGGING) @@ -348,6 +355,8 @@ def gettext(s): "/environment_data/api/v1/stations/", "/environment_data/api/v1/parameters/", "/environment_data/api/v1/data/", + "/exceptional_situations/api/v1/situation/", + "/exceptional_situations/api/v1/situation_type/", ] diff --git a/smbackend/urls.py b/smbackend/urls.py index b04e54fab..94555aa30 100644 --- a/smbackend/urls.py +++ b/smbackend/urls.py @@ -10,6 +10,7 @@ import bicycle_network.api.urls import eco_counter.api.urls import environment_data.api.urls +import exceptional_situations.api.urls import mobility_data.api.urls import street_maintenance.api.urls from iot.api import IoTViewSet @@ -71,6 +72,11 @@ include(environment_data.api.urls), name="environmet_data", ), + re_path( + r"^exceptional_situations/", + include(exceptional_situations.api.urls), + name="exceptional_situations", + ), re_path( r"^street_maintenance/", include(street_maintenance.api.urls),
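For reviewers who want to smoke-test the routes wired up above, here is a minimal sketch that hits the two new exceptional_situations endpoints listed in the settings change. It assumes a local development server at http://localhost:8000 and DRF's default paginated response shape (a "results" key); both are assumptions of this sketch, not something the diff itself guarantees.

```python
import requests

# Assumption: a local dev server started with ./manage.py runserver.
BASE_URL = "http://localhost:8000"

# The path combines the re_path added in smbackend/urls.py with the
# "api/v1/" prefix registered by the exceptional_situations router.
response = requests.get(
    f"{BASE_URL}/exceptional_situations/api/v1/situation/", timeout=10
)
response.raise_for_status()
payload = response.json()
# DRF's default pagination wraps items in "results"; fall back to the raw
# payload in case pagination is configured differently in this project.
situations = payload.get("results", payload) if isinstance(payload, dict) else payload
print(f"Fetched {len(situations)} situations")

# Situation types are exposed on their own route as well.
types_response = requests.get(
    f"{BASE_URL}/exceptional_situations/api/v1/situation_type/", timeout=10
)
print(types_response.status_code)
```

On the mobility_data side, a similar quick check is to run the importer by hand, e.g. `./manage.py import_wfs StreetAreaInformation`, which mirrors how the new Celery task in mobility_data/tasks.py invokes the command; run it against a test database first, since it writes imported features.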