Skip to content

Commit

Permalink
Merge pull request #299 from City-of-Turku/feature/mobility-data-impo…
Browse files Browse the repository at this point in the history
…rters-tests-use-mock-fixtures

Feature/mobility data importers tests use mock fixtures
  • Loading branch information
juuso-j authored Aug 4, 2023
2 parents 5701c53 + 8f25608 commit 955247e
Show file tree
Hide file tree
Showing 41 changed files with 453 additions and 309 deletions.
30 changes: 15 additions & 15 deletions mobility_data/importers/bicycle_stands.py
Original file line number Diff line number Diff line change
Expand Up @@ -198,24 +198,24 @@ def set_gml_feature(self, feature):
self.prefix_name = {k: f"{NAME_PREFIX[k]} {v}" for k, v in self.name.items()}


def get_bicycle_stand_objects(data_source=None):
def get_data_sources():
    """
    Build the list of (format, DataSource) tuples bicycle stands are read from.

    Returns two sources: the WFS endpoint serving GML, and the GeoJSON file
    bundled under mobility_data/data.
    """
    gml_source = DataSource(BICYCLE_STANDS_URL)
    geojson_path = os.path.join(
        get_root_dir(), "mobility_data/data", GEOJSON_FILENAME
    )
    geojson_source = DataSource(geojson_path)
    return [("gml", gml_source), ("geojson", geojson_source)]


def get_bicycle_stand_objects():
"""
Returns a list containg instances of BicycleStand class.
"""
data_sources = []

if data_source:
data_sources.append(data_source)
else:
# Add the WFS datasource that is in GML format
ds = DataSource(BICYCLE_STANDS_URL)
data_sources.append(("gml", ds))
# Add the GEOJSON datasource which is a file
data_path = os.path.join(get_root_dir(), "mobility_data/data")
file_path = os.path.join(data_path, GEOJSON_FILENAME)
ds = DataSource(file_path)
data_sources.append(("geojson", ds))

data_sources = get_data_sources()
bicycle_stands = []
"""
external_stands dict is used to keep track of the names of imported external stands
Expand Down
24 changes: 11 additions & 13 deletions mobility_data/importers/bike_service_stations.py
Original file line number Diff line number Diff line change
Expand Up @@ -64,20 +64,18 @@ def __init__(self, feature):
self.extra["in_terrain"] = feature["Maastossa"].as_string()


def get_bike_service_station_objects(geojson_file=None):
bicycle_repair_points = []
file_name = None
if not geojson_file:
file_name = get_file_name_from_data_source(CONTENT_TYPE_NAME)
if not file_name:
file_name = f"{get_root_dir()}/mobility_data/data/{GEOJSON_FILENAME}"
else:
file_name = f"{get_root_dir()}/mobility_data/data/{GEOJSON_FILENAME}"
def get_data_layer():
    """
    Open the bike service station GeoJSON source and return its first layer.

    Prefers a file registered in the DataSource model for this content type;
    otherwise falls back to the default file under mobility_data/data.
    """
    source_file = get_file_name_from_data_source(CONTENT_TYPE_NAME)
    if not source_file:
        source_file = f"{get_root_dir()}/mobility_data/data/{GEOJSON_FILENAME}"
    return GDALDataSource(source_file)[0]


def get_bike_service_station_objects():
    """Return a list with one BikeServiceStation per feature in the source layer."""
    return [BikeServiceStation(feature) for feature in get_data_layer()]
18 changes: 9 additions & 9 deletions mobility_data/importers/charging_stations.py
Original file line number Diff line number Diff line change
Expand Up @@ -111,18 +111,18 @@ def get_number_of_rows(file_name):
return number_of_rows


def get_charging_station_objects(csv_file=None):
def get_csv_file_name():
    """
    Return the charging-station CSV path: the file registered for this
    content type in the DataSource model if any, else the bundled default.
    """
    registered_file = get_file_name_from_data_source(CONTENT_TYPE_NAME)
    if not registered_file:
        return f"{get_root_dir()}/mobility_data/data/{SOURCE_DATA_FILE_NAME}"
    return registered_file


def get_charging_station_objects():
# Store the imported stations to dict, the index is the key.
file_name = get_csv_file_name()
charging_stations = {}
column_mappings = {}
if not csv_file:
file_name = get_file_name_from_data_source(CONTENT_TYPE_NAME)
if not file_name:
file_name = f"{get_root_dir()}/mobility_data/data/{SOURCE_DATA_FILE_NAME}"
else:
# Use the test data file
file_name = f"{get_root_dir()}/mobility_data/tests/data/{csv_file}"

number_of_rows = get_number_of_rows(file_name)
with open(file_name, encoding="utf-8-sig") as csv_file:
csv_reader = csv.reader(csv_file, delimiter=";")
Expand Down
20 changes: 9 additions & 11 deletions mobility_data/importers/disabled_and_no_staff_parking.py
Original file line number Diff line number Diff line change
Expand Up @@ -135,19 +135,17 @@ def __init__(self, feature):
self.extra[field_name] = feature[field].as_double()


def get_no_staff_parking_objects(geojson_file=None):
no_staff_parkings = []
disabled_parkings = []
file_name = None
def get_geojson_file_name():
    """
    Return the parking GeoJSON path: the file registered in the DataSource
    model for the no-staff-parking content type, or the bundled default.
    """
    registered = get_file_name_from_data_source(NO_STAFF_PARKING_CONTENT_TYPE_NAME)
    return registered or f"{get_root_dir()}/mobility_data/data/{GEOJSON_FILENAME}"

if not geojson_file:
file_name = get_file_name_from_data_source(NO_STAFF_PARKING_CONTENT_TYPE_NAME)
if not file_name:
file_name = f"{get_root_dir()}/mobility_data/data/{GEOJSON_FILENAME}"
else:
# Use the test data file
file_name = f"{get_root_dir()}/mobility_data/tests/data/{geojson_file}"

def get_no_staff_parking_objects():
no_staff_parkings = []
disabled_parkings = []
file_name = get_geojson_file_name()
data_layer = GDALDataSource(file_name)[0]

for feature in data_layer:
Expand Down
16 changes: 11 additions & 5 deletions mobility_data/importers/gas_filling_station.py
Original file line number Diff line number Diff line change
@@ -1,12 +1,12 @@
import logging

import requests
from django.conf import settings
from django.contrib.gis.geos import Point, Polygon
from munigeo.models import Municipality

from .constants import SOUTHWEST_FINLAND_BOUNDARY, SOUTHWEST_FINLAND_BOUNDARY_SRID
from .utils import (
fetch_json,
get_street_name_and_number,
get_street_name_translations,
LANGUAGES,
Expand Down Expand Up @@ -54,15 +54,21 @@ def __init__(self, elem, srid=settings.DEFAULT_SRID):
self.extra["lng_cng"] = self.lng_cng


def get_filtered_gas_filling_station_objects(json_data=None):
def get_json_data(url):
    """
    Fetch *url* with HTTP GET and return the parsed JSON body.

    Raises AssertionError when the server does not answer with status 200.
    The check is an explicit ``raise`` rather than an ``assert`` statement so
    it still runs when Python is started with ``-O`` (which strips asserts);
    the exception type and message are unchanged for existing callers.
    """
    response = requests.get(url)
    if response.status_code != 200:
        raise AssertionError(
            "Fetching {} status code: {}".format(url, response.status_code)
        )
    return response.json()


def get_filtered_gas_filling_station_objects():
"""
Returns a list of GasFillingStation objects that are filtered by location.
Stations inside boundarys of Southwest Finland are included, the rest
are discarded.
"""

if not json_data:
json_data = fetch_json(GAS_FILLING_STATIONS_URL)
json_data = get_json_data(GAS_FILLING_STATIONS_URL)
# srid = json_data["spatialReference"]["wkid"]
# NOTE, hack to fix srid 102100 in source data causes "crs not found"
srid = 4326
Expand Down
18 changes: 8 additions & 10 deletions mobility_data/importers/loading_unloading_places.py
Original file line number Diff line number Diff line change
Expand Up @@ -104,18 +104,16 @@ def __init__(self, feature):
self.extra[field_name] = feature[field].as_int()


def get_loading_and_unloading_objects(geojson_file=None):
objects = []
file_name = None
def get_geojson_file_name():
    """
    Resolve the loading/unloading-places GeoJSON file: prefer the file
    registered in the DataSource model, fall back to the bundled default.
    """
    file_from_data_source = get_file_name_from_data_source(CONTENT_TYPE_NAME)
    if file_from_data_source:
        return file_from_data_source
    default_path = f"{get_root_dir()}/mobility_data/data/{GEOJSON_FILENAME}"
    return default_path

if not geojson_file:
file_name = get_file_name_from_data_source(CONTENT_TYPE_NAME)
if not file_name:
file_name = f"{get_root_dir()}/mobility_data/data/{GEOJSON_FILENAME}"
else:
# Use the test data file
file_name = f"{get_root_dir()}/mobility_data/tests/data/{geojson_file}"

def get_loading_and_unloading_objects():
objects = []
file_name = get_geojson_file_name()
data_layer = GDALDataSource(file_name)[0]
for feature in data_layer:
objects.append(LoadingPlace(feature))
Expand Down
4 changes: 2 additions & 2 deletions mobility_data/importers/lounaistieto_shapefiles.py
Original file line number Diff line number Diff line change
Expand Up @@ -123,5 +123,5 @@ def import_lounaistieto_data_source(config):
if obj.add_feature(feature, config, srid):
objects.append(obj)
content_type = get_or_create_content_type_from_config(config["content_type_name"])
num_ceated, num_deleted = save_to_database(objects, content_type)
log_imported_message(logger, content_type, num_ceated, num_deleted)
num_created, num_deleted = save_to_database(objects, content_type)
log_imported_message(logger, content_type, num_created, num_deleted)
16 changes: 8 additions & 8 deletions mobility_data/importers/share_car_parking_places.py
Original file line number Diff line number Diff line change
Expand Up @@ -54,16 +54,16 @@ def __init__(self, feature):
self.extra[self.RESTRICTION_FIELD][language] = restrictions[i].strip()


def get_geojson_file_name():
    """
    Return the car-share parking GeoJSON path, preferring a file registered
    in the DataSource model over the bundled default file.
    """
    registered_file = get_file_name_from_data_source(CONTENT_TYPE_NAME)
    if registered_file:
        return registered_file
    return f"{get_root_dir()}/mobility_data/data/{GEOJSON_FILENAME}"


def get_car_share_parking_place_objects(geojson_file=None):
car_share_parking_places = []
file_name = None
if not geojson_file:
file_name = get_file_name_from_data_source(CONTENT_TYPE_NAME)
if not file_name:
file_name = f"{get_root_dir()}/mobility_data/data/{GEOJSON_FILENAME}"
else:
# Use the test data file
file_name = f"{get_root_dir()}/mobility_data/tests/data/{geojson_file}"
file_name = get_geojson_file_name()

data_layer = GDALDataSource(file_name)[0]
for feature in data_layer:
Expand Down
55 changes: 31 additions & 24 deletions mobility_data/importers/wfs.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,7 @@
from django import db
from django.conf import settings
from django.contrib.gis.gdal import DataSource
from django.contrib.gis.gdal.error import GDALException
from django.contrib.gis.geos import GEOSGeometry, MultiPolygon, Polygon
from munigeo.models import Municipality

Expand Down Expand Up @@ -67,22 +68,24 @@ def add_feature(self, feature, config):
if config.get("locates_in_turku", False):
if not locates_in_turku(feature, source_srid):
return False

# If geometry contains multiple polygons and create_multipolygon attribute is True
# create one multipolygon from the polygons.
if (
len(feature.geom.coords) > 1
and create_multipolygon
and isinstance(feature.geom, gdalgeometries.Polygon)
):
polygons = []
for coords in feature.geom.coords:
polygons.append(Polygon(coords, srid=source_srid))
self.geometry = MultiPolygon(polygons, srid=source_srid)
else:
self.geometry = GEOSGeometry(feature.geom.wkt, srid=source_srid)
self.geometry.transform(settings.DEFAULT_SRID)

try:
if (
len(feature.geom.coords) > 1
and create_multipolygon
and isinstance(feature.geom, gdalgeometries.Polygon)
):
polygons = []
for coords in feature.geom.coords:
polygons.append(Polygon(coords, srid=source_srid))
self.geometry = MultiPolygon(polygons, srid=source_srid)
else:
self.geometry = GEOSGeometry(feature.geom.wkt, srid=source_srid)
self.geometry.transform(settings.DEFAULT_SRID)
except GDALException as ex:
logger.error(ex)
return False
if "municipality" in config:
municipality = feature[config["municipality"]].as_string()
if municipality:
Expand Down Expand Up @@ -129,8 +132,16 @@ def add_feature(self, feature, config):
return True


def get_data_source(config, max_features):
    """
    Build a GDAL DataSource for the WFS layer described by *config*.

    Uses ``config["wfs_url"]`` when present, otherwise the project-wide
    ``settings.TURKU_WFS_URL``, and caps the request at *max_features*.
    """
    base_url = config.get("wfs_url", settings.TURKU_WFS_URL)
    request_url = WFS_URL.format(
        wfs_url=base_url,
        wfs_layer=config["wfs_layer"],
        max_features=max_features,
    )
    return DataSource(request_url)


def import_wfs_feature(config, data_file=None):
max_features = DEFAULT_MAX_FEATURES
if "content_type_name" not in config:
logger.warning(f"Skipping feature {config}, 'content_type_name' is required.")
return False
Expand All @@ -139,23 +150,19 @@ def import_wfs_feature(config, data_file=None):
return False
if "max_features" in config:
max_features = config["max_features"]
wfs_layer = config["wfs_layer"]
else:
max_features = DEFAULT_MAX_FEATURES
objects = []
if data_file:
ds = DataSource(data_file)
else:
wfs_url = config.get("wfs_url", settings.TURKU_WFS_URL)

url = WFS_URL.format(
wfs_url=wfs_url, wfs_layer=wfs_layer, max_features=max_features
)
ds = DataSource(url)
ds = get_data_source(config, max_features)
assert len(ds) == 1
layer = ds[0]
for feature in layer:
object = MobilityData()
if object.add_feature(feature, config):
objects.append(object)
content_type = get_or_create_content_type_from_config(config["content_type_name"])
num_ceated, num_deleted = save_to_database(objects, content_type)
log_imported_message(logger, content_type, num_ceated, num_deleted)
num_created, num_deleted = save_to_database(objects, content_type)
log_imported_message(logger, content_type, num_created, num_deleted)
13 changes: 0 additions & 13 deletions mobility_data/management/commands/_utils.py

This file was deleted.

Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@
"""
This command removes all units that have a ContentType or
GroupType where type_name is not Null. This data is deprecated
as the only the name will be used in future.
as only the name will be used in future.
"""


Expand Down
30 changes: 7 additions & 23 deletions mobility_data/management/commands/import_bicycle_stands.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,7 @@
import logging

from django.core.management import BaseCommand

from mobility_data.importers.bicycle_stands import (
BICYCLE_STANDS_URL,
CONTENT_TYPE_NAME,
Expand All @@ -11,31 +13,13 @@
save_to_database,
)

from ._base_import_command import BaseImportCommand
from ._utils import get_test_gdal_data_source

logger = logging.getLogger("mobility_data")


class Command(BaseImportCommand):
class Command(BaseCommand):
    """Management command that imports bicycle stands into the database."""

    def handle(self, *args, **options):
        # Fetch the stand objects from the configured sources and persist
        # them under the bicycle-stand content type.
        logger.info("Importing bicycle stands from: {}".format(BICYCLE_STANDS_URL))
        stands = get_bicycle_stand_objects()
        content_type = get_or_create_content_type_from_config(CONTENT_TYPE_NAME)
        num_created, num_deleted = save_to_database(stands, content_type)
        log_imported_message(logger, content_type, num_created, num_deleted)
Loading

0 comments on commit 955247e

Please sign in to comment.