Merge pull request #346 from City-of-Turku/dependabot/pip/black-24.3.0
Bump black from 22.6.0 to 24.3.0
juuso-j authored Apr 16, 2024
2 parents 69282d5 + 7fc02c3 commit 09afd7e
Showing 19 changed files with 63 additions and 37 deletions.
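Nearly all of the additions and deletions below are mechanical reformatting from Black's newer stable style rather than behavioural changes: a blank line is now enforced after module docstrings, over-long assignments are split on the right-hand side instead of across the subscripted target, multi-line conditional expressions are parenthesized, and the requirements files pick up Black's updated dependency pins. A minimal sketch of the two dominant patterns, mirroring the hunks below (identifiers invented for illustration, not taken from this repository; this is the shape Black 24.3.0 produces once the single-line form exceeds the line length):

params = {}
start_year, start_month = 2024, 1

# Black 22.6.0 split the subscripted target across lines:
#     params[
#         "startTime"
#     ] = f"{start_year}-{start_month}-01T00:00Z"
# Black 24.3.0 keeps the target on one line and parenthesizes the long value:
params["startTime"] = (
    f"{start_year}-{start_month}-01T00:00Z"
)

# Conditional expressions that span multiple lines are now wrapped in parentheses:
is_ok, message_recorded = False, True
status = (
    "{}recorded".format("" if message_recorded else "not ")
    if not is_ok
    else "passed"
)
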
1 change: 1 addition & 0 deletions eco_counter/tests/test_import_counter_data.py
@@ -6,6 +6,7 @@
The main purpose of these tests is to verify that the importer
imports and calculates the data correctly.
"""

import calendar
from datetime import datetime, timedelta
from io import StringIO
@@ -45,9 +45,9 @@ def get_dataframe(stations, from_year=START_YEAR, from_month=1, initial_import=F
if not initial_import and from_year == current_date_time.year:
params["startTime"] = f"{from_year}-{from_month}-01T00:00Z"
else:
params[
"startTime"
] = f"{start_date_time.year}-{start_date_time.month}-01T00:00Z"
params["startTime"] = (
f"{start_date_time.year}-{start_date_time.month}-01T00:00Z"
)
if current_date_time - relativedelta(months=1) < start_date_time:
params["endTime"] = current_date_time.strftime(TIME_FORMAT)
else:
@@ -56,9 +56,9 @@ def get_dataframe(stations, from_year=START_YEAR, from_month=1, initial_import=F
+ relativedelta(months=1)
- relativedelta(hours=1)
)
params[
"endTime"
] = f"{tmp_time.year}-{tmp_time.month}-{tmp_time.day}T23:00Z"
params["endTime"] = (
f"{tmp_time.year}-{tmp_time.month}-{tmp_time.day}T23:00Z"
)

response = REQUEST_SESSION.get(DATA_URL, params=params)
logger.info(f"Requested data from: {response.url}")
1 change: 1 addition & 0 deletions mobility_data/importers/bicycle_stands.py
@@ -2,6 +2,7 @@
Note, bicycle stands are not imported via the wfs importer
as it needs logic to derive if the stand is hull lockable or covered.
"""

import logging
import os

6 changes: 3 additions & 3 deletions mobility_data/importers/bike_service_stations.py
@@ -50,9 +50,9 @@ def __init__(self, feature):
# If no swedish address, retrieve it from the database.
if language == "sv":
street_name, number = addresses[0].split(" ")
self.address[
language
] = f"{get_street_name_translations(street_name, municipality)[language]} number"
self.address[language] = (
f"{get_street_name_translations(street_name, municipality)[language]} number"
)
# Source data does not contain English addresses, assign the Finnish
else:
self.address[language] = addresses[0]
1 change: 1 addition & 0 deletions mobility_data/importers/marinas.py
@@ -3,6 +3,7 @@
Note, wfs importer is not used as the berths data is
separately assigned to the marina mobile units.
"""

import logging

from django.conf import settings
6 changes: 3 additions & 3 deletions mobility_data/importers/share_car_parking_places.py
@@ -47,9 +47,9 @@ def __init__(self, feature):
street_name["en"] = street_name["fi"]
self.extra[self.RESTRICTION_FIELD] = {}
for i, language in enumerate(LANGUAGES):
self.name[
language
] = f"{self.CAR_PARKING_NAME[language]}, {street_name[language]}"
self.name[language] = (
f"{self.CAR_PARKING_NAME[language]}, {street_name[language]}"
)
self.address[language] = street_name[language]
self.extra[self.RESTRICTION_FIELD][language] = restrictions[i].strip()

1 change: 1 addition & 0 deletions mobility_data/management/commands/import_mobility_data.py
@@ -1,6 +1,7 @@
"""
Imports all mobility data sources.
"""

import logging

from django.core import management
1 change: 1 addition & 0 deletions mobility_data/tests/test_import_accessories.py
@@ -7,6 +7,7 @@
has been removed from the test input data, as it causes GDAL
DataSource to fail when loading data.
"""

from unittest.mock import patch

import pytest
1 change: 1 addition & 0 deletions mobility_data/tests/test_import_payment_zones.py
@@ -6,6 +6,7 @@
has been removed from the test input data, as it causes GDAL
DataSource to fail when loading data.
"""

from unittest.mock import patch

import pytest
1 change: 1 addition & 0 deletions mobility_data/tests/test_import_speed_limits.py
@@ -8,6 +8,7 @@
has been removed from the test input data, as it causes GDAL
DataSource to fail when loading data.
"""

import pytest
from django.conf import settings

14 changes: 13 additions & 1 deletion requirements-dev.txt
@@ -8,7 +8,7 @@ asttokens==2.0.5
# via stack-data
backcall==0.2.0
# via ipython
black==22.6.0
black==24.3.0
# via
# -c requirements.txt
# ipython
@@ -32,6 +32,10 @@ mypy-extensions==0.4.3
# via
# -c requirements.txt
# black
packaging==24.0
# via
# -c requirements.txt
# black
parso==0.8.2
# via
# -c requirements.txt
@@ -64,10 +68,18 @@ six==1.16.0
# asttokens
stack-data==0.2.0
# via ipython
tomli==1.2.1
# via
# -c requirements.txt
# black
traitlets==5.1.0
# via
# ipython
# matplotlib-inline
typing-extensions==4.11.0
# via
# -c requirements.txt
# black
wcwidth==0.2.5
# via
# -c requirements.txt
12 changes: 7 additions & 5 deletions requirements.txt
@@ -20,7 +20,7 @@ attrs==21.2.0
# requests-cache
billiard==3.6.4.0
# via celery
black==22.6.0
black==24.3.0
# via -r requirements.in
cattrs==1.8.0
# via requests-cache
@@ -138,8 +138,10 @@ numpy==1.23.0
# via
# -r requirements.in
# pandas
packaging==21.0
# via pytest
packaging==24.0
# via
# black
# pytest
pandas==2.0.1
# via -r requirements.in
parso==0.8.2
@@ -172,8 +174,6 @@ pyflakes==2.3.1
# via flake8
pykml==0.2.0
# via -r requirements.in
pyparsing==2.4.7
# via packaging
pyrsistent==0.19.3
# via jsonschema
pyshp==2.3.1
@@ -240,6 +240,8 @@ tomli==1.2.1
# pep517
tqdm==4.62.3
# via -r requirements.in
typing-extensions==4.11.0
# via black
tzdata==2022.1
# via
# django-celery-beat
12 changes: 6 additions & 6 deletions services/api.py
@@ -134,9 +134,9 @@ def to_internal_value(self, data):
value = obj[language] # "musiikkiklubit"
if language == settings.LANGUAGES[0][0]: # default language
extra_fields[field_name] = value # { "name": "musiikkiklubit" }
extra_fields[
"{}_{}".format(field_name, language)
] = value # { "name_fi": "musiikkiklubit" }
extra_fields["{}_{}".format(field_name, language)] = (
value # { "name_fi": "musiikkiklubit" }
)
del data[field_name] # delete original translated fields

# handle other than translated fields
@@ -733,9 +733,9 @@ def to_representation(self, obj):
if "accessibility_shortcoming_count" in getattr(
self, "keep_fields", ["accessibility_shortcoming_count"]
):
ret[
"accessibility_shortcoming_count"
] = shortcomings.accessibility_shortcoming_count
ret["accessibility_shortcoming_count"] = (
shortcomings.accessibility_shortcoming_count
)

if "request" not in self.context:
return ret
1 change: 1 addition & 0 deletions services/content_metrics.py
@@ -5,6 +5,7 @@
with either long field contents or a large amount of related
objects.
"""

from django.db.models import Case, Count, IntegerField, Sum, When
from django.db.models.functions import Length

7 changes: 4 additions & 3 deletions services/search/api.py
@@ -17,6 +17,7 @@
- The search_columns can be manually updated with the index_search_columns
and emptied with the empty_search_columns management script.
"""

import logging
import re
from itertools import chain
@@ -127,9 +128,9 @@ def to_representation(self, obj):
shortcomings = obj.accessibility_shortcomings
except UnitAccessibilityShortcomings.DoesNotExist:
shortcomings = UnitAccessibilityShortcomings()
representation[
"accessibility_shortcoming_count"
] = shortcomings.accessibility_shortcoming_count
representation["accessibility_shortcoming_count"] = (
shortcomings.accessibility_shortcoming_count
)
representation["contract_type"] = UnitSerializer.get_contract_type(
self, obj
)
8 changes: 5 additions & 3 deletions services/utils/accessibility_shortcoming_calculator.py
@@ -131,9 +131,11 @@ def _calculate_shortcomings(self, rule, properties, messages, profile_id):
"{}: {} {}".format(
rule["id"],
rule["operator"],
"{}recorded".format("" if message_recorded else "not ")
if not is_ok
else "passed",
(
"{}recorded".format("" if message_recorded else "not ")
if not is_ok
else "passed"
),
)
)
return is_ok, message_recorded
2 changes: 1 addition & 1 deletion smbackend/settings.py
@@ -223,7 +223,7 @@ def gettext(s):
991, # health stations
1097, # basic education
2125, # pre school education
869 # municipal day care
869, # municipal day care
# 25344, # recycling
# 25480, # public libraries
],
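The settings.py hunk is the one spot where the diff could read as a content edit, but it only adds the magic trailing comma: judging from the hunk above, Black 24.3.0 appends a comma after the last active element of the exploded list even though commented-out entries sit between it and the closing bracket. A minimal sketch of the resulting shape (the setting name is invented for illustration; only the element values appear in the hunk):

HIGHLIGHTED_SERVICE_IDS = [  # hypothetical name, not from settings.py
    991,  # health stations
    869,  # municipal day care  <- Black 24.3.0 adds this trailing comma
    # 25344,  # recycling
]
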
1 change: 1 addition & 0 deletions smbackend_turku/importers/divisions.py
@@ -4,6 +4,7 @@
and modified to fit the WFS server of Turku.
"""

import os
import re
from datetime import datetime
12 changes: 6 additions & 6 deletions smbackend_turku/importers/geo_search.py
@@ -205,9 +205,9 @@ def save_page(self, results, municipality):
for result in results:
postal_code = result["postal_code_area"]["postal_code"]
if postal_code not in self.postal_code_areas_cache:
self.postal_code_areas_cache[
postal_code
] = self.get_or_create_postal_code_area(postal_code, result)
self.postal_code_areas_cache[postal_code] = (
self.get_or_create_postal_code_area(postal_code, result)
)

(
street_name_fi,
@@ -353,9 +353,9 @@ def enrich_page(self, results, municipality):

postal_code = result["postal_code_area"]["postal_code"]
if postal_code not in self.postal_code_areas_cache:
self.postal_code_areas_cache[
postal_code
] = self.get_or_create_postal_code_area(postal_code, result)
self.postal_code_areas_cache[postal_code] = (
self.get_or_create_postal_code_area(postal_code, result)
)
# name_sv is not added as there might be a swedish translation
street_entry = {
"name": street_name_fi,