Merge pull request #216 from atoti/sync-deploy-to-aws-with-main
tibdex authored Jul 13, 2023
2 parents ec11886 + 5d2e5fc commit 23b12f6
Showing 8 changed files with 471 additions and 314 deletions.
43 changes: 22 additions & 21 deletions app/config.py
@@ -8,58 +8,59 @@
 from typing import Annotated, Optional, Union
 
 from pydantic import (
-    BaseSettings,
+    AliasChoices,
     DirectoryPath,
     Field,
     FilePath,
     HttpUrl,
     PostgresDsn,
-    parse_obj_as,
-    validator,
+    TypeAdapter,
+    field_validator,
 )
+from pydantic_settings import BaseSettings, SettingsConfigDict
 
 from .util import normalize_postgres_dsn_for_atoti_sql
 
 
 class Config(BaseSettings):
-    """Hold all the configuration properties of the app, not only the ones related to atoti.
+    """Hold all the configuration properties of the app, not only the ones related to Atoti.
     See https://pydantic-docs.helpmanual.io/usage/settings/.
     """
 
+    model_config = SettingsConfigDict(frozen=True)
+
     data_refresh_period: Optional[timedelta] = timedelta(minutes=1)
 
     # The $PORT environment variable is used by most PaaS to indicate the port the app server should bind to.
     port: int = 9090
 
     requests_timeout: timedelta = timedelta(seconds=30)
 
-    reverse_geocoding_path: Union[HttpUrl, FilePath] = parse_obj_as(
-        HttpUrl, "https://api-adresse.data.gouv.fr/reverse/csv/"
-    )
+    reverse_geocoding_path: Union[HttpUrl, FilePath] = TypeAdapter(
+        HttpUrl
+    ).validate_python("https://api-adresse.data.gouv.fr/reverse/csv/")
 
     user_content_storage: Annotated[
        Optional[Union[PostgresDsn, Path]],
        Field(
            # $DATABASE_URL is used by some PaaS such to designate the URL of the app's primary database.
            # For instance: https://devcenter.heroku.com/articles/heroku-postgresql#designating-a-primary-database.
-            env="database_url",
+            validation_alias=AliasChoices("user_content_storage", "database_url")
        ),
    ] = Path("content")
 
-    velib_data_base_path: Union[HttpUrl, DirectoryPath] = parse_obj_as(
-        HttpUrl,
-        "https://velib-metropole-opendata.smoove.pro/opendata/Velib_Metropole",
+    velib_data_base_path: Union[HttpUrl, DirectoryPath] = TypeAdapter(
+        HttpUrl
+    ).validate_python(
+        "https://velib-metropole-opendata.smoove.pro/opendata/Velib_Metropole"
     )
 
-    @validator("user_content_storage")
+    @field_validator("user_content_storage")
     @classmethod
-    def normalize_postgresql_dsn(cls, value: PostgresDsn | object) -> object:
-        return (
-            normalize_postgres_dsn_for_atoti_sql(value)
-            if isinstance(value, PostgresDsn)
-            else value
-        )
-
-    class Config:
-        allow_mutation = False
+    def normalize_postgres_dsn(cls, value: object) -> object:
+        try:
+            postgres_dsn: PostgresDsn = TypeAdapter(PostgresDsn).validate_python(value)
+            return normalize_postgres_dsn_for_atoti_sql(postgres_dsn)
+        except ValueError:
+            return value
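
The edits above follow the standard Pydantic v1-to-v2 migration: BaseSettings moves to the separate pydantic-settings package, parse_obj_as becomes TypeAdapter(...).validate_python(...), @validator becomes @field_validator, Field(env=...) becomes validation_alias=AliasChoices(...), and the inner class Config is replaced by model_config = SettingsConfigDict(...). A minimal standalone sketch of the same idioms, assuming Pydantic v2 (ExampleSettings and its field are hypothetical, not part of this commit):

from pydantic import AliasChoices, Field, HttpUrl, TypeAdapter, field_validator
from pydantic_settings import BaseSettings, SettingsConfigDict


class ExampleSettings(BaseSettings):
    # Replaces Pydantic v1's "class Config: allow_mutation = False".
    model_config = SettingsConfigDict(frozen=True)

    # Populated from either $HOMEPAGE or $URL; replaces Field(env="url") in v1.
    homepage: HttpUrl = Field(
        TypeAdapter(HttpUrl).validate_python("https://example.com"),
        validation_alias=AliasChoices("homepage", "url"),
    )

    @field_validator("homepage")  # replaces Pydantic v1's @validator
    @classmethod
    def forbid_plain_http(cls, value: HttpUrl) -> HttpUrl:
        if value.scheme != "https":
            raise ValueError("expected an HTTPS URL")
        return value

With frozen=True, assigning to an attribute of a constructed ExampleSettings raises an error, matching the old allow_mutation = False behavior.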
18 changes: 11 additions & 7 deletions app/start_session.py
@@ -1,9 +1,9 @@
 from __future__ import annotations
 
 import sys
+from pathlib import Path
 
 import atoti as tt
-from pydantic import AnyUrl
 
 from .config import Config
 from .create_and_join_tables import create_and_join_tables
@@ -12,15 +12,19 @@
 
 
 def create_session(*, config: Config) -> tt.Session:
+    user_content_storage: Path | tt.UserContentStorageConfig | None = None
+
+    if config.user_content_storage is not None:
+        user_content_storage = (
+            config.user_content_storage
+            if isinstance(config.user_content_storage, Path)
+            else tt.UserContentStorageConfig(url=str(config.user_content_storage))
+        )
+
     return tt.Session(
         logging=tt.LoggingConfig(destination=sys.stdout),
         port=config.port,
-        user_content_storage=config.user_content_storage
-        and (
-            tt.UserContentStorageConfig(url=str(config.user_content_storage))
-            if isinstance(config.user_content_storage, AnyUrl)
-            else config.user_content_storage
-        ),
+        user_content_storage=user_content_storage,
     )
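
The old expression leaned on Pydantic v1's str-based URL types and an and-chained conditional; under Pydantic v2 the configured value is either a Path or a validated PostgresDsn, so the session setup now pre-computes the argument and branches on Path instead of AnyUrl. A sketch of that resolution rule in isolation, assuming the atoti API used above (resolve_user_content_storage is a hypothetical helper, not part of the commit):

from __future__ import annotations

from pathlib import Path

import atoti as tt


def resolve_user_content_storage(
    value: object,
) -> Path | tt.UserContentStorageConfig | None:
    # None and local paths pass through unchanged; anything else (e.g. a
    # validated PostgresDsn) is stringified and wrapped for atoti.
    if value is None or isinstance(value, Path):
        return value
    return tt.UserContentStorageConfig(url=str(value))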


25 changes: 12 additions & 13 deletions app/util/normalize_postgres_dsn_for_atoti_sql.py
@@ -2,11 +2,13 @@
 
 from urllib.parse import urlencode, urlparse
 
-from pydantic import PostgresDsn
+from pydantic import PostgresDsn, TypeAdapter
 
 
-def normalize_postgres_dsn_for_atoti_sql(url: PostgresDsn, /) -> object:
-    parts = urlparse(url)
+def normalize_postgres_dsn_for_atoti_sql(url: PostgresDsn, /) -> PostgresDsn:
+    parts = urlparse(str(url))
 
+    parts = parts._replace(scheme="postgresql")
+
     query_parts: list[str] = []
 
@@ -17,14 +19,11 @@ def normalize_postgres_dsn_for_atoti_sql(url: PostgresDsn, /) -> object:
         query_parts.append(
             urlencode({"user": parts.username, "password": parts.password})
         )
+        # Remove username and password.
+        parts = parts._replace(netloc=parts.netloc.split("@", maxsplit=1).pop())
+
+    if query_parts:
+        parts = parts._replace(query="&".join(query_parts))
 
-    return PostgresDsn(
-        # This is how Pydantic creates an instance from parts.
-        None,
-        scheme="postgresql",
-        host=str(parts.hostname),
-        port=str(parts.port) if parts.port else None,
-        path=parts.path,
-        query="&".join(query_parts) if query_parts else None,
-        fragment=parts.fragment,
-    )
+    new_url = parts.geturl()
+    return TypeAdapter(PostgresDsn).validate_python(new_url)
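
Pydantic v2 no longer supports the v1-style keyword construction of a PostgresDsn from individual parts, so the helper now edits the parsed URL directly (forcing the postgresql scheme, moving the credentials into query parameters, and stripping them from the netloc) before re-validating the reassembled string with TypeAdapter. Expected behavior, with illustrative values:

from pydantic import PostgresDsn, TypeAdapter

from app.util import normalize_postgres_dsn_for_atoti_sql

dsn = TypeAdapter(PostgresDsn).validate_python(
    "postgres://alice:s3cret@db.example.com:5432/velib"
)
normalized = normalize_postgres_dsn_for_atoti_sql(dsn)
# The scheme is forced to "postgresql" and the credentials move into query
# parameters, e.g. postgresql://db.example.com:5432/velib?user=alice&password=s3cret
print(normalized)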
12 changes: 6 additions & 6 deletions app/util/read_json.py
@@ -11,10 +11,10 @@
 def read_json(
     base_path: HttpUrl | Path, file_path: Path, /, *, timeout: timedelta
 ) -> object:
-    if isinstance(base_path, HttpUrl):
-        url = f"{base_path}/{file_path.as_posix()}"
-        response = requests.get(url, timeout=timeout.total_seconds())
-        response.raise_for_status()
-        return response.json()
+    if isinstance(base_path, Path):
+        return json.loads((base_path / file_path).read_bytes())
 
-    return json.loads((base_path / file_path).read_bytes())
+    url = f"{base_path}/{file_path.as_posix()}"
+    response = requests.get(url, timeout=timeout.total_seconds())
+    response.raise_for_status()
+    return response.json()
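
The two branches are swapped so that the isinstance check targets Path rather than HttpUrl: in the Pydantic v2 releases current at the time of this commit, HttpUrl is an Annotated alias that cannot be used with isinstance, while f-string interpolation of a URL value still yields its text. A usage sketch under those assumptions (the directory and file names are illustrative):

from datetime import timedelta
from pathlib import Path

from pydantic import HttpUrl, TypeAdapter

from app.util import read_json

# Remote base path: the file is fetched over HTTP.
remote = read_json(
    TypeAdapter(HttpUrl).validate_python(
        "https://velib-metropole-opendata.smoove.pro/opendata/Velib_Metropole"
    ),
    Path("station_information.json"),
    timeout=timedelta(seconds=30),
)

# Local base path: the same relative file is read from disk instead.
local = read_json(
    Path("tests/data"),
    Path("station_information.json"),
    timeout=timedelta(seconds=30),
)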
2 changes: 1 addition & 1 deletion app/util/reverse_geocode.py
@@ -72,7 +72,7 @@ def _reverse_geocode(
         coordinates_df.to_csv(file, index=False)
         file.seek(0)
         response = requests.post(
-            reverse_geocoding_path,
+            str(reverse_geocoding_path),
             data=[
                 ("result_columns", column_name) for column_name in _COLUMN_NAME_MAPPING
             ],
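
Same Pydantic v2 consequence here: HttpUrl values are no longer str subclasses, so reverse_geocoding_path is converted explicitly to the string URL that requests.post expects, rather than relying on it behaving like a str as it did under Pydantic v1. A short illustration, assuming Pydantic v2:

from pydantic import HttpUrl, TypeAdapter

url = TypeAdapter(HttpUrl).validate_python("https://api-adresse.data.gouv.fr/reverse/csv/")
assert not isinstance(url, str)  # unlike Pydantic v1, v2 URL objects are not str
print(str(url))  # https://api-adresse.data.gouv.fr/reverse/csv/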

