Skip to content

Commit

Permalink
Update to Atoti 0.9.0 (#288)
Browse files Browse the repository at this point in the history
  • Loading branch information
tibdex authored Sep 17, 2024
1 parent b774b7b commit 1e01161
Show file tree
Hide file tree
Showing 40 changed files with 1,056 additions and 1,229 deletions.
13 changes: 0 additions & 13 deletions .github/actions/setup/action.yaml

This file was deleted.

46 changes: 46 additions & 0 deletions .github/workflows/deploy.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,46 @@
name: Deploy

on:
push:
branches:
- main

permissions:
id-token: write
contents: read

jobs:
deploy:
runs-on: ubuntu-latest
name: Deploy to AWS
environment: deploy-to-aws
steps:
- uses: actions/checkout@v4
- uses: aws-actions/configure-aws-credentials@v4
with:
aws-region: ${{ vars.AWS_REGION }}
role-to-assume: arn:aws:iam::${{ secrets.AWS_ACCOUNT_ID }}:role/${{ vars.AWS_DEPLOYMENT_ROLE }}
- uses: aws-actions/amazon-ecr-login@v2
id: log-into-ecr
- name: Build, tag, and push Docker image to Amazon ECR
id: build-tag-and-push-docker-image
env:
TAG: ${{ steps.log-into-ecr.outputs.registry }}/atoti-project-template:${{ github.sha }}
run: |
docker build --tag $TAG .
docker push $TAG
echo "tag=$TAG" >> $GITHUB_OUTPUT
- name: Inline variables in the task definition
run: sed -i -e 's/AWS_ACCOUNT_ID/${{ secrets.AWS_ACCOUNT_ID }}/g' -e 's/AWS_DATABASE_URL_SECRET_NAME/${{ vars.AWS_DATABASE_URL_SECRET_NAME }}/g' -e 's/AWS_EXECUTION_ROLE/${{ vars.AWS_EXECUTION_ROLE }}/g' -e 's/AWS_REGION/${{ vars.AWS_REGION }}/g' task-definition.json
- uses: aws-actions/amazon-ecs-render-task-definition@v1
id: render-task-definition
with:
container-name: atoti-session
image: ${{ steps.build-tag-and-push-docker-image.outputs.tag }}
task-definition: task-definition.json
- uses: aws-actions/amazon-ecs-deploy-task-definition@v2
with:
cluster: atoti-project-template
service: atoti-project-template
task-definition: ${{ steps.render-task-definition.outputs.task-definition }}
wait-for-service-stability: true
16 changes: 0 additions & 16 deletions .github/workflows/sync-branches.yml

This file was deleted.

16 changes: 10 additions & 6 deletions .github/workflows/test.yml
Original file line number Diff line number Diff line change
Expand Up @@ -9,9 +9,13 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: ./.github/actions/setup
- run: poetry check --lock
- run: poetry run ruff format --check
- run: poetry run ruff check
- run: poetry run mypy
- run: poetry run pytest
- uses: astral-sh/setup-uv@v3
with:
enable-cache: true
version: "0.4.10"
- run: uv python install 3.10
- run: uv sync --locked
- run: uv run ruff format --check
- run: uv run ruff check
- run: uv run mypy
- run: uv run pytest
1 change: 1 addition & 0 deletions .gitignore
Original file line number Diff line number Diff line change
@@ -1,2 +1,3 @@
__pycache__/
.venv/
content/
5 changes: 4 additions & 1 deletion .vscode/settings.json
Original file line number Diff line number Diff line change
Expand Up @@ -16,5 +16,8 @@
"python.analysis.autoImportCompletions": true,
"python.languageServer": "Pylance",
"python.testing.pytestEnabled": true,
"python.testing.unittestEnabled": false
"python.testing.unittestEnabled": false,
"yaml.schemas": {
"https://json.schemastore.org/github-workflow.json": "file:///Users/tibdex/repositories/project-template/.github/workflows/deploy.yml"
}
}
2 changes: 1 addition & 1 deletion CONTRIBUTING.md
Original file line number Diff line number Diff line change
Expand Up @@ -4,4 +4,4 @@

This project is not intended to become a showcase of all Atoti's features.
It should have just enough structure to show how to start a project on sound and modular foundations, not more.
https://docs.atoti.io is the right place to discover and learn what Atoti can do.
The right place to teach everything Atoti can do is https://docs.atoti.io.
28 changes: 17 additions & 11 deletions Dockerfile
Original file line number Diff line number Diff line change
@@ -1,22 +1,28 @@
# syntax=docker/dockerfile:1.2
# Inspired from https://github.com/astral-sh/uv-docker-example/blob/dee88a8c43be3b16b0ad58f0daee5eaee7e2157a/multistage.Dockerfile.

# `--platform=linux/amd64` is required to build this image on macOS with Apple Silicon until https://github.com/activeviam/jdk4py/issues/73 is done.
FROM --platform=linux/amd64 python:3.9.18-slim AS builder
FROM ghcr.io/astral-sh/uv:0.4.10-python3.10-bookworm-slim AS builder

RUN pip install poetry==1.7.1
ENV UV_COMPILE_BYTECODE=1 UV_LINK_MODE=copy

COPY poetry.lock pyproject.toml ./
WORKDIR /venv

RUN POETRY_VIRTUALENVS_CREATE=false poetry install --no-cache --no-root --only main --sync
RUN --mount=type=cache,target=/root/.cache/uv \
--mount=type=bind,source=uv.lock,target=uv.lock \
--mount=type=bind,source=pyproject.toml,target=pyproject.toml \
uv sync --frozen --no-dev --no-install-project

FROM --platform=linux/amd64 python:3.9.18-slim AS runner
# Keep this synced with the builder image.
FROM python:3.10-slim-bookworm

ENV ATOTI_HIDE_EULA_MESSAGE=true
ENV PORT=80
COPY --from=builder /venv app

ENV PATH="/app/.venv/bin:$PATH"

COPY --from=builder /usr/local/lib/python3.9/site-packages /usr/local/lib/python3.9/site-packages
COPY app app

ENTRYPOINT ["python", "-u", "-m", "app"]
ENV ATOTI_HIDE_EULA_MESSAGE=true
ENV PORT=80

EXPOSE $PORT

CMD ["python", "-u", "-m", "app"]
18 changes: 8 additions & 10 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -4,8 +4,8 @@ This template can be used to start Atoti projects where the goal is to [go into

On top of the `atoti` package, it comes with:

- Dependency management with [Poetry](https://python-poetry.org)
- Config management with [Pydantic Settings](https://docs.pydantic.dev/2.6/concepts/pydantic_settings)
- Dependency management with [uv](https://docs.astral.sh/uv)
- Config management with [Pydantic](https://docs.pydantic.dev/2.6/concepts/pydantic_settings)
- Testing with [pytest](https://docs.pytest.org)
- Type checking with [mypy](http://mypy-lang.org)
- Formatting and linting with [Ruff](https://docs.astral.sh/ruff)
Expand All @@ -15,26 +15,24 @@ On top of the `atoti` package, it comes with:

### Installation

- [Install `poetry`](https://python-poetry.org/docs/#installation)
- [Install `uv`](https://docs.astral.sh/uv/getting-started/installation)
- Install the dependencies:

```bash
poetry install
uv sync
```

### Commands

To start the app:

```bash
poetry run python -m main
uv run python -m app
```

Other useful commands can be found in [`test.yml`](.github/workflows/test.yml).

## Variants
## Deployment

This repository has the following long-lived branches showcasing different aspects:

- [`deploy-to-aws`](https://github.com/atoti/project-template/tree/deploy-to-aws) for deploying on AWS ECS.
- [`deploy-to-heroku`](https://github.com/atoti/project-template/tree/deploy-to-heroku) for a one-click deploy to Heroku.
This repository automatically deploys to [AWS ECS](https://aws.amazon.com/ecs/).
To deploy somewhere else, delete [`task-definition.json`](task-definition.json) and adapt [`deploy.yml`](.github/workflows/deploy.yml).
8 changes: 3 additions & 5 deletions app/__init__.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,3 @@
from __future__ import annotations

from .config import *
from .constants import *
from .start_app import *
from .config import Config as Config
from .constants import * # noqa: F403
from .start_app import start_app as start_app
5 changes: 3 additions & 2 deletions app/__main__.py
Original file line number Diff line number Diff line change
@@ -1,7 +1,8 @@
from __future__ import annotations
from urllib.parse import urlparse

from . import Config, start_app

with start_app(config=Config()) as session:
print(f"Session listening on port {session.port}") # noqa: T201
port = urlparse(session.url) or 80
print(f"Session listening on port {port}") # noqa: T201
session.wait()
25 changes: 9 additions & 16 deletions app/config.py
Original file line number Diff line number Diff line change
@@ -1,11 +1,6 @@
# ruff: noqa: UP007
# Pydantic evaluates type annotations at runtime which does not support `|`.

from __future__ import annotations

from datetime import timedelta
from pathlib import Path
from typing import Annotated, Optional, Union
from typing import Annotated

from pydantic import (
AliasChoices,
Expand All @@ -19,7 +14,7 @@
)
from pydantic_settings import BaseSettings, SettingsConfigDict

from .util import normalize_postgres_dsn_for_atoti_sql
from .util import normalize_postgres_dsn_for_atoti_jdbc


class Config(BaseSettings):
Expand All @@ -30,27 +25,25 @@ class Config(BaseSettings):

model_config = SettingsConfigDict(frozen=True)

data_refresh_period: Optional[timedelta] = timedelta(minutes=1)
data_refresh_period: timedelta | None = timedelta(minutes=1)

# The $PORT environment variable is used by most PaaS to indicate the port the app server should bind to.
port: int = 9090

requests_timeout: timedelta = timedelta(seconds=30)

reverse_geocoding_path: Union[HttpUrl, FilePath] = TypeAdapter(
HttpUrl
).validate_python("https://api-adresse.data.gouv.fr/reverse/csv/")
reverse_geocoding_path: HttpUrl | FilePath = TypeAdapter(HttpUrl).validate_python(
"https://api-adresse.data.gouv.fr/reverse/csv/"
)

user_content_storage: Annotated[
Optional[Union[PostgresDsn, Path]],
PostgresDsn | Path | None,
Field(
            # $DATABASE_URL is used by some PaaS such as Heroku to designate the URL of the app's primary database.
# For instance: https://devcenter.heroku.com/articles/heroku-postgresql#designating-a-primary-database.
validation_alias=AliasChoices("user_content_storage", "database_url")
),
] = Path("content")

velib_data_base_path: Union[HttpUrl, DirectoryPath] = TypeAdapter(
velib_data_base_path: HttpUrl | DirectoryPath = TypeAdapter(
HttpUrl
).validate_python(
"https://velib-metropole-opendata.smovengo.cloud/opendata/Velib_Metropole"
Expand All @@ -61,6 +54,6 @@ class Config(BaseSettings):
def normalize_postgres_dsn(cls, value: object) -> object:
try:
postgres_dsn: PostgresDsn = TypeAdapter(PostgresDsn).validate_python(value)
return normalize_postgres_dsn_for_atoti_sql(postgres_dsn)
return normalize_postgres_dsn_for_atoti_jdbc(postgres_dsn)
except ValueError:
return value
2 changes: 0 additions & 2 deletions app/constants.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,3 @@
from __future__ import annotations

from enum import Enum


Expand Down
2 changes: 0 additions & 2 deletions app/create_and_join_tables.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,3 @@
from __future__ import annotations

import atoti as tt

from .constants import StationDetailsTableColumn, StationStatusTableColumn, Table
Expand Down
23 changes: 9 additions & 14 deletions app/create_cubes.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,3 @@
from __future__ import annotations

import atoti as tt

from .constants import (
Expand Down Expand Up @@ -57,19 +55,16 @@ def create_station_cube(session: tt.Session, /) -> None:
}
)

m.update(
{
StationCubeMeasure.BIKES.value: tt.agg.sum(
station_status_table[StationStatusTableColumn.BIKES.value]
),
StationCubeMeasure.CAPACITY.value: tt.agg.sum(
tt.agg.single_value(
station_details_table[StationDetailsTableColumn.CAPACITY.value]
),
scope=tt.OriginScope(l[StationCubeStationLevel.ID.value]),
with session.data_model_transaction():
m[StationCubeMeasure.BIKES.value] = tt.agg.sum(
station_status_table[StationStatusTableColumn.BIKES.value]
)
m[StationCubeMeasure.CAPACITY.value] = tt.agg.sum(
tt.agg.single_value(
station_details_table[StationDetailsTableColumn.CAPACITY.value]
),
}
)
scope=tt.OriginScope({l[StationCubeStationLevel.ID.value]}),
)


def create_cubes(session: tt.Session, /) -> None:
Expand Down
23 changes: 5 additions & 18 deletions app/load_tables.py
Original file line number Diff line number Diff line change
@@ -1,7 +1,4 @@
from __future__ import annotations

from collections.abc import Iterable, Mapping
from datetime import timedelta
from pathlib import Path
from typing import Any, cast

Expand All @@ -17,14 +14,11 @@
def read_station_details(
*,
reverse_geocoding_path: HttpUrl | Path,
timeout: timedelta,
velib_data_base_path: HttpUrl | Path,
) -> pd.DataFrame:
stations_data: Any = cast(
Any,
read_json(
velib_data_base_path, Path("station_information.json"), timeout=timeout
),
read_json(velib_data_base_path, Path("station_information.json")),
)["data"]["stations"]
station_information_df = pd.DataFrame(stations_data)[
["station_id", "name", "capacity", "lat", "lon"]
Expand All @@ -48,7 +42,7 @@ def read_station_details(
)

reverse_geocoded_df = reverse_geocode(
coordinates, reverse_geocoding_path=reverse_geocoding_path, timeout=timeout
coordinates, reverse_geocoding_path=reverse_geocoding_path
).rename(
columns={
"department": StationDetailsTableColumn.DEPARTMENT.value,
Expand All @@ -64,15 +58,10 @@ def read_station_details(
).drop(columns=coordinates_column_names)


def read_station_status(
velib_data_base_path: HttpUrl | Path,
/,
*,
timeout: timedelta,
) -> pd.DataFrame:
def read_station_status(velib_data_base_path: HttpUrl | Path, /) -> pd.DataFrame:
stations_data = cast(
Any,
read_json(velib_data_base_path, Path("station_status.json"), timeout=timeout),
read_json(velib_data_base_path, Path("station_status.json")),
)["data"]["stations"]
station_statuses: list[Mapping[str, Any]] = []
for station_status in stations_data:
Expand All @@ -97,14 +86,12 @@ def read_station_status(
def load_tables(session: tt.Session, /, *, config: Config) -> None:
station_details_df = read_station_details(
reverse_geocoding_path=config.reverse_geocoding_path,
timeout=config.requests_timeout,
velib_data_base_path=config.velib_data_base_path,
)
station_status_df = read_station_status(
config.velib_data_base_path,
timeout=config.requests_timeout,
)

with session.start_transaction():
with session.tables.data_transaction():
session.tables[Table.STATION_DETAILS.value].load_pandas(station_details_df)
session.tables[Table.STATION_STATUS.value].load_pandas(station_status_df)
Loading

0 comments on commit 1e01161

Please sign in to comment.