Commit
Upgrade to pydantic v2 (GSI 338) (#68)
* Update template files

* Change imports of BaseSettings

* Update ModSettings and yaml_settings_factory

Remove settings param from yaml_settings_factory

Convert Config class in ModSettings to model_config + method

* Remove unused ignores

* Actually add pydantic_settings to reqs

* Update template files

* Silence mypy

* Update minor documentation

Change pydantic.BaseSettings -> pydantic_settings.BaseSettings

* Tweak doc for config_from_yaml

* Use ConfigDict/SettingsConfigDict

* Use model_json_schema instead of deprecated schema

* Update other uses of .json, .copy, .schema

* Bump version to 0.11.0

* Fix typos and raise error on empty file path

---------

Co-authored-by: TheByronHimes <TheByronHimes@gmail.com>
TheByronHimes and TheByronHimes authored Oct 18, 2023
1 parent df04920 commit e1b80fd
Showing 30 changed files with 1,218 additions and 714 deletions.
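The steps listed in the commit message follow the usual pydantic v1 -> v2 migration pattern. A minimal, hypothetical sketch of those idioms (illustrative names only, not hexkit's actual ModSettings or yaml_settings_factory):

# Hypothetical sketch of the v2 idioms adopted in this commit.
from pydantic_settings import BaseSettings, SettingsConfigDict  # moved out of pydantic in v2


class ExampleSettings(BaseSettings):
    """The nested v1 `class Config:` becomes the `model_config` attribute."""

    model_config = SettingsConfigDict(env_prefix="example_")

    service_name: str = "my-service"


settings = ExampleSettings()
settings.model_json_schema()  # replaces the deprecated .schema()
settings.model_dump_json()    # replaces .json()
settings.model_copy()         # replaces .copy()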
5 changes: 5 additions & 0 deletions .coveragerc
@@ -0,0 +1,5 @@
[paths]
source =
src
/workspace/src
**/lib/python*/site-packages
1 change: 1 addition & 0 deletions .deprecated_files
@@ -9,6 +9,7 @@

.github/workflows/check_mandatory_and_static_files.yaml
.github/workflows/dev_cd.yaml
+.github/workflows/unit_and_int_tests.yaml

scripts/check_mandatory_and_static_files.py
scripts/update_static_files.py
.github/workflows/unit_and_int_tests.yaml → .github/workflows/tests.yaml
@@ -1,24 +1,26 @@
-name: Unit and Integration Tests
+name: Tests

on: push

jobs:
-  unit_and_int_tests:
+  tests:
    runs-on: ubuntu-latest
-    name: Unit and Integration Tests
+    name: Tests

    steps:
      - uses: actions/checkout@v3

      - id: common
        uses: ghga-de/gh-action-common@v3

-      - name: Run pytest
+      - id: pytest
        run: |
          export ${{ steps.common.outputs.CONFIG_YAML_ENV_VAR_NAME }}="${{ steps.common.outputs.CONFIG_YAML }}"
          pytest \
-            --cov="${{ steps.common.outputs.MAIN_SRC_DIR }}" \
-            --cov-report=xml
+            --cov="${{ steps.common.outputs.PACKAGE_NAME }}" \
+            --cov-report=xml \
+            tests
      - id: coveralls
        name: Upload coverage to coveralls
3 changes: 2 additions & 1 deletion .pre-commit-config.yaml
@@ -13,6 +13,7 @@ repos:
        name: "ensure hooks are up to date"
        language: python
        additional_dependencies:
+          - "packaging"
          - "typer"
        fail_fast: true
        always_run: true
@@ -50,7 +51,7 @@ repos:
      - id: debug-statements
      - id: debug-statements
  - repo: https://github.com/astral-sh/ruff-pre-commit
-    rev: v0.0.291
+    rev: v0.0.292
    hooks:
      - id: ruff
        args: [--fix, --exit-non-zero-on-fix]
2 changes: 1 addition & 1 deletion .ruff.toml
@@ -53,7 +53,7 @@ max-complexity = 10

[per-file-ignores]
"scripts/*" = ["PL", "S", "SIM", "D"]
"tests/*" = ["S", "SIM", "PLR"]
"tests/*" = ["S", "SIM", "PLR", "B011"]
".devcontainer/*" = ["S", "SIM", "D"]
"examples/*" = ["S", "D"]
"__init__.py" = ["D"]
4 changes: 3 additions & 1 deletion .static_files
@@ -25,18 +25,20 @@ scripts/update_openapi_docs.py
scripts/update_readme.py
scripts/update_lock.py
scripts/update_hook_revs.py
+scripts/list_outdated_dependencies.py
scripts/README.md

.github/workflows/check_config_docs.yaml
.github/workflows/check_template_files.yaml
.github/workflows/static_code_analysis.yaml
-.github/workflows/unit_and_int_tests.yaml
+.github/workflows/tests.yaml
.github/workflows/check_openapi_spec.yaml
.github/workflows/check_readme.yaml
.github/workflows/cd.yaml

example_data/README.md

+.coveragerc
.editorconfig
.gitattributes
.gitignore
2 changes: 1 addition & 1 deletion examples/stream_calc/stream_calc/translators/eventpub.py
@@ -16,7 +16,7 @@

"""Translators that target the event publishing protocol."""

-from pydantic import BaseSettings
+from pydantic_settings import BaseSettings

from hexkit.custom_types import JsonObject
from hexkit.protocols.eventpub import EventPublisherProtocol
2 changes: 1 addition & 1 deletion examples/stream_calc/stream_calc/translators/eventsub.py
@@ -16,7 +16,7 @@

"""Translators that target the event publishing protocol."""

-from pydantic import BaseSettings
+from pydantic_settings import BaseSettings

from hexkit.custom_types import Ascii, JsonObject
from hexkit.protocols.eventsub import EventSubscriberProtocol
5 changes: 3 additions & 2 deletions pyproject.toml
@@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"

[project]
name = "hexkit"
version = "0.10.3"
version = "0.11.0"
description = "A Toolkit for Building Microservices using the Hexagonal Architecture"
readme = "README.md"
authors = [
@@ -24,7 +24,8 @@ classifiers = [
"Intended Audience :: Developers",
]
dependencies = [
"pydantic >1.10.6, <2",
"pydantic >=2, <3",
"pydantic_settings >=2, <3",
"PyYAML >=6.0, <7",
"dependency-injector >=4.41.0, <5",
]
714 changes: 410 additions & 304 deletions requirements-dev.txt

Large diffs are not rendered by default.

567 changes: 340 additions & 227 deletions requirements.txt

Large diffs are not rendered by default.

9 changes: 4 additions & 5 deletions scripts/license_checker.py
@@ -15,10 +15,8 @@
# See the License for the specific language governing permissions and
# limitations under the License.

-# pylint: skip-file
-
"""This script checks that the license and license headers
-exists and that they are up to date.
+exist and that they are up to date.
"""

import argparse
@@ -48,6 +46,7 @@
    "sdist",
    "wheels",
    "pip-wheel-metadata",
+    ".coveragerc",
    ".git",
    ".github",
    ".flake8",
@@ -237,7 +236,7 @@ def normalized_text(text: str, chars_to_trim: list[str] = COMMENT_CHARS) -> str:


def format_copyright_template(copyright_template: str, author: str) -> str:
-    """Formats license header by inserting the specified author for every occurence of
+    """Formats license header by inserting the specified author for every occurrence of
    "{author}" in the header template.
    """
    return normalized_text(copyright_template.replace("{author}", author))
@@ -330,7 +329,7 @@ def check_copyright_notice(
        author (str, optional):
            The author that shall be included in the license header.
            It will replace any appearance of "{author}" in the license
-            header. This defaults to an auther info for GHGA.
+            header. This defaults to an author info for GHGA.
    """
    # If the global_copyright is already set, check if the current copyright is
210 changes: 210 additions & 0 deletions scripts/list_outdated_dependencies.py
@@ -0,0 +1,210 @@
#!/usr/bin/env python3

# Copyright 2021 - 2023 Universität Tübingen, DKFZ, EMBL, and Universität zu Köln
# for the German Human Genome-Phenome Archive (GHGA)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Check capped dependencies for newer versions."""
import sys
from collections.abc import Sequence
from pathlib import Path
from typing import Any, NamedTuple

import httpx
from packaging.requirements import Requirement

from script_utils import cli, deps, lock_deps

REPO_ROOT_DIR = Path(__file__).parent.parent.resolve()
PYPROJECT_TOML_PATH = REPO_ROOT_DIR / "pyproject.toml"
DEV_DEPS_PATH = REPO_ROOT_DIR / "requirements-dev.in"
LOCK_FILE_PATH = REPO_ROOT_DIR / "requirements-dev.txt"


class OutdatedDep(NamedTuple):
    """Encapsulates data of an outdated dependency"""

    name: str
    specified_version: str
    pypi_version: str


def get_main_deps_pyproject(modified_pyproject: dict[str, Any]) -> list[Requirement]:
    """Get a list of the dependencies from pyproject.toml"""

    dependencies: list[str] = []
    if "dependencies" in modified_pyproject["project"]:
        dependencies = modified_pyproject["project"]["dependencies"]

    return [Requirement(dependency) for dependency in dependencies]


def get_optional_deps_pyproject(
    modified_pyproject: dict[str, Any]
) -> list[Requirement]:
    """Get a list of the optional dependencies from pyproject.toml"""

    dependencies: list[str] = []

    if "optional-dependencies" in modified_pyproject["project"]:
        for optional_dependency_list in modified_pyproject["project"][
            "optional-dependencies"
        ]:
            dependencies.extend(
                modified_pyproject["project"]["optional-dependencies"][
                    optional_dependency_list
                ]
            )

    return [Requirement(dependency) for dependency in dependencies]


def get_deps_dev() -> list[Requirement]:
    """Get a list of raw dependency strings from requirements-dev.in"""
    with open(DEV_DEPS_PATH, encoding="utf-8") as dev_deps:
        dependencies = [
            line
            for line in (line.strip() for line in dev_deps)
            if line  # skip empty lines
            and not line.startswith("#")  # skip comments
            and "requirements-dev-common.in" not in line  # skip inclusion line
        ]

    return [Requirement(dependency) for dependency in dependencies]


def get_version_from_pypi(package_name: str, client: httpx.Client) -> str:
    """Make a call to PyPI to get the version information about `package_name`."""
    try:
        response = client.get(f"https://pypi.org/pypi/{package_name}/json")
        body = response.json()
        version = body["info"]["version"]
    except (httpx.RequestError, KeyError):
        cli.echo_failure(f"Unable to retrieve information for package '{package_name}'")
        sys.exit(1)

    return version


def get_outdated_deps(
    requirements: list[Requirement], strip: bool = False
) -> list[OutdatedDep]:
    """Determine which packages have updates available outside of pinned ranges."""
    outdated: list[OutdatedDep] = []
    with httpx.Client(timeout=10) as client:
        for requirement in requirements:
            pypi_version = get_version_from_pypi(requirement.name, client)

            specified = str(requirement.specifier)

            # Strip the specifier symbols from the front of the string if desired
            if strip:
                specified = specified.lstrip("<=>!~")

            # append package name, specified version, and latest available version
            if not requirement.specifier.contains(pypi_version):
                outdated.append(OutdatedDep(requirement.name, specified, pypi_version))
    outdated.sort()
    return outdated


def print_table(
    rows: Sequence[tuple[str, ...]],
    headers: tuple[str, ...],
    delimiter: str = " | ",
):
    """
    List outdated dependencies in a formatted table.
    Args:
        `rows`: A sequence of tuples containing strings.
        `headers`: A tuple containing the header strings for the table columns.
    """
    if rows and len(rows[0]) != len(headers):
        raise RuntimeError("Number of headers doesn't match number of columns")

    header_lengths = [len(header) for header in headers]

    # Find the maximum length of each column
    col_widths = [max(len(str(cell)) for cell in col) for col in zip(*rows)]

    # Create a row format based on the maximum column widths
    row_format = delimiter.join(
        f"{{:<{max(width, header_len)}}}"
        for width, header_len in zip(col_widths, header_lengths)
    )

    print(" " + row_format.format(*headers))
    for dependency in rows:
        print(" " + row_format.format(*dependency))


def main(transitive: bool = False):
    """Check capped dependencies for newer versions.
    Examine `pyproject.toml` and `requirements-dev.in` for capped dependencies.
    Make a call to PyPI to see if any newer versions exist.
    Use `transitive` to show outdated transitive dependencies.
    """
    modified_pyproject: dict[str, Any] = deps.get_modified_pyproject(
        PYPROJECT_TOML_PATH
    )
    main_dependencies = get_main_deps_pyproject(modified_pyproject)
    optional_dependencies = get_optional_deps_pyproject(modified_pyproject)
    dev_dependencies = get_deps_dev()

    outdated_main = get_outdated_deps(main_dependencies)
    outdated_optional = get_outdated_deps(optional_dependencies)
    outdated_dev = get_outdated_deps(dev_dependencies)

    found_outdated = any([outdated_main, outdated_optional, outdated_dev])
    transitive_headers = ("PACKAGE", "SPECIFIED", "AVAILABLE")
    if outdated_main:
        location = PYPROJECT_TOML_PATH.name + " - dependencies"
        cli.echo_failure(f"Outdated dependencies from {location}:")
        print_table(outdated_main, transitive_headers)
    if outdated_optional:
        location = PYPROJECT_TOML_PATH.name + " - optional-dependencies"
        cli.echo_failure(f"Outdated dependencies from {location}:")
        print_table(outdated_optional, transitive_headers)
    if outdated_dev:
        cli.echo_failure(f"Outdated dependencies from {DEV_DEPS_PATH.name}:")
        print_table(outdated_dev, transitive_headers)

    if not found_outdated:
        cli.echo_success("All top-level dependencies up to date.")

    if transitive:
        top_level: set[str] = {
            item.name for item in outdated_main + outdated_optional + outdated_dev
        }

        print("\nRetrieving transitive dependency information...")
        transitive_dependencies = lock_deps.get_lock_file_deps(
            LOCK_FILE_PATH, exclude=top_level
        )
        outdated_transitive = get_outdated_deps(transitive_dependencies, strip=True)

        if outdated_transitive:
            transitive_headers = ("PACKAGE", "PINNED", "AVAILABLE")

            cli.echo_failure("Outdated transitive dependencies:")
            print_table(outdated_transitive, transitive_headers)
        else:
            cli.echo_success("All transitive dependencies up to date.")


if __name__ == "__main__":
    cli.run(main)
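For context, the outdated check in get_outdated_deps above boils down to a packaging specifier test. A minimal sketch with made-up version numbers (illustration only, not part of this diff):

# Minimal sketch of the outdated-ness check, with made-up values:
from packaging.requirements import Requirement

req = Requirement("pydantic >=2, <3")
latest_on_pypi = "3.0.1"  # hypothetical latest release reported by PyPI

# A requirement is reported as outdated when the latest release falls
# outside the declared specifier range:
print(req.specifier.contains(latest_on_pypi))  # False -> would be listed as outdated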