diff --git a/.github/workflows/pypi_publish.yaml b/.github/workflows/pypi_publish.yaml index 029e60f5..3ded6694 100644 --- a/.github/workflows/pypi_publish.yaml +++ b/.github/workflows/pypi_publish.yaml @@ -18,7 +18,7 @@ jobs: - name: Verify Package Version vs Tag Version run: | - PKG_VER="$(python setup.py -V)" + PKG_VER="$(grep -oP 'version = "\K[^"]+' pyproject.toml)" TAG_VER="${GITHUB_REF##*/}" echo "Package version is $PKG_VER" >&2 echo "Tag version is $TAG_VER" >&2 diff --git a/examples/stream_calc/sc_tests/integration/test_event_api.py b/examples/stream_calc/sc_tests/integration/test_event_api.py index 018b6ec1..217b457c 100644 --- a/examples/stream_calc/sc_tests/integration/test_event_api.py +++ b/examples/stream_calc/sc_tests/integration/test_event_api.py @@ -27,11 +27,11 @@ import pytest from kafka import KafkaConsumer, KafkaProducer -from stream_calc.config import Config -from stream_calc.main import main from testcontainers.kafka import KafkaContainer from hexkit.custom_types import JsonObject +from stream_calc.config import Config +from stream_calc.main import main DEFAULT_CONFIG = Config() diff --git a/examples/stream_calc/sc_tests/unit/test_calc.py b/examples/stream_calc/sc_tests/unit/test_calc.py index bca5016d..563ba6c8 100644 --- a/examples/stream_calc/sc_tests/unit/test_calc.py +++ b/examples/stream_calc/sc_tests/unit/test_calc.py @@ -20,6 +20,7 @@ from unittest.mock import AsyncMock import pytest + from stream_calc.core.calc import StreamCalculator diff --git a/examples/stream_calc/sc_tests/unit/test_eventpub.py b/examples/stream_calc/sc_tests/unit/test_eventpub.py index 5d00990e..f00de8bf 100644 --- a/examples/stream_calc/sc_tests/unit/test_eventpub.py +++ b/examples/stream_calc/sc_tests/unit/test_eventpub.py @@ -17,13 +17,13 @@ """Testing the `translators.eventpub` module.""" import pytest + +from hexkit.providers.testing.eventpub import InMemEventPublisher from stream_calc.translators.eventpub import ( EventResultEmitter, EventResultEmitterConfig, ) -from hexkit.providers.testing.eventpub import InMemEventPublisher - @pytest.mark.asyncio async def test_emit_result(): diff --git a/examples/stream_calc/sc_tests/unit/test_eventsub.py b/examples/stream_calc/sc_tests/unit/test_eventsub.py index e11713e5..b0dd9388 100644 --- a/examples/stream_calc/sc_tests/unit/test_eventsub.py +++ b/examples/stream_calc/sc_tests/unit/test_eventsub.py @@ -19,13 +19,13 @@ from unittest.mock import AsyncMock import pytest + +from hexkit.custom_types import JsonObject from stream_calc.translators.eventsub import ( EventProblemReceiver, EventProblemReceiverConfig, ) -from hexkit.custom_types import JsonObject - @pytest.mark.asyncio @pytest.mark.parametrize( diff --git a/examples/stream_calc/stream_calc/config.py b/examples/stream_calc/stream_calc/config.py index 5d275e09..a11790b3 100644 --- a/examples/stream_calc/stream_calc/config.py +++ b/examples/stream_calc/stream_calc/config.py @@ -16,11 +16,10 @@ """Config parameters.""" +from hexkit.providers.akafka import KafkaConfig from stream_calc.translators.eventpub import EventResultEmitterConfig from stream_calc.translators.eventsub import EventProblemReceiverConfig -from hexkit.providers.akafka import KafkaConfig - try: # workaround for https://github.com/pydantic/pydantic/issues/5821 from typing_extensions import Literal except ImportError: diff --git a/examples/stream_calc/stream_calc/container.py b/examples/stream_calc/stream_calc/container.py index 3dd6effe..1a57923b 100644 --- a/examples/stream_calc/stream_calc/container.py +++ 
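A note on the workflow change above: instead of shelling out to `python setup.py -V`, the release check now greps the version straight out of pyproject.toml. A minimal Python sketch of the same comparison, assuming a PEP 621 `[project]` table and tags without a `v` prefix (both assumptions, not part of the diff):

    # Sketch only: read the package version from pyproject.toml and compare it
    # to the last path component of GITHUB_REF (the pushed tag).
    import os
    import sys
    import tomllib  # Python 3.11+; older interpreters need the tomli backport

    with open("pyproject.toml", "rb") as fp:
        pkg_ver = tomllib.load(fp)["project"]["version"]

    tag_ver = os.environ["GITHUB_REF"].rpartition("/")[-1]
    if pkg_ver != tag_ver:
        sys.exit(f"Package version {pkg_ver} does not match tag version {tag_ver}")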
b/examples/stream_calc/stream_calc/container.py @@ -22,14 +22,13 @@ """Module hosting the dependency injection container.""" # pylint: disable=wrong-import-order +from hexkit.inject import ContainerBase, get_configurator, get_constructor +from hexkit.providers.akafka import KafkaEventPublisher, KafkaEventSubscriber from stream_calc.config import Config from stream_calc.core.calc import StreamCalculator from stream_calc.translators.eventpub import EventResultEmitter from stream_calc.translators.eventsub import EventProblemReceiver -from hexkit.inject import ContainerBase, get_configurator, get_constructor -from hexkit.providers.akafka import KafkaEventPublisher, KafkaEventSubscriber - class Container(ContainerBase): """DI Container""" diff --git a/examples/stream_calc/stream_calc/translators/eventpub.py b/examples/stream_calc/stream_calc/translators/eventpub.py index 55c33aa2..45ce53dd 100644 --- a/examples/stream_calc/stream_calc/translators/eventpub.py +++ b/examples/stream_calc/stream_calc/translators/eventpub.py @@ -17,10 +17,10 @@ """Translators that target the event publishing protocol.""" from pydantic import BaseSettings -from stream_calc.ports.result_emitter import CalcResultEmitterPort from hexkit.custom_types import JsonObject from hexkit.protocols.eventpub import EventPublisherProtocol +from stream_calc.ports.result_emitter import CalcResultEmitterPort class EventResultEmitterConfig(BaseSettings): @@ -39,7 +39,7 @@ def __init__( self, *, config: EventResultEmitterConfig, - event_publisher: EventPublisherProtocol + event_publisher: EventPublisherProtocol, ) -> None: """Configure with provider for the the EventPublisherProto""" diff --git a/examples/stream_calc/stream_calc/translators/eventsub.py b/examples/stream_calc/stream_calc/translators/eventsub.py index 74b119d8..923dd44d 100644 --- a/examples/stream_calc/stream_calc/translators/eventsub.py +++ b/examples/stream_calc/stream_calc/translators/eventsub.py @@ -17,10 +17,10 @@ """Translators that target the event publishing protocol.""" from pydantic import BaseSettings -from stream_calc.ports.problem_receiver import ArithProblemHandlerPort from hexkit.custom_types import Ascii, JsonObject from hexkit.protocols.eventsub import EventSubscriberProtocol +from stream_calc.ports.problem_receiver import ArithProblemHandlerPort class EventProblemReceiverConfig(BaseSettings): diff --git a/requirements-dev.in b/requirements-dev.in index 4d2eec3c..1cb47f0c 100644 --- a/requirements-dev.in +++ b/requirements-dev.in @@ -3,5 +3,4 @@ # common requirements for development and testing -r requirements-dev-common.in -# additional requirements for testing hexkit providers -hexkit[test-all] +# additional requirements can be listed here diff --git a/setup.cfg b/setup.cfg deleted file mode 100644 index c2117de2..00000000 --- a/setup.cfg +++ /dev/null @@ -1,82 +0,0 @@ -# Copyright 2021 - 2023 Universität Tübingen, DKFZ, EMBL, and Universität zu Köln -# for the German Human Genome-Phenome Archive (GHGA) -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -[metadata] -name = hexkit -version = attr: hexkit.__version__ -description = A Toolkit for Building Microservices using the Hexagonal Architecture -url = https://github.com/ghga-de/hexkit -long_description = file: README.md -long_description_content_type = text/markdown; charset=UTF-8 -author = German Human Genome Phenome Archive (GHGA) -author_email = contact@ghga.de -license = Apache 2.0 -classifiers = - Development Status :: 1 - Planning - Operating System :: POSIX :: Linux - Programming Language :: Python :: 3.9 - Programming Language :: Python :: 3.10 - Programming Language :: Python :: 3.11 - License :: OSI Approved :: Apache Software License - Topic :: Internet :: WWW/HTTP :: HTTP Servers - Topic :: Software Development :: Libraries - Intended Audience :: Developers - -[options] -zip_safe = False -include_package_data = True -packages = find: -install_requires = - pydantic==1.10.6 - PyYAML==6.0 - dependency-injector==4.41.0 -python_requires = >= 3.9 - -[options.package_data] -* = *.yaml, *.json - -[options.extras_require] -akafka = - aiokafka==0.8.0 - jsonschema>=4.17.3 -s3 = - boto3==1.26.50 - botocore==1.29.50 -mongodb = - motor==3.1.1 - -test-akafka = - %(akafka)s - testcontainers[kafka]==3.4.1 -test-s3 = - %(s3)s - testcontainers==3.4.1 -test-mongodb = - %(mongodb)s - testcontainers[mongo]==3.4.1 - -test-all = - %(test-akafka)s - %(test-s3)s - %(test-mongodb)s - -all = - %(akafka)s - %(s3)s - %(mongodb)s - %(test-all)s - -[options.packages.find] -exclude = tests diff --git a/src/hexkit/__init__.py b/src/hexkit/__init__.py index 675cc080..a179c1d9 100644 --- a/src/hexkit/__init__.py +++ b/src/hexkit/__init__.py @@ -15,4 +15,6 @@ """A Toolkit for Building Microservices using the Hexagonal Architecture""" -__version__ = "0.10.2" +from importlib.metadata import version + +__version__ = version(__package__) diff --git a/src/hexkit/config.py b/src/hexkit/config.py index 353cec80..9aec5f1f 100644 --- a/src/hexkit/config.py +++ b/src/hexkit/config.py @@ -17,7 +17,7 @@ import os from pathlib import Path -from typing import Any, Callable, Dict, Final, Optional +from typing import Any, Callable, Final, Optional import yaml from pydantic import BaseSettings @@ -155,9 +155,8 @@ def constructor_wrapper( # get default path if config_yaml not specified: if config_yaml is None: config_yaml = get_default_config_yaml(prefix) - else: - if not config_yaml.is_file(): - raise ConfigYamlDoesNotExist(path=config_yaml) + elif not config_yaml.is_file(): + raise ConfigYamlDoesNotExist(path=config_yaml) class ModSettings(settings): """Modifies the orginal Settings class provided by the user""" diff --git a/src/hexkit/protocols/dao.py b/src/hexkit/protocols/dao.py index 98fccb8e..edf9efa1 100644 --- a/src/hexkit/protocols/dao.py +++ b/src/hexkit/protocols/dao.py @@ -18,6 +18,8 @@ with the database. 
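With setup.cfg deleted, the `__version__` change above makes the installed distribution metadata the single source of truth for the package version. A sketch of the same pattern, with a fallback for running from an uninstalled checkout (the fallback is an assumption added here, not part of the change):

    # Version resolved from installed package metadata via importlib.metadata.
    from importlib.metadata import PackageNotFoundError, version

    try:
        __version__ = version("hexkit")
    except PackageNotFoundError:  # e.g. running from a raw source checkout
        __version__ = "0.0.0+unknown"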
""" +# ruff: noqa: PLR0913 + import typing from abc import ABC, abstractmethod from collections.abc import AsyncGenerator, AsyncIterator, Collection, Mapping diff --git a/src/hexkit/protocols/objstorage.py b/src/hexkit/protocols/objstorage.py index 10050a5d..b17710aa 100644 --- a/src/hexkit/protocols/objstorage.py +++ b/src/hexkit/protocols/objstorage.py @@ -16,6 +16,7 @@ """Protocol for interacting with S3-like Object Storages.""" +# ruff: noqa: PLR0913 import re from abc import ABC, abstractmethod diff --git a/src/hexkit/providers/akafka/__init__.py b/src/hexkit/providers/akafka/__init__.py index f0fa5c00..216e92d9 100644 --- a/src/hexkit/providers/akafka/__init__.py +++ b/src/hexkit/providers/akafka/__init__.py @@ -21,7 +21,7 @@ from .provider import ( KafkaConfig, - KafkaEventPublisher, # noqa: F401 + KafkaEventPublisher, KafkaEventSubscriber, ) diff --git a/src/hexkit/providers/akafka/provider.py b/src/hexkit/providers/akafka/provider.py index e25b54fc..e3446f03 100644 --- a/src/hexkit/providers/akafka/provider.py +++ b/src/hexkit/providers/akafka/provider.py @@ -270,7 +270,7 @@ def get_event_type(event: ConsumerEvent) -> str: class KafkaConsumerCompatible(Protocol): """A python duck type protocol describing an AIOKafkaConsumer or equivalent.""" - def __init__( + def __init__( # noqa: PLR0913 self, *topics: Ascii, bootstrap_servers: str, diff --git a/src/hexkit/providers/mongodb/__init__.py b/src/hexkit/providers/mongodb/__init__.py index 624d1b21..6fe715c0 100644 --- a/src/hexkit/providers/mongodb/__init__.py +++ b/src/hexkit/providers/mongodb/__init__.py @@ -18,6 +18,6 @@ and associated utilities. """ -from .provider import MongoDbConfig, MongoDbDaoFactory # noqa: F401 +from .provider import MongoDbConfig, MongoDbDaoFactory __all__ = ["MongoDbConfig", "MongoDbDaoFactory"] diff --git a/src/hexkit/providers/mongodb/provider.py b/src/hexkit/providers/mongodb/provider.py index 19156106..3e748a3c 100644 --- a/src/hexkit/providers/mongodb/provider.py +++ b/src/hexkit/providers/mongodb/provider.py @@ -19,17 +19,16 @@ Utilities for testing are located in `./testutils.py`. """ +# ruff: noqa: PLR0913 + import json from abc import ABC from collections.abc import AsyncGenerator, AsyncIterator, Collection, Mapping from contextlib import AbstractAsyncContextManager from typing import Any, Generic, Optional, Union, overload -from motor.motor_asyncio import ( - AsyncIOMotorClient, - AsyncIOMotorClientSession, - AsyncIOMotorCollection, -) +from motor.core import AgnosticClientSession, AgnosticCollection +from motor.motor_asyncio import AsyncIOMotorClient from pydantic import BaseSettings, Field, SecretStr from hexkit.protocols.dao import ( @@ -59,8 +58,8 @@ def __init__( *, dto_model: type[Dto], id_field: str, - collection: AsyncIOMotorCollection, - session: Optional[AsyncIOMotorClientSession] = None, + collection: AgnosticCollection, + session: Optional[AgnosticClientSession] = None, ): """Initialize the DAO. @@ -260,9 +259,9 @@ def __init__( dto_model: type[Dto], dto_creation_model: type[DtoCreation_contra], id_field: str, - collection: AsyncIOMotorCollection, + collection: AgnosticCollection, id_generator: AsyncGenerator[str, None], - session: Optional[AsyncIOMotorClientSession] = None, + session: Optional[AgnosticClientSession] = None, ): """Initialize the DAO. 
@@ -408,7 +407,7 @@ def __init__( ) self._db = self._client[self._config.db_name] - def __repr__(self) -> str: + def __repr__(self) -> str: # noqa: D105 return f"{self.__class__.__qualname__}(config={repr(self._config)})" @overload diff --git a/src/hexkit/providers/s3/__init__.py b/src/hexkit/providers/s3/__init__.py index 137050ab..a01dc2e3 100644 --- a/src/hexkit/providers/s3/__init__.py +++ b/src/hexkit/providers/s3/__init__.py @@ -19,6 +19,6 @@ """ # shortcuts: -from hexkit.providers.s3.provider import S3Config, S3ObjectStorage # noqa: F401 +from hexkit.providers.s3.provider import S3Config, S3ObjectStorage __all__ = ["S3Config", "S3ObjectStorage"] diff --git a/src/hexkit/providers/s3/provider.py b/src/hexkit/providers/s3/provider.py index d10be847..07137770 100644 --- a/src/hexkit/providers/s3/provider.py +++ b/src/hexkit/providers/s3/provider.py @@ -19,6 +19,8 @@ Utilities for testing are located in `./testutils.py`. """ +# ruff: noqa: PLR0913 + import asyncio from functools import lru_cache from pathlib import Path @@ -172,7 +174,7 @@ def _format_s3_error_code(error_code: str): return f"S3 error with code: '{error_code}'" @classmethod - def _translate_s3_client_errors( # noqa: C901 + def _translate_s3_client_errors( cls, source_exception: botocore.exceptions.ClientError, *, @@ -202,27 +204,22 @@ def _translate_s3_client_errors( # noqa: C901 exception = cls.ObjectAlreadyExistsError( bucket_id=bucket_id, object_id=object_id ) - elif error_code == "ObjectAlreadyInActiveTierError": - exception = cls.ObjectAlreadyExistsError( - bucket_id=bucket_id, object_id=object_id - ) elif error_code == "NoSuchUpload": if upload_id is None or bucket_id is None or object_id is None: raise ValueError() exception = cls.MultiPartUploadNotFoundError( upload_id=upload_id, bucket_id=bucket_id, object_id=object_id ) + # exact match not found, match by keyword: + elif "Bucket" in error_code: + exception = cls.BucketError(cls._format_s3_error_code(error_code)) + elif "Object" in error_code or "Key" in error_code: + exception = cls.ObjectError(cls._format_s3_error_code(error_code)) else: - # exact match not found, match by keyword: - if "Bucket" in error_code: - exception = cls.BucketError(cls._format_s3_error_code(error_code)) - elif "Object" in error_code or "Key" in error_code: - exception = cls.ObjectError(cls._format_s3_error_code(error_code)) - else: - # if nothing matches, return a generic error: - exception = cls.ObjectStorageProtocolError( - cls._format_s3_error_code(error_code) - ) + # if nothing matches, return a generic error: + exception = cls.ObjectStorageProtocolError( + cls._format_s3_error_code(error_code) + ) return exception diff --git a/src/hexkit/providers/s3/testutils.py b/src/hexkit/providers/s3/testutils.py index 3148373e..eb931b6e 100644 --- a/src/hexkit/providers/s3/testutils.py +++ b/src/hexkit/providers/s3/testutils.py @@ -18,17 +18,20 @@ Please note, only use for testing purposes. 
""" + +# ruff: noqa: PLR0913 + import hashlib import os from collections.abc import Generator from contextlib import contextmanager from pathlib import Path from tempfile import NamedTemporaryFile -from typing import List, Optional +from typing import Optional import pytest import requests -from pydantic import BaseModel, validator +from pydantic import BaseModel, SecretStr, validator from testcontainers.localstack import LocalStackContainer from hexkit.custom_types import PytestScope @@ -49,7 +52,7 @@ def calc_md5(content: bytes) -> str: """Calc the md5 checksum for the specified bytes.""" - return hashlib.md5(content).hexdigest() # nosec + return hashlib.md5(content, usedforsecurity=False).hexdigest() # nosec class FileObject(BaseModel): @@ -63,14 +66,14 @@ class FileObject(BaseModel): # pylint: disable=no-self-argument @validator("content", always=True) - def read_content(cls, _, values): + def read_content(cls, _, values): # noqa: N805 """Read in the file content.""" with open(values["file_path"], "rb") as file: return file.read() # pylint: disable=no-self-argument @validator("md5", always=True) - def calc_md5_from_content(cls, _, values): + def calc_md5_from_content(cls, _, values): # noqa: N805 """Calculate md5 based on the content.""" return calc_md5(values["content"]) @@ -297,7 +300,7 @@ def download_and_check_test_file(presigned_url: str, expected_md5: str): observed_md5 = calc_md5(response.content) - assert ( # nosec + assert ( # noqa: S101 observed_md5 == expected_md5 ), "downloaded file has unexpected md5 checksum" @@ -329,10 +332,10 @@ async def populate_storage( def config_from_localstack_container(container: LocalStackContainer) -> S3Config: """Prepares a S3Config from an instance of a localstack test container.""" s3_endpoint_url = container.get_url() - return S3Config( # nosec + return S3Config( # type: ignore[call-arg] s3_endpoint_url=s3_endpoint_url, s3_access_key_id="test", - s3_secret_access_key="test", + s3_secret_access_key=SecretStr("test"), ) @@ -387,7 +390,7 @@ async def typical_workflow( await storage_client.create_bucket(bucket1_id) print(" - confirm bucket creation") - assert await storage_client.does_bucket_exist(bucket1_id) # nosec + assert await storage_client.does_bucket_exist(bucket1_id) # noqa: S101 if use_multipart_upload: await multipart_upload_file( @@ -407,7 +410,7 @@ async def typical_workflow( ) print(" - confirm object upload") - assert await storage_client.does_object_exist( # nosec + assert await storage_client.does_object_exist( # noqa: S101 bucket_id=bucket1_id, object_id=object_id ) @@ -430,10 +433,10 @@ async def typical_workflow( await storage_client.delete_object(bucket_id=bucket1_id, object_id=object_id) print(" - confirm move") - assert not await storage_client.does_object_exist( # nosec + assert not await storage_client.does_object_exist( # noqa: S101 bucket_id=bucket1_id, object_id=object_id ) - assert await storage_client.does_object_exist( # nosec + assert await storage_client.does_object_exist( # noqa: S101 bucket_id=bucket2_id, object_id=object_id ) @@ -441,7 +444,7 @@ async def typical_workflow( await storage_client.delete_bucket(bucket1_id) print(" - confirm bucket deletion") - assert not await storage_client.does_bucket_exist(bucket1_id) # nosec + assert not await storage_client.does_bucket_exist(bucket1_id) # noqa: S101 print(f" - download object from bucket {bucket2_id}") download_url2 = await storage_client.get_object_download_url( diff --git a/tests/fixtures/config.py b/tests/fixtures/config.py index 8ffb2e6c..3f6f237d 100644 
--- a/tests/fixtures/config.py +++ b/tests/fixtures/config.py @@ -20,7 +20,7 @@ import re from dataclasses import dataclass from pathlib import Path -from typing import Any, Dict +from typing import Any from pydantic import BaseModel, BaseSettings @@ -37,7 +37,7 @@ class ConfigYamlFixture(BaseModel): """Container for config yaml fixtures""" path: Path - content: Dict[str, Any] + content: dict[str, Any] def read_config_yaml(name: str): @@ -61,14 +61,16 @@ class EnvVarFixture: """Container for env var set. This class can be used as context manager so that the env vars are available within a with block but, after leaving the with block, - the original enviroment is restored.""" + the original environment is restored. + """ - env_vars: Dict[str, str] + env_vars: dict[str, str] prefix: str = DEFAULT_CONFIG_PREFIX def __enter__(self): - """makes a backup of the environment and set the - env_vars""" + """Makes a backup of the environment and sets the + env_vars + """ # pylint: disable=attribute-defined-outside-init self.env_backup = copy.deepcopy(os.environ) @@ -76,11 +78,12 @@ def __enter__(self): os.environ[f"{self.prefix}_{name}"] = value def __exit__(self, exc_type, exc_val, exc_tb): - """restores the original environment""" - os.environ = self.env_backup + """Restores the original environment""" + os.environ.clear() + os.environ = self.env_backup # noqa: B003 -def read_env_var_sets() -> Dict[str, EnvVarFixture]: +def read_env_var_sets() -> dict[str, EnvVarFixture]: """Read env vars sets and return a list of EnvVarFixtures.""" env_var_dict = utils.read_yaml(BASE_DIR / "config_env_var_sets.yaml") diff --git a/tests/fixtures/dummy_joint.py b/tests/fixtures/dummy_joint.py index 0a97b183..b3186da1 100644 --- a/tests/fixtures/dummy_joint.py +++ b/tests/fixtures/dummy_joint.py @@ -15,8 +15,8 @@ # """Simple joint fixture for testing the event loop fixture override's impact""" +from collections.abc import AsyncGenerator from dataclasses import dataclass -from typing import AsyncGenerator import pytest_asyncio @@ -35,4 +35,5 @@ class JointFixture: @pytest_asyncio.fixture(scope="module") async def joint_fixture(s3_fixture) -> AsyncGenerator[JointFixture, None]: + """Simple joint fixture only wrapping S3 fixture""" yield JointFixture(s3_fixture=s3_fixture) diff --git a/tests/fixtures/inject.py b/tests/fixtures/inject.py index 94482eed..25df86bf 100644 --- a/tests/fixtures/inject.py +++ b/tests/fixtures/inject.py @@ -24,9 +24,7 @@ class ValidResource(dependency_injector.resources.AsyncResource): - """ - An example of an AsyncResource as defined by the `dependency_injector` framework. - """ + """An example of an AsyncResource as defined by the `dependency_injector` framework.""" class Resource: """Returned upon executing the `init` method.""" @@ -41,13 +39,12 @@ async def init(self, foo: str = "foo") -> Resource: return self.Resource(foo=foo) async def shutdown(self, resource: Resource) -> None: # type: ignore + """Cleanup async resource""" resource.in_context = False class ValidSyncResource(dependency_injector.resources.Resource): - """ - An example of an ordinary Resource as defined by the `dependency_injector` framework. 
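The __exit__ fix above clears the patched environment before restoring the backup, so variables set inside the with block no longer leak out. The same backup/patch/restore pattern as a standalone sketch (using update() on restore instead of rebinding os.environ, a deliberate variation):

    # Minimal context manager mirroring EnvVarFixture's enter/exit behavior.
    import copy
    import os
    from collections.abc import Iterator
    from contextlib import contextmanager

    @contextmanager
    def patched_env(**env_vars: str) -> Iterator[None]:
        backup = copy.deepcopy(os.environ)
        os.environ.update(env_vars)
        try:
            yield
        finally:
            os.environ.clear()
            os.environ.update(backup)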
- """ + """An example of an ordinary Resource as defined by the `dependency_injector` framework.""" class Resource: """Returned upon executing the `init` method.""" @@ -62,6 +59,7 @@ def init(self, foo: str = "foo") -> Resource: return self.Resource(foo=foo) def shutdown(self, resource: Resource) -> None: # type: ignore + """Cleanup async resource""" resource.in_context = False diff --git a/tests/fixtures/utils.py b/tests/fixtures/utils.py index 2fc9b802..3e84e3f5 100644 --- a/tests/fixtures/utils.py +++ b/tests/fixtures/utils.py @@ -31,5 +31,5 @@ def read_yaml(path: Path) -> dict: """Read yaml file and return content as dict.""" - with open(path, "r", encoding="UTF-8") as file: + with open(path, encoding="UTF-8") as file: return yaml.safe_load(file) diff --git a/tests/integration/test_akafka_testutils.py b/tests/integration/test_akafka_testutils.py index c7daaf16..b504205d 100644 --- a/tests/integration/test_akafka_testutils.py +++ b/tests/integration/test_akafka_testutils.py @@ -15,18 +15,19 @@ # """Testing of the Kafka testutils.""" -from typing import Sequence +from collections.abc import Sequence import pytest from kafka import KafkaAdminClient +from kafka.admin.new_topic import NewTopic from hexkit.providers.akafka import KafkaConfig, KafkaEventPublisher -from hexkit.providers.akafka.testutils import kafka_fixture # noqa: F401 from hexkit.providers.akafka.testutils import ( ExpectedEvent, KafkaFixture, RecordedEvent, ValidationError, + kafka_fixture, # noqa: F401 ) @@ -35,18 +36,23 @@ def test_delete_topics_specific(kafka_fixture: KafkaFixture): # noqa: F811 admin_client = KafkaAdminClient( bootstrap_servers=kafka_fixture.config.kafka_servers ) + new_topics = [ + NewTopic(name="test", num_partitions=1, replication_factor=1), + NewTopic(name="test2", num_partitions=1, replication_factor=1), + ] + admin_client.create_topics(new_topics) - # get the topics created by default (confluent.support.metrics) initial_topics = admin_client.list_topics() - initial_length = len(initial_topics) - assert initial_length > 0 + assert "test" in initial_topics # delete that topic - kafka_fixture.delete_topics(initial_topics) + kafka_fixture.delete_topics("test") # make sure it got deleted - assert len(admin_client.list_topics()) + 1 == initial_length + final_topics = admin_client.list_topics() + assert "test" not in final_topics + assert "test2" in final_topics def test_delete_topics_all(kafka_fixture: KafkaFixture): # noqa: F811 @@ -55,12 +61,21 @@ def test_delete_topics_all(kafka_fixture: KafkaFixture): # noqa: F811 bootstrap_servers=kafka_fixture.config.kafka_servers ) - assert len(admin_client.list_topics()) > 0 + new_topics = [ + NewTopic(name="test", num_partitions=1, replication_factor=1), + NewTopic(name="test2", num_partitions=1, replication_factor=1), + ] + admin_client.create_topics(new_topics) + initial_topics = admin_client.list_topics() + assert "test" in initial_topics + assert "test2" in initial_topics # delete all topics by not specifying any kafka_fixture.delete_topics() - assert len(admin_client.list_topics()) == 0 + final_topics = admin_client.list_topics() + assert "test" not in final_topics + assert "test2" not in final_topics @pytest.mark.asyncio @@ -83,7 +98,6 @@ async def test_event_recorder( kafka_fixture: KafkaFixture, # noqa: F811 ): """Test event recording using the EventRecorder class.""" - topic = "test_topic" config = KafkaConfig( @@ -112,7 +126,6 @@ async def test_expect_events_happy( """Test successful validation of recorded events with the expect_events method of the 
KafkaFixture. """ - expected_events = [ ExpectedEvent( payload={"test_content": "Hello"}, type_="test_hello", key="test_key" ) @@ -219,7 +232,6 @@ async def test_expect_events_mismatch( the methods `start_recording` and `stop_and_check` so that we can nicely locate where the ValidationError is thrown. """ - expected_events = [ ExpectedEvent( payload={"test_content": "Hello"}, type_="test_hello", key="test_key" ) diff --git a/tests/integration/test_inject.py b/tests/integration/test_inject.py index cd7746c2..d9dd19b0 100644 --- a/tests/integration/test_inject.py +++ b/tests/integration/test_inject.py @@ -44,7 +44,6 @@ async def test_context_constructor_with_decl_container(): Test the context constructor together with the `DeclarativeContainer` from the `dependency_injector` framework. """ - foo = "bar" class Container(dependency_injector.containers.DeclarativeContainer): @@ -75,10 +74,7 @@ class Container(dependency_injector.containers.DeclarativeContainer): ], ) async def test_container_base(provides, constructor, has_context: bool): - """ - Test the ContainerBase and its contextual setup and teardown functionality. - """ - + """Test the ContainerBase and its contextual setup and teardown functionality.""" foo = "bar" class Container(ContainerBase): @@ -99,7 +95,8 @@ class Container(ContainerBase): @pytest.mark.asyncio async def test_container_base_sync_resouce(): """Make sure that using a non async Resource with the ContainerBase results in an - exception.""" + exception. + """ class Container(ContainerBase): test = dependency_injector.providers.Resource(ValidSyncResource, "bar") @@ -138,24 +135,26 @@ class ExampleConfig(BaseSettings): class SyncConfigConsumer: """A class that consumes an entire ExampleConfig instance (and not just - individual parameters).""" + individual parameters). + """ def __init__(self, *, config: ExampleConfig): """Takes an ExampleConfig instance and checks their values against the - expectation.""" - + expectation. + """ self.config = config class AsyncConfigConsumer(SyncConfigConsumer): """A class that consumes an entire ExampleConfig instance (and not just - individual parameters). Is constucted using an async context manager.""" + individual parameters). Is constructed using an async context manager. + """ @classmethod @asynccontextmanager async def construct(cls, *, config: ExampleConfig): """A constructor with setup and teardown logic. - Just there so that we can use the container as an async context manager. 
+ """ yield cls(config=config) class Container(ContainerBase): diff --git a/tests/integration/test_mongodb.py b/tests/integration/test_mongodb.py index d5e51bf7..fc4ca583 100644 --- a/tests/integration/test_mongodb.py +++ b/tests/integration/test_mongodb.py @@ -44,6 +44,8 @@ class ExampleCreationDto(BaseModel): field_c: bool class Config: + """Additional config options for model""" + frozen = True @@ -56,7 +58,6 @@ class ExampleDto(ExampleCreationDto): @pytest.mark.asyncio async def test_dao_find_all_with_id(mongodb_fixture: MongoDbFixture): # noqa: F811 """Test using the id field as part of the mapping in find_all()""" - dao = await mongodb_fixture.dao_factory.get_dao( name="example", dto_model=ExampleDto, @@ -110,7 +111,6 @@ async def test_dao_find_all_without_collection( mongodb_fixture: MongoDbFixture, # noqa: F811 ): """Test calling find_all() when there is no collection.""" - dao = await mongodb_fixture.dao_factory.get_dao( name="does-not-exist-at-all", dto_model=ExampleDto, @@ -140,8 +140,8 @@ async def test_empty_collections(mongodb_fixture: MongoDbFixture): # noqa: F811 @pytest.mark.asyncio async def test_dao_happy(mongodb_fixture: MongoDbFixture): # noqa: F811 """Test the happy path of performing basic CRUD database interactions using - the MongoDbDaoFactory in a surrograte ID setting.""" - + the MongoDbDaoFactory in a surrograte ID setting. + """ dao = await mongodb_fixture.dao_factory.get_dao( name="example", dto_model=ExampleDto, @@ -205,7 +205,6 @@ async def test_dao_insert_natural_id_happy( mongodb_fixture: MongoDbFixture, # noqa: F811 ): """Tests the happy path of inserting a new resource in a natural ID setting.""" - dao = await mongodb_fixture.dao_factory.get_dao( name="example", dto_model=ExampleDto, @@ -225,8 +224,8 @@ async def test_dao_upsert_natural_id_happy( mongodb_fixture: MongoDbFixture, # noqa: F811 ): """Tests the happy path of upserting new and existing resources in a natural ID - setting.""" - + setting. 
+ """ dao = await mongodb_fixture.dao_factory.get_dao( name="example", dto_model=ExampleDto, @@ -254,7 +253,6 @@ async def test_dao_get_not_found( mongodb_fixture: MongoDbFixture, # noqa: F811 ): """Tests getting a non existing resource via its ID.""" - dao = await mongodb_fixture.dao_factory.get_dao( name="example", dto_model=ExampleDto, @@ -270,7 +268,6 @@ async def test_dao_update_not_found( mongodb_fixture: MongoDbFixture, # noqa: F811 ): """Tests updating a non existing resource.""" - dao = await mongodb_fixture.dao_factory.get_dao( name="example", dto_model=ExampleDto, @@ -290,7 +287,6 @@ async def test_dao_delete_not_found( mongodb_fixture: MongoDbFixture, # noqa: F811 ): """Tests deleting a non existing resource via its ID.""" - dao = await mongodb_fixture.dao_factory.get_dao( name="example", dto_model=ExampleDto, @@ -306,7 +302,6 @@ async def test_dao_find_invalid_mapping( mongodb_fixture: MongoDbFixture, # noqa: F811 ): """Tests find_one and find_all methods with an invalid mapping.""" - dao = await mongodb_fixture.dao_factory.get_dao( name="example", dto_model=ExampleDto, @@ -326,7 +321,6 @@ async def test_dao_find_no_hits( mongodb_fixture: MongoDbFixture, # noqa: F811 ): """Tests find_one and find_all methods with a mapping that results in no hits.""" - dao = await mongodb_fixture.dao_factory.get_dao( name="example", dto_model=ExampleDto, @@ -346,7 +340,6 @@ async def test_dao_find_one_with_multiple_hits( mongodb_fixture: MongoDbFixture, # noqa: F811 ): """Tests find_one with a mapping that results in multiple hits.""" - dao = await mongodb_fixture.dao_factory.get_dao( name="example", dto_model=ExampleDto, @@ -375,8 +368,8 @@ async def prefixed_count_id_generator( prefix: str, count_offset: int = 1 ) -> AsyncGenerator[str, None]: """A generator that yields IDs by counting upwards und prefixing that counts - with a predefined string.""" - + with a predefined string. + """ for count in itertools.count(start=count_offset): yield f"{prefix}-{count}" diff --git a/tests/integration/test_s3.py b/tests/integration/test_s3.py index dc9a48d8..47ea64c9 100644 --- a/tests/integration/test_s3.py +++ b/tests/integration/test_s3.py @@ -13,11 +13,10 @@ # See the License for the specific language governing permissions and # limitations under the License. -""" -Test S3 storage DAO -""" +"""Test S3 storage DAO""" -from typing import ContextManager, Optional +from contextlib import AbstractContextManager +from typing import Optional import pytest from black import nullcontext @@ -48,7 +47,6 @@ async def test_empty_buckets( s3_fixture: S3Fixture, file_fixture: FileObject # noqa: F811 ): """Make sure the empty_buckets function works""" - # bucket should not exist in the beginning: bucket_id = file_fixture.bucket_id assert not await s3_fixture.storage.does_bucket_exist(bucket_id=bucket_id) @@ -78,10 +76,7 @@ async def test_typical_workflow( use_multipart_upload: bool, s3_fixture: S3Fixture, # noqa: F811 ): - """ - Tests all methods of the ObjectStorageS3 DAO implementation in one long workflow. 
- """ - + """Tests all methods of the ObjectStorageS3 DAO implementation in one long workflow.""" with temp_file_object(size=20 * MEBIBYTE) as file: await typical_workflow( storage_client=s3_fixture.storage, @@ -100,7 +95,6 @@ async def test_object_existence_checks( file_fixture: FileObject, # noqa: F811 ): """Test if the checks for existence of objects work correctly.""" - # object should not exist in the beginning: assert not await s3_fixture.storage.does_object_exist( bucket_id=file_fixture.bucket_id, object_id=file_fixture.object_id @@ -121,7 +115,6 @@ async def test_get_object_size( file_fixture: FileObject, # noqa: F811 ): """Test if the get_object_size method returns the correct size.""" - expected_size = len(file_fixture.content) await s3_fixture.populate_file_objects([file_fixture]) @@ -138,7 +131,6 @@ async def test_list_all_object_ids( file_fixture: FileObject, # noqa: F811 ): """Test if listing all object IDs for a bucket works correctly.""" - file_fixture2 = file_fixture.copy(deep=True) file_fixture2.object_id = "mydefaulttestobject002" @@ -155,7 +147,6 @@ async def test_list_all_object_ids( @pytest.mark.asyncio async def test_bucket_existence_checks(s3_fixture: S3Fixture): # noqa: F811 """Test if the checks for existence of buckets work correctly.""" - bucket_id = EXAMPLE_BUCKETS[0] # bucket should not exist in the beginning: @@ -176,7 +167,6 @@ async def test_object_and_bucket_collisions( Tests whether overwriting (re-creation, re-upload, or copy to existing object) fails with the expected error. """ - await s3_fixture.populate_file_objects([file_fixture]) with pytest.raises(ObjectStorageProtocol.BucketAlreadyExistsError): @@ -207,7 +197,6 @@ async def test_handling_non_existing_file_and_bucket( Tests whether interacting with a non-existing bucket/file object fails with the expected result. """ - non_existing_bucket_id = "mynonexistingbucket001" non_existing_object_id = "mynonexistingobject001" existing_bucket_id = file_fixture.bucket_id @@ -278,10 +267,9 @@ async def test_delete_non_empty_bucket( file_fixture: FileObject, # noqa: F811 ): """Test deleting an non-empty bucket.""" - await s3_fixture.populate_file_objects([file_fixture]) - with nullcontext() if delete_content else pytest.raises( # type: ignore + with nullcontext() if delete_content else pytest.raises( ObjectStorageProtocol.BucketNotEmptyError ): await s3_fixture.storage.delete_bucket( @@ -309,7 +297,6 @@ async def test_using_non_existing_upload( Makes sure that using a non existing upload_id-bucket_id-object_id combination throws the right error. 
""" - # prepare a non-completed upload: real_upload_id, real_bucket_id, real_object_id = await prepare_non_completed_upload( s3_fixture @@ -319,8 +306,8 @@ async def test_using_non_existing_upload( bucket_id = real_bucket_id if bucket_id_correct else "wrong-bucket" object_id = real_object_id if object_id_correct else "wrong-object" - def get_exception_context() -> ContextManager: - return pytest.raises(exception) if exception else nullcontext() # type: ignore + def get_exception_context() -> AbstractContextManager: + return pytest.raises(exception) if exception else nullcontext() # call relevant methods from the provider: with pytest.raises(exception): @@ -350,7 +337,6 @@ async def test_invalid_part_number( s3_fixture: S3Fixture, # noqa: F811 ): """Check that invalid part numbers are cached correctly.""" - upload_id, bucket_id, object_id = await prepare_non_completed_upload(s3_fixture) with pytest.raises(exception) if exception else nullcontext(): # type: ignore @@ -413,10 +399,7 @@ async def test_complete_multipart_upload( exception: Optional[Exception], s3_fixture: S3Fixture, # noqa: F811 ): - """ - Test the complete_multipart_upload method. - """ - + """Test the complete_multipart_upload method.""" upload_id, bucket_id, object_id = await get_initialized_upload(s3_fixture) for part_idx, part_size in enumerate(part_sizes): await upload_part_of_size( @@ -444,10 +427,7 @@ async def test_abort_multipart_upload( empty_upload: bool, s3_fixture: S3Fixture, # noqa: F811 ): - """ - Test the abort_multipart_upload method. - """ - + """Test the abort_multipart_upload method.""" upload_id, bucket_id, object_id = await get_initialized_upload(s3_fixture) async def upload_part_shortcut(part_number): @@ -482,10 +462,7 @@ async def upload_part_shortcut(part_number): @pytest.mark.asyncio async def test_multiple_active_uploads(s3_fixture: S3Fixture): # noqa: F811 - """ - Test that multiple active uploads for the same object are not possible. - """ - + """Test that multiple active uploads for the same object are not possible.""" # initialize an upload: _, bucket_id, object_id = await get_initialized_upload(s3_fixture) @@ -504,7 +481,6 @@ async def test_handling_multiple_coexisting_uploads( Test that the invalid state of multiple uploads coexisting for the same object is correctly handled. """ - # initialize an upload: upload1_id, bucket_id, object_id = await get_initialized_upload(s3_fixture) @@ -560,7 +536,6 @@ async def test_handling_multiple_subsequent_uploads( 2. 
initiate an upload, upload some parts, complete it, then start another upload, uploads some parts, complete it (`abort_first` set to False) """ - # perform first upload: upload1_id, bucket_id, object_id = await get_initialized_upload(s3_fixture) diff --git a/tests/unit/test_akafka.py b/tests/unit/test_akafka.py index 4ebeec50..354cdff5 100644 --- a/tests/unit/test_akafka.py +++ b/tests/unit/test_akafka.py @@ -177,7 +177,7 @@ async def test_kafka_event_subscriber( assert callable(cc_kwargs["value_deserializer"]) # consume one event: - with pytest.raises(exception) if exception else nullcontext(): # type: ignore + with pytest.raises(exception) if exception else nullcontext(): await event_subscriber.run(forever=False) # check if producer was correctly started and stopped: diff --git a/tests/unit/test_config.py b/tests/unit/test_config.py index 394c3430..f5a63e3a 100644 --- a/tests/unit/test_config.py +++ b/tests/unit/test_config.py @@ -26,8 +26,8 @@ def test_config_from_yaml(): """Test that config yaml correctly overwrites - default parameters""" - + default parameters + """ config_yaml = config_yamls["basic"] # update config class with content of config yaml @@ -41,7 +41,8 @@ def test_config_from_yaml(): def test_config_from_env(): """Test that env vars correctly overwrites - default parameters""" + default parameters + """ env_var_fixture = env_var_sets["basic_complete"] with env_var_fixture: # update config class with content of config yaml and @@ -50,14 +51,14 @@ def test_config_from_env(): config = config_constructor() # compare to expected content: - expected = BasicConfig(**env_var_fixture.env_vars) + expected = BasicConfig(**env_var_fixture.env_vars) # type: ignore assert config.dict() == expected def test_config_from_yaml_and_env(): """Test that config yaml and env vars correctly overwrites - default parameters""" - + default parameters + """ config_yaml = config_yamls["basic"] env_var_fixture = env_var_sets["basic_partly"] @@ -76,7 +77,6 @@ def test_config_from_yaml_and_env(): @pytest.mark.parametrize("cwd", [True, False]) def test_config_from_default_yaml(cwd: bool): """Test that default config yaml from home is correctly read""" - base_dir = Path(os.getcwd()) if cwd else Path.home() prefix = "test_prefix" @@ -99,8 +99,8 @@ def test_config_from_default_yaml(cwd: bool): def test_config_from_default_yaml_via_env(): """Test that default config yaml specified via an environment variable is correctly - read""" - + read + """ prefix = "test_prefix" # set env var: diff --git a/tests/unit/test_dao.py b/tests/unit/test_dao.py index 0d5525ee..79c2994d 100644 --- a/tests/unit/test_dao.py +++ b/tests/unit/test_dao.py @@ -71,12 +71,8 @@ async def _get_dao( id_generator: AsyncGenerator[str, None], ) -> Union[DaoSurrogateId[Dto, DtoCreation], DaoNaturalId[Dto]]: """*To be implemented by the provider. Input validation is done outside of this - method.*""" - ... - - """*To be implemented by the provider. Input validation is done outside of this - method.*""" - + method.* + """ raise NotImplementedError() @@ -89,7 +85,8 @@ class ExampleCreationDto(BaseModel): class ExampleInvalidCreationDto(ExampleCreationDto): """Example for a DTO creation model that is invalid because it contains a - parameter that the main DTO model is missing.""" + parameter that the main DTO model is missing. 
+ """ unexpected_param: str @@ -103,7 +100,6 @@ class ExampleDto(ExampleCreationDto): @pytest.mark.asyncio async def test_get_dto_valid(): """Use the get_dao method of the DaoFactory with valid parameters.""" - dao_factory = FakeDaoFactory() with pytest.raises(NotImplementedError): @@ -119,8 +115,8 @@ async def test_get_dto_valid(): @pytest.mark.asyncio async def test_get_dto_invalid_id(): """Use the get_dao method of the DaoFactory with an invalid ID that is not found in - the provided DTO model.""" - + the provided DTO model. + """ dao_factory = FakeDaoFactory() with pytest.raises(DaoFactoryProtocol.IdFieldNotFoundError): @@ -136,7 +132,6 @@ async def test_get_dto_invalid_id(): ) async def test_get_dto_invalid_creation_model(dto_creation_model: type[BaseModel]): """Use the get_dao method of the DaoFactory with an invalid creation model.""" - dao_factory = FakeDaoFactory() with pytest.raises(DaoFactoryProtocol.CreationModelInvalidError): @@ -151,7 +146,6 @@ async def test_get_dto_invalid_creation_model(dto_creation_model: type[BaseModel @pytest.mark.asyncio async def test_get_dto_invalid_fields_to_index(): """Use the get_dao method of the DaoFactory with an invalid list of fields to index.""" - dao_factory = FakeDaoFactory() with pytest.raises(DaoFactoryProtocol.IndexFieldsInvalidError): diff --git a/tests/unit/test_eventpub.py b/tests/unit/test_eventpub.py index 09ce5067..1da5654a 100644 --- a/tests/unit/test_eventpub.py +++ b/tests/unit/test_eventpub.py @@ -67,7 +67,7 @@ async def test_ascii_val(type_, key, topic, exception): event_publisher = FakePublisher() # publish event using the provider: - with pytest.raises(exception) if exception else nullcontext(): # type: ignore + with pytest.raises(exception) if exception else nullcontext(): await event_publisher.publish( payload=payload, type_=type_, diff --git a/tests/unit/test_eventsub.py b/tests/unit/test_eventsub.py index fbd19dbb..0e2204ce 100644 --- a/tests/unit/test_eventsub.py +++ b/tests/unit/test_eventsub.py @@ -59,7 +59,7 @@ async def test_ascii_val(type_, topic, exception): event_submitter = FakeSubscriber() # publish event using the provider: - with pytest.raises(exception) if exception else nullcontext(): # type: ignore + with pytest.raises(exception) if exception else nullcontext(): await event_submitter.consume( payload=payload, type_=type_, diff --git a/tests/unit/test_inject.py b/tests/unit/test_inject.py index 845b5408..f6dc977e 100644 --- a/tests/unit/test_inject.py +++ b/tests/unit/test_inject.py @@ -56,11 +56,8 @@ async def test_assert_constructable( constructable: type[AsyncContextConstructable], does_raises: bool ): - """ - Test that assert_constructable can distinguish between - """ - - with pytest.raises(NotConstructableError) if does_raises else nullcontext(): # type: ignore + """Test that assert_constructable can distinguish between""" + with pytest.raises(NotConstructableError) if does_raises else nullcontext(): assert_async_constructable(constructable) @@ -85,8 +82,7 @@ async def test_context_constructor_init( Test the initialization of a context constructor with valid and invalid constructables. 
""" - - with pytest.raises(exception) if exception else nullcontext(): # type: ignore + with pytest.raises(exception) if exception else nullcontext(): test = AsyncConstructor(constructable) if not exception: @@ -108,12 +104,11 @@ async def test_context_constructor_setup_teardown( has_context: bool, ): """Test whether init and shutdown correctly works with a context constructor.""" - foo = "bar" test = AsyncConstructor(constructable, foo) - with pytest.raises(exception) if exception else nullcontext(): # type: ignore + with pytest.raises(exception) if exception else nullcontext(): resource = await test.async_() assert isinstance(resource, constructable) test_instance = await test.init() # type: ignore @@ -139,8 +134,8 @@ async def test_context_constructor_setup_teardown( ) def test_get_constructor(provides: type, args, kwargs, constructor_cls: type): """Tests whether the `get_constructor` function chooses the correct constructor - classes for the given `provides` classes.""" - + classes for the given `provides` classes. + """ constructor = get_constructor(provides, *args, **kwargs) assert isinstance(constructor, constructor_cls) diff --git a/tests/unit/test_testing_eventpub.py b/tests/unit/test_testing_eventpub.py index 05f43046..661a574b 100644 --- a/tests/unit/test_testing_eventpub.py +++ b/tests/unit/test_testing_eventpub.py @@ -28,7 +28,6 @@ @pytest.mark.asyncio async def test_in_mem_publisher(): """Test the InMemEventPublisher testing utilities.""" - type_ = "test_type" key = "test_key" topic = "test_topic" diff --git a/tests/unit/test_utils.py b/tests/unit/test_utils.py index b23b6303..a746caf2 100644 --- a/tests/unit/test_utils.py +++ b/tests/unit/test_utils.py @@ -52,7 +52,7 @@ def test_calc_part_size( preferred_part_size: int, file_size: int, expected_part_size: int ): """Test code to dynamically adapt part size""" - with pytest.raises(ValueError) if file_size > 5 * TiB else nullcontext(): # type: ignore + with pytest.raises(ValueError) if file_size > 5 * TiB else nullcontext(): adapted_part_size = calc_part_size( preferred_part_size=preferred_part_size, file_size=file_size ) @@ -88,13 +88,11 @@ class ExampleModel(BaseModel): @pytest.mark.parametrize("fields", ({"param_a"}, {"param_a", "param_b"})) def test_validate_fields_in_model_happy(fields: Collection[str]): """Test validate_fields_in_model with valid parameters.""" - validate_fields_in_model(model=ExampleModel, fields=fields) @pytest.mark.parametrize("fields", ({"param_c"}, {"param_a", "param_c"})) def test_validate_fields_in_model_error(fields: Collection[str]): """Test validate_fields_in_model with invalid parameters.""" - with pytest.raises(FieldNotInModelError): validate_fields_in_model(model=ExampleModel, fields=fields)