diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 2cfb5dd4..ab5aca06 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -50,7 +50,7 @@ repos: - id: debug-statements - id: debug-statements - repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.0.290 + rev: v0.0.291 hooks: - id: ruff args: [--fix, --exit-non-zero-on-fix] diff --git a/examples/stream_calc/sc_tests/integration/test_event_api.py b/examples/stream_calc/sc_tests/integration/test_event_api.py index 018b6ec1..217b457c 100644 --- a/examples/stream_calc/sc_tests/integration/test_event_api.py +++ b/examples/stream_calc/sc_tests/integration/test_event_api.py @@ -27,11 +27,11 @@ import pytest from kafka import KafkaConsumer, KafkaProducer -from stream_calc.config import Config -from stream_calc.main import main from testcontainers.kafka import KafkaContainer from hexkit.custom_types import JsonObject +from stream_calc.config import Config +from stream_calc.main import main DEFAULT_CONFIG = Config() diff --git a/examples/stream_calc/sc_tests/unit/test_calc.py b/examples/stream_calc/sc_tests/unit/test_calc.py index bca5016d..563ba6c8 100644 --- a/examples/stream_calc/sc_tests/unit/test_calc.py +++ b/examples/stream_calc/sc_tests/unit/test_calc.py @@ -20,6 +20,7 @@ from unittest.mock import AsyncMock import pytest + from stream_calc.core.calc import StreamCalculator diff --git a/examples/stream_calc/sc_tests/unit/test_eventpub.py b/examples/stream_calc/sc_tests/unit/test_eventpub.py index 5d00990e..f00de8bf 100644 --- a/examples/stream_calc/sc_tests/unit/test_eventpub.py +++ b/examples/stream_calc/sc_tests/unit/test_eventpub.py @@ -17,13 +17,13 @@ """Testing the `translators.eventpub` module.""" import pytest + +from hexkit.providers.testing.eventpub import InMemEventPublisher from stream_calc.translators.eventpub import ( EventResultEmitter, EventResultEmitterConfig, ) -from hexkit.providers.testing.eventpub import InMemEventPublisher - @pytest.mark.asyncio async def test_emit_result(): diff --git a/examples/stream_calc/sc_tests/unit/test_eventsub.py b/examples/stream_calc/sc_tests/unit/test_eventsub.py index e11713e5..b0dd9388 100644 --- a/examples/stream_calc/sc_tests/unit/test_eventsub.py +++ b/examples/stream_calc/sc_tests/unit/test_eventsub.py @@ -19,13 +19,13 @@ from unittest.mock import AsyncMock import pytest + +from hexkit.custom_types import JsonObject from stream_calc.translators.eventsub import ( EventProblemReceiver, EventProblemReceiverConfig, ) -from hexkit.custom_types import JsonObject - @pytest.mark.asyncio @pytest.mark.parametrize( diff --git a/examples/stream_calc/stream_calc/config.py b/examples/stream_calc/stream_calc/config.py index 5d275e09..a11790b3 100644 --- a/examples/stream_calc/stream_calc/config.py +++ b/examples/stream_calc/stream_calc/config.py @@ -16,11 +16,10 @@ """Config parameters.""" +from hexkit.providers.akafka import KafkaConfig from stream_calc.translators.eventpub import EventResultEmitterConfig from stream_calc.translators.eventsub import EventProblemReceiverConfig -from hexkit.providers.akafka import KafkaConfig - try: # workaround for https://github.com/pydantic/pydantic/issues/5821 from typing_extensions import Literal except ImportError: diff --git a/examples/stream_calc/stream_calc/container.py b/examples/stream_calc/stream_calc/container.py index 3dd6effe..1a57923b 100644 --- a/examples/stream_calc/stream_calc/container.py +++ b/examples/stream_calc/stream_calc/container.py @@ -22,14 +22,13 @@ """Module hosting the dependency injection 
container.""" # pylint: disable=wrong-import-order +from hexkit.inject import ContainerBase, get_configurator, get_constructor +from hexkit.providers.akafka import KafkaEventPublisher, KafkaEventSubscriber from stream_calc.config import Config from stream_calc.core.calc import StreamCalculator from stream_calc.translators.eventpub import EventResultEmitter from stream_calc.translators.eventsub import EventProblemReceiver -from hexkit.inject import ContainerBase, get_configurator, get_constructor -from hexkit.providers.akafka import KafkaEventPublisher, KafkaEventSubscriber - class Container(ContainerBase): """DI Container""" diff --git a/examples/stream_calc/stream_calc/translators/eventpub.py b/examples/stream_calc/stream_calc/translators/eventpub.py index 55c33aa2..45ce53dd 100644 --- a/examples/stream_calc/stream_calc/translators/eventpub.py +++ b/examples/stream_calc/stream_calc/translators/eventpub.py @@ -17,10 +17,10 @@ """Translators that target the event publishing protocol.""" from pydantic import BaseSettings -from stream_calc.ports.result_emitter import CalcResultEmitterPort from hexkit.custom_types import JsonObject from hexkit.protocols.eventpub import EventPublisherProtocol +from stream_calc.ports.result_emitter import CalcResultEmitterPort class EventResultEmitterConfig(BaseSettings): @@ -39,7 +39,7 @@ def __init__( self, *, config: EventResultEmitterConfig, - event_publisher: EventPublisherProtocol + event_publisher: EventPublisherProtocol, ) -> None: """Configure with provider for the the EventPublisherProto""" diff --git a/examples/stream_calc/stream_calc/translators/eventsub.py b/examples/stream_calc/stream_calc/translators/eventsub.py index 74b119d8..923dd44d 100644 --- a/examples/stream_calc/stream_calc/translators/eventsub.py +++ b/examples/stream_calc/stream_calc/translators/eventsub.py @@ -17,10 +17,10 @@ """Translators that target the event publishing protocol.""" from pydantic import BaseSettings -from stream_calc.ports.problem_receiver import ArithProblemHandlerPort from hexkit.custom_types import Ascii, JsonObject from hexkit.protocols.eventsub import EventSubscriberProtocol +from stream_calc.ports.problem_receiver import ArithProblemHandlerPort class EventProblemReceiverConfig(BaseSettings): diff --git a/src/hexkit/config.py b/src/hexkit/config.py index 78f665b3..9aec5f1f 100644 --- a/src/hexkit/config.py +++ b/src/hexkit/config.py @@ -17,7 +17,7 @@ import os from pathlib import Path -from typing import Any, Callable, Dict, Final, Optional +from typing import Any, Callable, Final, Optional import yaml from pydantic import BaseSettings @@ -77,7 +77,7 @@ def get_default_config_yaml(prefix: str) -> Optional[Path]: def yaml_settings_factory( config_yaml: Optional[Path] = None, -) -> Callable[[BaseSettings], Dict[str, Any]]: +) -> Callable[[BaseSettings], dict[str, Any]]: """ A factory of source methods for pydantic's BaseSettings Config that load settings from a yaml file. 
@@ -89,12 +89,12 @@ def yaml_settings_factory( def yaml_settings( # pylint: disable=unused-argument settings: BaseSettings, - ) -> Dict[str, Any]: - """source method for loading pydantic BaseSettings from a yaml file""" + ) -> dict[str, Any]: + """Source method for loading pydantic BaseSettings from a yaml file""" if config_yaml is None: return {} - with open(config_yaml, "r", encoding="utf8") as yaml_file: + with open(config_yaml, encoding="utf8") as yaml_file: return yaml.safe_load(yaml_file) return yaml_settings @@ -136,7 +136,6 @@ def decorator(settings) -> Callable: settings (BaseSettings): A pydantic BaseSettings class to be modified. """ - # check if settings inherits from pydantic BaseSettings: if not issubclass(settings, BaseSettings): raise TypeError( @@ -153,13 +152,11 @@ def constructor_wrapper( config_yaml (str, optional): Path to a config yaml. Overwrites the default location. """ - # get default path if config_yaml not specified: if config_yaml is None: config_yaml = get_default_config_yaml(prefix) - else: - if not config_yaml.is_file(): - raise ConfigYamlDoesNotExist(path=config_yaml) + elif not config_yaml.is_file(): + raise ConfigYamlDoesNotExist(path=config_yaml) class ModSettings(settings): """Modifies the orginal Settings class provided by the user""" @@ -181,7 +178,7 @@ def customise_sources( env_settings, file_secret_settings, ): - """add custom yaml source""" + """Add custom yaml source""" return ( init_settings, env_settings, diff --git a/src/hexkit/inject.py b/src/hexkit/inject.py index 40bcb7c6..547a6af7 100644 --- a/src/hexkit/inject.py +++ b/src/hexkit/inject.py @@ -55,7 +55,8 @@ class NotConstructableError(TypeError): class AsyncInitShutdownError(TypeError): """Thrown when a container has sync `init_resource` or `shutdown_resource` methods - but coroutines are needed.""" + but coroutines are needed. + """ def assert_async_constructable( @@ -68,7 +69,6 @@ def assert_async_constructable( not check whether `construct` really returns an awaitable or an async context manager. """ - if not callable(getattr(constructable, "construct", None)): raise NotConstructableError( "Async(Context)Constructable class must have a callable `construct` attribute." @@ -77,7 +77,8 @@ def assert_async_constructable( class AsyncConstructor(dependency_injector.providers.Resource): """Maps an Async(Context)Constructable onto the Resource class from the - `dependency_injector` framework.""" + `dependency_injector` framework. + """ @staticmethod def constructable_to_resource( @@ -87,7 +88,6 @@ def constructable_to_resource( Converts an Async(Context)Constructable to an async generator that is compatible with the Resource definition of the `dependency_injector` framework. """ - assert_async_constructable(constructable) async def resource(*args: Any, **kwargs: Any) -> AsyncIterator[Any]: @@ -120,7 +120,6 @@ def __init__( **kwargs: dependency_injector.providers.Injection, ): """Initialize `dependency_injector`'s Resource with an AbstractAsyncContextManager.""" - if provides is None: super().__init__() else: @@ -130,8 +129,8 @@ def __init__( def get_constructor(provides: type, *args, **kwargs): """Automatically selects and applies the right constructor for the class given to - `provides`.""" - + `provides`. 
+ """ constructor_cls: type try: @@ -148,11 +147,11 @@ def get_constructor(provides: type, *args, **kwargs): class CMDynamicContainer(dependency_injector.containers.DynamicContainer): """Adds a async context manager interface to the DynamicContainer base class from - the `dependency_injector` framework.""" + the `dependency_injector` framework. + """ async def __aenter__(self): """Init/setup resources.""" - init_future = self.init_resources() if not inspect.isawaitable(init_future): @@ -165,7 +164,6 @@ async def __aenter__(self): async def __aexit__(self, exc_type, exc_value, exc_trace): """Shutdown/teardown resources""" - shutdown_future = self.shutdown_resources() if not inspect.isawaitable(shutdown_future): @@ -206,11 +204,11 @@ async def __aexit__(self, exc_type, exc_value, exc_trace): class Configurator(dependency_injector.providers.Factory, Generic[PydanticConfig]): """A configuration constructor that holds configuration parameters using a pydantic - model.""" + model. + """ def load_config(self, config: PydanticConfig): """Loading config parameters form an pydantic config instance.""" - self.override(dependency_injector.providers.Callable(lambda: config)) @@ -220,6 +218,6 @@ def get_configurator( """Initializes a configuration provider. This helper function is necessary because the __init__ of Providers used by the - dependency_injector framework need to always use the same signature.""" - + dependency_injector framework need to always use the same signature. + """ return Configurator[PydanticConfig](pydantic_cls) diff --git a/src/hexkit/protocols/dao.py b/src/hexkit/protocols/dao.py index 47ad4f8a..edf9efa1 100644 --- a/src/hexkit/protocols/dao.py +++ b/src/hexkit/protocols/dao.py @@ -15,7 +15,10 @@ # """Protocol for creating Data Access Objects to perform CRUD (plus find) interactions -with the database.""" +with the database. +""" + +# ruff: noqa: PLR0913 import typing from abc import ABC, abstractmethod @@ -72,7 +75,8 @@ class InvalidFindMappingError(FindError): class MultipleHitsFoundError(FindError): """Raised when a DAO find operation did result in multiple hits while only a - single hit was expected.""" + single hit was expected. + """ def __init__(self, *, mapping: Mapping[str, str]): message = ( @@ -84,7 +88,8 @@ def __init__(self, *, mapping: Mapping[str, str]): class NoHitsFoundError(FindError): """Raised when a DAO find operation did result in no hits while a - single hit was expected.""" + single hit was expected. + """ def __init__(self, *, mapping: Mapping[str, str]): message = ( @@ -259,7 +264,6 @@ async def uuid4_id_generator() -> AsyncGenerator[str, None]: This is an AsyncGenerator to be compliant with the id_generator requirements of the DaoFactoryProtocol. """ - while True: yield str(uuid4()) @@ -274,7 +278,8 @@ class IdFieldNotFoundError(ValueError): class CreationModelInvalidError(ValueError): """Raised when the DtoCreationModel was invalid in relation to the main - DTO model.""" + DTO model. + """ class IndexFieldsInvalidError(ValueError): """Raised when providing an invalid list of fields to index.""" @@ -282,8 +287,8 @@ class IndexFieldsInvalidError(ValueError): @classmethod def _validate_dto_model_id(cls, *, dto_model: type[Dto], id_field: str) -> None: """Checks whether the dto_model contains the expected id_field. - Raises IdFieldNotFoundError otherwise.""" - + Raises IdFieldNotFoundError otherwise. 
+ """ if id_field not in dto_model.schema()["properties"]: raise cls.IdFieldNotFoundError() @@ -296,8 +301,8 @@ def _validate_dto_creation_model( id_field: str, ) -> None: """Checks that the dto_creation_model has the same fields as the dto_model - except missing the ID. Raises CreationModelInvalidError otherwise.""" - + except missing the ID. Raises CreationModelInvalidError otherwise. + """ if dto_creation_model is None: return @@ -318,8 +323,8 @@ def _validate_fields_to_index( fields_to_index: Optional[Collection[str]], ) -> None: """Checks that all provided fields are present in the dto_model. - Raises IndexFieldsInvalidError otherwise.""" - + Raises IndexFieldsInvalidError otherwise. + """ if fields_to_index is None: return @@ -403,7 +408,6 @@ async def get_dao( self.IdFieldNotFoundError: Raised when the dto_model did not contain the expected id_field. """ - self._validate_dto_model_id(dto_model=dto_model, id_field=id_field) self._validate_dto_creation_model( @@ -470,5 +474,6 @@ async def _get_dao( id_generator: AsyncGenerator[str, None], ) -> Union[DaoSurrogateId[Dto, DtoCreation], DaoNaturalId[Dto]]: """*To be implemented by the provider. Input validation is done outside of this - method.*""" + method.* + """ ... diff --git a/src/hexkit/protocols/objstorage.py b/src/hexkit/protocols/objstorage.py index 4a2ae54e..b17710aa 100644 --- a/src/hexkit/protocols/objstorage.py +++ b/src/hexkit/protocols/objstorage.py @@ -16,6 +16,7 @@ """Protocol for interacting with S3-like Object Storages.""" +# ruff: noqa: PLR0913 import re from abc import ABC, abstractmethod @@ -28,16 +29,15 @@ class PresignedPostURL(NamedTuple): """Container for presigned POST URLs along with additional metadata fields that - should be attached as body data when sending the POST request.""" + should be attached as body data when sending the POST request. + """ url: str fields: dict[str, str] class ObjectStorageProtocol(ABC): - """ - Protocol for interacting with S3-like Object Storages. - """ + """Protocol for interacting with S3-like Object Storages.""" # constants for multipart uploads: # (shall not be changed by provider implementations) @@ -51,7 +51,6 @@ async def does_bucket_exist(self, bucket_id: str) -> bool: """Check whether a bucket with the specified ID (`bucket_id`) exists. Returns `True` if it exists and `False` otherwise. """ - self._validate_bucket_id(bucket_id) return await self._does_bucket_exist(bucket_id) @@ -60,7 +59,6 @@ async def create_bucket(self, bucket_id: str) -> None: Create a bucket (= a structure that can hold multiple file objects) with the specified unique ID. """ - self._validate_bucket_id(bucket_id) await self._create_bucket(bucket_id) @@ -73,14 +71,11 @@ async def delete_bucket( will be deleted, if False (the default) a BucketNotEmptyError will be raised if the bucket is not empty. """ - self._validate_bucket_id(bucket_id) await self._delete_bucket(bucket_id, delete_content=delete_content) async def list_all_object_ids(self, bucket_id: str) -> list[str]: - """ - Retrieve a list of IDs for all objects currently present in the specified bucket - """ + """Retrieve a list of IDs for all objects currently present in the specified bucket""" self._validate_bucket_id(bucket_id) return await self._list_all_object_ids(bucket_id=bucket_id) @@ -97,7 +92,6 @@ async def get_object_upload_url( You may also specify a custom expiry duration in seconds (`expires_after`) and a maximum size (bytes) for uploads (`max_upload_size`). 
""" - self._validate_bucket_id(bucket_id) self._validate_object_id(object_id) return await self._get_object_upload_url( @@ -114,7 +108,6 @@ async def init_multipart_upload( object_id: str, ) -> str: """Initiates a multipart upload procedure. Returns the upload ID.""" - self._validate_bucket_id(bucket_id) self._validate_object_id(object_id) return await self._init_multipart_upload( @@ -136,7 +129,6 @@ async def get_part_upload_url( Please note: the part number must be a non-zero, positive integer and parts should be uploaded in sequence. """ - self._validate_bucket_id(bucket_id) self._validate_object_id(object_id) return await self._get_part_upload_url( @@ -157,7 +149,6 @@ async def abort_multipart_upload( """Cancel a multipart upload with the specified ID. All uploaded content is deleted. """ - self._validate_bucket_id(bucket_id) self._validate_object_id(object_id) await self._abort_multipart_upload( @@ -180,7 +171,6 @@ async def complete_multipart_upload( This ensures that exactly the specified number of parts exist and that all parts (except the last one) have the specified size. """ - self._validate_bucket_id(bucket_id) self._validate_object_id(object_id) await self._complete_multipart_upload( @@ -198,7 +188,6 @@ async def get_object_download_url( the specified ID (`object_id`) from bucket with the specified id (`bucket_id`). You may also specify a custom expiry duration in seconds (`expires_after`). """ - self._validate_bucket_id(bucket_id) self._validate_object_id(object_id) return await self._get_object_download_url( @@ -213,7 +202,6 @@ async def does_object_exist( may be provided to check the objects content. Returns `True` if checks succeed and `False` otherwise. """ - self._validate_bucket_id(bucket_id) self._validate_object_id(object_id) return await self._does_object_exist( @@ -221,10 +209,7 @@ async def does_object_exist( ) async def get_object_size(self, *, bucket_id: str, object_id: str) -> int: - """ - Returns the size of an object in bytes. - """ - + """Returns the size of an object in bytes.""" self._validate_bucket_id(bucket_id) self._validate_object_id(object_id) return await self._get_object_size(bucket_id=bucket_id, object_id=object_id) @@ -240,7 +225,6 @@ async def copy_object( """Copy an object from one bucket (`source_bucket_id` and `source_object_id`) to another bucket (`dest_bucket_id` and `dest_object_id`). """ - self._validate_bucket_id(source_bucket_id) self._validate_object_id(source_object_id) self._validate_bucket_id(dest_bucket_id) @@ -256,7 +240,6 @@ async def delete_object(self, *, bucket_id: str, object_id: str) -> None: """Delete an object with the specified id (`object_id`) in the bucket with the specified id (`bucket_id`). """ - self._validate_bucket_id(bucket_id) self._validate_object_id(object_id) await self._delete_object(bucket_id=bucket_id, object_id=object_id) @@ -601,7 +584,8 @@ class MultiPartUploadError(ObjectError): class MultiPartUploadAlreadyExistsError(MultiPartUploadError): """Thrown when trying to create a multipart upload for an object for which another - upload is already active.""" + upload is already active. + """ def __init__(self, bucket_id: str, object_id: str): message = ( diff --git a/src/hexkit/providers/akafka/__init__.py b/src/hexkit/providers/akafka/__init__.py index 27fa5d5f..216e92d9 100644 --- a/src/hexkit/providers/akafka/__init__.py +++ b/src/hexkit/providers/akafka/__init__.py @@ -19,7 +19,10 @@ respectively. 
""" -from .provider import KafkaEventPublisher # noqa: F401 -from .provider import KafkaConfig, KafkaEventSubscriber +from .provider import ( + KafkaConfig, + KafkaEventPublisher, + KafkaEventSubscriber, +) __all__ = ["KafkaEventPublisher", "KafkaEventSubscriber", "KafkaConfig"] diff --git a/src/hexkit/providers/akafka/provider.py b/src/hexkit/providers/akafka/provider.py index a4d663c1..e14c41f3 100644 --- a/src/hexkit/providers/akafka/provider.py +++ b/src/hexkit/providers/akafka/provider.py @@ -107,7 +107,6 @@ def __init__( value_serializer: Function to serialize the values into bytes. """ - ... async def start(self): @@ -184,7 +183,6 @@ async def _publish_validated( key (str): The event type. ASCII characters only. topic (str): The event type. ASCII characters only. """ - event_headers = [("type", type_.encode("ascii"))] await self._producer.send_and_wait( @@ -217,7 +215,7 @@ def get_event_type(event: ConsumerEvent) -> str: class KafkaConsumerCompatible(Protocol): """A python duck type protocol describing an AIOKafkaConsumer or equivalent.""" - def __init__( + def __init__( # noqa: PLR0913 self, *topics: Ascii, bootstrap_servers: str, @@ -245,7 +243,6 @@ def __init__( value_serializer: Function to deserialize the values into strings. """ - ... async def start(self) -> None: @@ -290,7 +287,6 @@ async def construct( kafka_consumer_cls: Overwrite the used Kafka consumer class. Only intended for unit testing. """ - client_id = generate_client_id( service_name=config.service_name, instance_id=config.service_instance_id ) @@ -328,7 +324,6 @@ def __init__( type annotation) and an application-specific port (according to the triple hexagonal architecture). """ - self._consumer = consumer self._translator = translator self._types_whitelist = translator.types_of_interest @@ -376,7 +371,6 @@ async def run(self, forever: bool = True) -> None: However, you can set `forever` to `False` to make it return after handling one event. """ - if forever: async for event in self._consumer: await self._consume_event(event) diff --git a/src/hexkit/providers/akafka/testutils.py b/src/hexkit/providers/akafka/testutils.py index 47367e2b..7d0a3442 100644 --- a/src/hexkit/providers/akafka/testutils.py +++ b/src/hexkit/providers/akafka/testutils.py @@ -19,10 +19,11 @@ Please note, only use for testing purposes. """ import json +from collections.abc import AsyncGenerator, Sequence from contextlib import asynccontextmanager from dataclasses import dataclass from functools import partial -from typing import AsyncGenerator, Optional, Sequence, Union +from typing import Optional, Union import pytest_asyncio from aiokafka import AIOKafkaConsumer, TopicPartition @@ -75,8 +76,8 @@ def __init__( details: str, ): """Initialize the error with information on the recorded and - expected_events.""" - + expected_events. + """ event_log = ( " Events recorded: " + ", ".join([str(event) for event in recorded_events]) @@ -96,8 +97,8 @@ def check_recorded_events( recorded_events: Sequence[RecordedEvent], expected_events: Sequence[ExpectedEvent] ): """Check a sequence of recorded events against a sequence of expected events. - Raises ValidationError in case of mismatches.""" - + Raises ValidationError in case of mismatches. 
+ """ get_detailed_error = partial( ValidationError, recorded_events=recorded_events, @@ -130,18 +131,21 @@ def get_field_mismatch_error(field, index): class EventRecorder: """Instances of this class can look at at specific topic and check for expected - events occurring with a specified event key.""" + events occurring with a specified event key. + """ class NotStartedError(RuntimeError): """Raised when the recording has not been started yet but is required for the - requested action.""" + requested action. + """ def __init__(self): super().__init__("Event recording has not been started yet.") class InProgressError(RuntimeError): """Raised when the recording is still in progress but need to be stopped for - the rested action.""" + the rested action. + """ def __init__(self): super().__init__( @@ -155,7 +159,6 @@ def __init__( topic: Ascii, ): """Initialize with connection details.""" - self._kafka_servers = kafka_servers self._topic = topic @@ -164,8 +167,8 @@ def __init__( def _assert_recording_stopped(self) -> None: """Assert that the recording has been stopped. Raises an InProgressError or a - NotStartedError otherwise.""" - + NotStartedError otherwise. + """ if self._recorded_events is None: if self._starting_offsets is None: raise self.NotStartedError() @@ -174,7 +177,6 @@ def _assert_recording_stopped(self) -> None: @property def recorded_events(self) -> Sequence[RecordedEvent]: """The recorded events. Only available after the recording has been stopped.""" - self._assert_recording_stopped() return self._recorded_events # type: ignore @@ -183,8 +185,8 @@ async def _get_consumer_offsets( ) -> dict[str, int]: """Returns a dictionary where the keys are partition IDs and the values are the current offsets in the corresponding partitions for the provided consumer. - The provided consumer instance must have been started.""" - + The provided consumer instance must have been started. + """ topic_partitions = [ topic_partition for topic_partition in consumer.assignment() @@ -209,7 +211,6 @@ def _set_consumer_offsets( have been started. """ - if self._starting_offsets is None: raise self.NotStartedError() @@ -221,7 +222,6 @@ def _set_consumer_offsets( def _get_consumer(self) -> AIOKafkaConsumer: """Get an AIOKafkaConsumer.""" - return AIOKafkaConsumer( self._topic, bootstrap_servers=",".join(self._kafka_servers), @@ -239,7 +239,6 @@ async def _count_events_since_start(self, *, consumer: AIOKafkaConsumer) -> int: since the starting offset. Thereby, sum over all partitions. This does not change the offset. The provided consumer instance must have been started. """ - if self._starting_offsets is None: raise self.NotStartedError() @@ -260,16 +259,16 @@ async def _count_events_since_start(self, *, consumer: AIOKafkaConsumer) -> int: @staticmethod async def _consume_event(*, consumer: AIOKafkaConsumer) -> ConsumerEvent: """Consume a single event from a consumer instance and return it. The provided - consumer instance must have been started.""" - + consumer instance must have been started. + """ return await consumer.__anext__() async def _get_events_since_start( self, *, consumer: AIOKafkaConsumer ) -> list[RecordedEvent]: """Consume events since the starting offset. The provided consumer instance must - have been started.""" - + have been started. 
+ """ event_count = await self._count_events_since_start(consumer=consumer) # consume all the available events (but no more, as this would lead to infinite @@ -290,7 +289,6 @@ async def _get_events_since_start( async def start_recording(self) -> None: """Start looking for the expected events from now on.""" - if self._starting_offsets is not None: raise RuntimeError( "Recording has already been started. Cannot restart. Please define a" @@ -307,7 +305,6 @@ async def start_recording(self) -> None: async def stop_recording(self) -> None: """Stop recording and collect the recorded events""" - if self._starting_offsets is None: raise self.NotStartedError() @@ -323,14 +320,13 @@ async def stop_recording(self) -> None: async def __aenter__(self) -> "EventRecorder": """Start recording when entering the context block.""" - await self.start_recording() return self async def __aexit__(self, error_type, error_val, error_tb): """Stop recording and check the recorded events agains the expectation when - exiting the context block.""" - + exiting the context block. + """ await self.stop_recording() @@ -345,7 +341,6 @@ def __init__( publisher: KafkaEventPublisher, ): """Initialize with connection details and a ready-to-use publisher.""" - self.config = config self.kafka_servers = kafka_servers self.publisher = publisher @@ -354,7 +349,6 @@ async def publish_event( self, *, payload: JsonObject, type_: Ascii, topic: Ascii, key: Ascii = "test" ) -> None: """A convenience method to publish a test event.""" - await self.publisher.publish(payload=payload, type_=type_, key=key, topic=topic) def record_events(self, *, in_topic: Ascii) -> EventRecorder: @@ -362,7 +356,6 @@ def record_events(self, *, in_topic: Ascii) -> EventRecorder: record events in the specified topic upon __aenter__ and stops the recording upon __aexit__. """ - return EventRecorder(kafka_servers=self.kafka_servers, topic=in_topic) def delete_topics(self, topics: Optional[Union[str, list[str]]] = None): @@ -370,7 +363,6 @@ def delete_topics(self, topics: Optional[Union[str, list[str]]] = None): Delete given topic(s) from Kafka broker. When no topics are specified, all existing topics will be deleted. """ - admin_client = KafkaAdminClient(bootstrap_servers=self.kafka_servers) all_topics = admin_client.list_topics() if topics is None: @@ -398,7 +390,6 @@ async def expect_events( (on __aenter__) and check that they match the specified sequence of expected events (on __aexit__). """ - async with self.record_events(in_topic=in_topic) as event_recorder: yield event_recorder @@ -412,7 +403,6 @@ async def kafka_fixture_function() -> AsyncGenerator[KafkaFixture, None]: **Do not call directly** Instead, use get_kafka_fixture() """ - with KafkaContainer(image="confluentinc/cp-kafka:5.4.9-1-deb8") as kafka: kafka_servers = [kafka.get_bootstrap_server()] config = KafkaConfig( diff --git a/src/hexkit/providers/mongodb/__init__.py b/src/hexkit/providers/mongodb/__init__.py index 624d1b21..6fe715c0 100644 --- a/src/hexkit/providers/mongodb/__init__.py +++ b/src/hexkit/providers/mongodb/__init__.py @@ -18,6 +18,6 @@ and associated utilities. 
""" -from .provider import MongoDbConfig, MongoDbDaoFactory # noqa: F401 +from .provider import MongoDbConfig, MongoDbDaoFactory __all__ = ["MongoDbConfig", "MongoDbDaoFactory"] diff --git a/src/hexkit/providers/mongodb/provider.py b/src/hexkit/providers/mongodb/provider.py index 2d2c652e..e9cf4085 100644 --- a/src/hexkit/providers/mongodb/provider.py +++ b/src/hexkit/providers/mongodb/provider.py @@ -19,6 +19,8 @@ Utilities for testing are located in `./testutils.py`. """ +# ruff: noqa: PLR0913 + import json from abc import ABC from collections.abc import AsyncGenerator, AsyncIterator, Collection, Mapping @@ -59,8 +61,8 @@ def __init__( *, dto_model: type[Dto], id_field: str, - collection: AsyncIOMotorCollection, - session: Optional[AsyncIOMotorClientSession] = None, + collection: AsyncIOMotorCollection, # type: ignore + session: Optional[AsyncIOMotorClientSession] = None, # type: ignore ): """Initialize the DAO. @@ -78,7 +80,6 @@ def __init__( is provided, every database operation is immediately commited. Defaults to None. """ - self._collection = collection self._session = session self._dto_model = dto_model @@ -86,15 +87,15 @@ def __init__( def _document_to_dto(self, document: dict[str, Any]) -> Dto: """Converts a document obtained from the MongoDB database into a DTO model- - compliant representation.""" - + compliant representation. + """ document[self._id_field] = document.pop("_id") return self._dto_model(**document) def _dto_to_document(self, dto: Dto) -> dict[str, Any]: """Converts a DTO into a representation that is compatible documents for a - MongoDB Database.""" - + MongoDB Database. + """ document = json.loads(dto.json()) document["_id"] = document.pop(self._id_field) @@ -112,7 +113,6 @@ async def get_by_id(self, id_: str) -> Dto: Raises: ResourceNotFoundError: when resource with the specified id_ was not found """ - document = await self._collection.find_one({"_id": id_}, session=self._session) if document is None: @@ -132,7 +132,6 @@ async def update(self, dto: Dto) -> None: ResourceNotFoundError: when resource with the id specified in the dto was not found """ - document = self._dto_to_document(dto) result = await self._collection.replace_one( {"_id": document["_id"]}, document, session=self._session @@ -153,7 +152,6 @@ async def delete(self, *, id_: str) -> None: Raises: ResourceNotFoundError: when resource with the specified id_ was not found """ - result = await self._collection.delete_one({"_id": id_}, session=self._session) if result.deleted_count == 0: @@ -195,7 +193,6 @@ async def find_one(self, *, mapping: Mapping[str, Any]) -> Dto: MultipleHitsFoundError: Raised when obtaining more than one hit. """ - hits = self.find_all(mapping=mapping) try: @@ -223,7 +220,6 @@ async def find_all(self, *, mapping: Mapping[str, Any]) -> AsyncIterator[Dto]: An AsyncIterator of hits. All hits are in the form of the respective DTO model. """ - self._validate_find_mapping(mapping) if self._id_field in mapping: @@ -291,7 +287,6 @@ def __init__( is provided, every database operation is immediately commited. Defaults to None. """ - super().__init__( dto_model=dto_model, id_field=id_field, @@ -313,7 +308,6 @@ async def insert(self, dto: DtoCreation_contra) -> Dto: Returns: Returns a copy of the newly inserted resource including its assigned ID. 
""" - # complete the provided data with an autogenerated ID: data = dto.dict() data[self._id_field] = await self._id_generator.__anext__() @@ -342,7 +336,6 @@ def with_transaction(cls) -> AbstractAsyncContextManager["DaoNaturalId[Dto]"]: transaction is performed in case of an exception. Otherwise, the changes to the database are committed and flushed. """ - raise NotImplementedError() async def insert(self, dto: Dto) -> None: @@ -357,7 +350,6 @@ async def insert(self, dto: Dto) -> None: ResourceAlreadyExistsError: when a resource with the ID specified in the dto does already exist. """ - document = self._dto_to_document(dto) await self._collection.insert_one(document, session=self._session) @@ -369,7 +361,6 @@ async def upsert(self, dto: Dto) -> None: Resource content as a pydantic-based data transfer object including the resource ID. """ - document = self._dto_to_document(dto) await self._collection.replace_one( {"_id": document["_id"]}, document, session=self._session, upsert=True @@ -379,7 +370,8 @@ async def upsert(self, dto: Dto) -> None: class MongoDbConfig(BaseSettings): """Configuration parameters for connecting to a MongoDB server. - Inherit your config class from this class if your application uses MongoDB.""" + Inherit your config class from this class if your application uses MongoDB. + """ db_connection_str: SecretStr = Field( ..., @@ -410,7 +402,6 @@ def __init__( Args: config: MongoDB-specific config parameters. """ - self._config = config # get a database-specific client: @@ -419,7 +410,7 @@ def __init__( ) self._db = self._client[self._config.db_name] - def __repr__(self) -> str: + def __repr__(self) -> str: # noqa: D105 return f"{self.__class__.__qualname__}(config={repr(self._config)})" @overload @@ -468,7 +459,6 @@ async def _get_dao( from the database server. Thus for compliance with the DaoFactoryProtocol, this method is async. """ - if fields_to_index is not None: raise NotImplementedError( "Indexing on non-ID fields has not been implemented, yet." diff --git a/src/hexkit/providers/mongodb/testutils.py b/src/hexkit/providers/mongodb/testutils.py index d5c60742..1854339e 100644 --- a/src/hexkit/providers/mongodb/testutils.py +++ b/src/hexkit/providers/mongodb/testutils.py @@ -18,8 +18,9 @@ Please note, only use for testing purposes. 
""" +from collections.abc import Generator from dataclasses import dataclass -from typing import Generator, Optional, Union +from typing import Optional, Union import pytest from pymongo import MongoClient @@ -66,7 +67,6 @@ def empty_collections( def config_from_mongodb_container(container: MongoDbContainer) -> MongoDbConfig: """Prepares a MongoDbConfig from an instance of a MongoDbContainer container.""" - db_connection_str = container.get_connection_url() return MongoDbConfig(db_connection_str=db_connection_str, db_name="test") @@ -76,7 +76,6 @@ def mongodb_fixture_function() -> Generator[MongoDbFixture, None, None]: **Do not call directly** Instead, use get_mongodb_fixture() """ - with MongoDbContainer(image="mongo:6.0.3") as mongodb: config = config_from_mongodb_container(mongodb) dao_factory = MongoDbDaoFactory(config=config) diff --git a/src/hexkit/providers/s3/__init__.py b/src/hexkit/providers/s3/__init__.py index 137050ab..a01dc2e3 100644 --- a/src/hexkit/providers/s3/__init__.py +++ b/src/hexkit/providers/s3/__init__.py @@ -19,6 +19,6 @@ """ # shortcuts: -from hexkit.providers.s3.provider import S3Config, S3ObjectStorage # noqa: F401 +from hexkit.providers.s3.provider import S3Config, S3ObjectStorage __all__ = ["S3Config", "S3ObjectStorage"] diff --git a/src/hexkit/providers/s3/provider.py b/src/hexkit/providers/s3/provider.py index 5b99ab93..07137770 100644 --- a/src/hexkit/providers/s3/provider.py +++ b/src/hexkit/providers/s3/provider.py @@ -19,6 +19,8 @@ Utilities for testing are located in `./testutils.py`. """ +# ruff: noqa: PLR0913 + import asyncio from functools import lru_cache from pathlib import Path @@ -107,7 +109,6 @@ def read_aws_config_ini(aws_config_ini: Path) -> botocore.config.Config: https://boto3.amazonaws.com/v1/documentation/api/latest/guide/configuration.html#using-a-configuration-file) and returns an botocore.config.Config object. """ - config_profile = botocore.configloader.load_config(config_filename=aws_config_ini) return botocore.config.Config(**config_profile) @@ -115,9 +116,7 @@ def read_aws_config_ini(aws_config_ini: Path) -> botocore.config.Config: class S3ObjectStorage( ObjectStorageProtocol ): # pylint: disable=too-many-instance-attributes - """ - S3-based provider implementing the ObjectStorageProtocol. - """ + """S3-based provider implementing the ObjectStorageProtocol.""" def __init__( # pylint: disable=too-many-arguments self, @@ -133,7 +132,6 @@ def __init__( # pylint: disable=too-many-arguments Args: config (S3Config): Config parameters specified using the S3Config model. 
""" - self._config = config self.endpoint_url = config.s3_endpoint_url @@ -173,11 +171,10 @@ def __repr__(self) -> str: @staticmethod def _format_s3_error_code(error_code: str): """Format a message to describe an S3 error code.""" - return f"S3 error with code: '{error_code}'" @classmethod - def _translate_s3_client_errors( # noqa: C901 + def _translate_s3_client_errors( cls, source_exception: botocore.exceptions.ClientError, *, @@ -189,7 +186,6 @@ def _translate_s3_client_errors( # noqa: C901 Translates S3 client errors based on their error codes into exceptions from the ObjectStorageProtocol modules """ - error_code = source_exception.response["Error"]["Code"] exception: Exception @@ -208,27 +204,22 @@ def _translate_s3_client_errors( # noqa: C901 exception = cls.ObjectAlreadyExistsError( bucket_id=bucket_id, object_id=object_id ) - elif error_code == "ObjectAlreadyInActiveTierError": - exception = cls.ObjectAlreadyExistsError( - bucket_id=bucket_id, object_id=object_id - ) elif error_code == "NoSuchUpload": if upload_id is None or bucket_id is None or object_id is None: raise ValueError() exception = cls.MultiPartUploadNotFoundError( upload_id=upload_id, bucket_id=bucket_id, object_id=object_id ) + # exact match not found, match by keyword: + elif "Bucket" in error_code: + exception = cls.BucketError(cls._format_s3_error_code(error_code)) + elif "Object" in error_code or "Key" in error_code: + exception = cls.ObjectError(cls._format_s3_error_code(error_code)) else: - # exact match not found, match by keyword: - if "Bucket" in error_code: - exception = cls.BucketError(cls._format_s3_error_code(error_code)) - elif "Object" in error_code or "Key" in error_code: - exception = cls.ObjectError(cls._format_s3_error_code(error_code)) - else: - # if nothing matches, return a generic error: - exception = cls.ObjectStorageProtocolError( - cls._format_s3_error_code(error_code) - ) + # if nothing matches, return a generic error: + exception = cls.ObjectStorageProtocolError( + cls._format_s3_error_code(error_code) + ) return exception @@ -236,7 +227,6 @@ async def _does_bucket_exist(self, bucket_id: str) -> bool: """Check whether a bucket with the specified ID (`bucket_id`) exists. Return `True` if it exists and `False` otherwise. """ - try: bucket_list = await asyncio.to_thread(self._client.list_buckets) except botocore.exceptions.ClientError as error: @@ -250,7 +240,6 @@ async def _assert_bucket_exists(self, bucket_id: str) -> None: """Checks if the bucket with specified ID (`bucket_id`) exists and throws an BucketNotFoundError otherwise. """ - if not await self.does_bucket_exist(bucket_id): raise self.BucketNotFoundError(bucket_id=bucket_id) @@ -258,7 +247,6 @@ async def _assert_bucket_not_exists(self, bucket_id: str) -> None: """Checks if the bucket with specified ID (`bucket_id`) exists. If so, it throws an BucketAlreadyExistsError. """ - if await self.does_bucket_exist(bucket_id): raise self.BucketAlreadyExistsError(bucket_id=bucket_id) @@ -267,7 +255,6 @@ async def _create_bucket(self, bucket_id: str) -> None: Create a bucket (= a structure that can hold multiple file objects) with the specified unique ID. """ - await self._assert_bucket_not_exists(bucket_id) try: @@ -286,7 +273,6 @@ async def _delete_bucket( will be deleted, if False (the default) a BucketNotEmptyError will be raised if the bucket is not empty. 
""" - await self._assert_bucket_exists(bucket_id) try: @@ -301,9 +287,7 @@ async def _delete_bucket( ) from error async def _list_all_object_ids(self, *, bucket_id: str) -> list[str]: - """ - Retrieve a list of IDs for all objects currently present in the specified bucket - """ + """Retrieve a list of IDs for all objects currently present in the specified bucket""" await self._assert_bucket_exists(bucket_id) try: @@ -323,7 +307,6 @@ async def _does_object_exist( may be provided to check the objects content. Return `True` if checks succeed and `False` otherwise. """ - if object_md5sum is not None: raise NotImplementedError("Md5 checking is not yet implemented.") @@ -343,7 +326,6 @@ async def _assert_object_exists(self, *, bucket_id: str, object_id: str) -> None the specified ID (`bucket_id`) and throws an ObjectNotFoundError otherwise. If the bucket does not exist it throws a BucketNotFoundError. """ - # first check if bucket exists: await self._assert_bucket_exists(bucket_id) @@ -357,7 +339,6 @@ async def _assert_object_not_exists( the specified ID (`bucket_id`). If so, it throws an ObjectAlreadyExistsError. If the bucket does not exist it throws a BucketNotFoundError. """ - # first check if bucket exists: await self._assert_bucket_exists(bucket_id) @@ -379,7 +360,6 @@ async def _get_object_upload_url( You may also specify a custom expiry duration in seconds (`expires_after`) and a maximum size (bytes) for uploads (`max_upload_size`). """ - await self._assert_object_not_exists(bucket_id=bucket_id, object_id=object_id) conditions = ( @@ -416,7 +396,6 @@ async def _list_multipart_upload_for_object( (S3 allows multiple ongoing multi-part uploads.) """ - try: uploads_info = await asyncio.to_thread( self._client.list_multipart_uploads, @@ -434,7 +413,6 @@ async def _list_multipart_upload_for_object( async def _assert_no_multipart_upload(self, *, bucket_id: str, object_id: str): """Ensure that there are no active multi-part uploads for the given object.""" - upload_ids = await self._list_multipart_upload_for_object( bucket_id=bucket_id, object_id=object_id ) @@ -457,7 +435,6 @@ async def _assert_multipart_upload_exists( By default, also verifies that this upload is the only upload active for that file. Otherwise, raises MultipleActiveUploadsError. """ - upload_ids = await self._list_multipart_upload_for_object( bucket_id=bucket_id, object_id=object_id ) @@ -477,7 +454,6 @@ async def _assert_multipart_upload_exists( async def _init_multipart_upload(self, *, bucket_id: str, object_id: str) -> str: """Initiates a mulipart upload procedure. Returns the upload ID.""" - await self._assert_no_multipart_upload(bucket_id=bucket_id, object_id=object_id) try: @@ -507,7 +483,6 @@ async def _get_part_upload_url( Please note: the part number must be a non-zero, positive integer and parts should be uploaded in sequence. 
""" - if not 0 < part_number <= self.MAX_FILE_PART_NUMBER: raise ValueError( "The part number must be a non-zero positive integer" @@ -546,7 +521,6 @@ async def _get_parts_info( object_id: str, ) -> dict: """Get information on parts uploaded as part of the specified multi-part upload.""" - await self._assert_multipart_upload_exists( upload_id=upload_id, bucket_id=bucket_id, object_id=object_id ) @@ -585,7 +559,6 @@ async def _check_uploaded_parts( anticipated_part_size: Optional[int] = None, ) -> None: """Check size and quantity of parts""" - # check the part quantity: parts = parts_info.get("Parts") if parts is None or len(parts) == 0: @@ -666,7 +639,6 @@ async def _abort_multipart_upload( """Abort a multipart upload with the specified ID. All uploaded content is deleted. """ - await self._assert_multipart_upload_exists( upload_id=upload_id, bucket_id=bucket_id, @@ -729,7 +701,6 @@ async def _complete_multipart_upload( This ensures that exactly the specified number of parts exist and that all parts (except the last one) have the specified size. """ - parts_info = await self._get_parts_info( upload_id=upload_id, bucket_id=bucket_id, @@ -775,7 +746,6 @@ async def _get_object_download_url( the specified ID (`object_id`) from bucket with the specified id (`bucket_id`). You may also specify a custom expiry duration in seconds (`expires_after`). """ - await self._assert_object_exists(bucket_id=bucket_id, object_id=object_id) try: @@ -793,10 +763,7 @@ async def _get_object_download_url( return presigned_url async def _get_object_size(self, *, bucket_id: str, object_id: str) -> int: - """ - Returns the size of an object in bytes. - """ - + """Returns the size of an object in bytes.""" await self._assert_object_exists(bucket_id=bucket_id, object_id=object_id) object_metadata = await self._get_object_metadata( @@ -814,9 +781,7 @@ async def _get_object_size(self, *, bucket_id: str, object_id: str) -> int: async def _get_object_metadata( self, *, bucket_id: str, object_id: str ) -> dict[str, Any]: - """ - Returns object metadata without downloading the actual object. - """ + """Returns object metadata without downloading the actual object.""" try: metadata = await asyncio.to_thread( self._client.head_object, @@ -871,7 +836,6 @@ async def _delete_object(self, *, bucket_id: str, object_id: str) -> None: """Delete an object with the specified id (`object_id`) in the bucket with the specified id (`bucket_id`). """ - await self._assert_object_exists(bucket_id=bucket_id, object_id=object_id) try: diff --git a/src/hexkit/providers/s3/testutils.py b/src/hexkit/providers/s3/testutils.py index ff5f3b26..a31dd1bc 100644 --- a/src/hexkit/providers/s3/testutils.py +++ b/src/hexkit/providers/s3/testutils.py @@ -18,16 +18,20 @@ Please note, only use for testing purposes. """ + +# ruff: noqa: PLR0913 + import hashlib import os +from collections.abc import Generator from contextlib import contextmanager from pathlib import Path from tempfile import NamedTemporaryFile -from typing import Generator, List, Optional +from typing import Optional import pytest import requests -from pydantic import BaseModel, validator +from pydantic import BaseModel, SecretStr, validator from testcontainers.localstack import LocalStackContainer from hexkit.custom_types import PytestScope @@ -47,11 +51,8 @@ def calc_md5(content: bytes) -> str: - """ - Calc the md5 checksum for the specified bytes. 
- """ - - return hashlib.md5(content).hexdigest() # nosec + """Calc the md5 checksum for the specified bytes.""" + return hashlib.md5(content, usedforsecurity=False).hexdigest() # nosec class FileObject(BaseModel): @@ -65,17 +66,15 @@ class FileObject(BaseModel): # pylint: disable=no-self-argument @validator("content", always=True) - def read_content(cls, _, values): + def read_content(self, _, values): """Read in the file content.""" - with open(values["file_path"], "rb") as file: return file.read() # pylint: disable=no-self-argument @validator("md5", always=True) - def calc_md5_from_content(cls, _, values): + def calc_md5_from_content(self, _, values): """Calculate md5 based on the content.""" - return calc_md5(values["content"]) @@ -106,7 +105,6 @@ async def empty_buckets(self, buckets_to_exclude: Optional[list[str]] = None): async def populate_buckets(self, buckets: list[str]): """Populate the storage with buckets.""" - await populate_storage( self.storage, bucket_fixtures=buckets, object_fixtures=[] ) @@ -115,7 +113,6 @@ async def populate_buckets(self, buckets: list[str]): async def populate_file_objects(self, file_objects: list[FileObject]): """Populate the storage with file objects.""" - await populate_storage( self.storage, bucket_fixtures=[], object_fixtures=file_objects ) @@ -126,7 +123,6 @@ def s3_fixture_function() -> Generator[S3Fixture, None, None]: **Do not call directly** Instead, use get_s3_fixture() """ - with LocalStackContainer(image="localstack/localstack:0.14.5").with_services( "s3" ) as localstack: @@ -151,7 +147,6 @@ def temp_file_object( size: int = 5 * MEBIBYTE, ) -> Generator[FileObject, None, None]: """Generates a file object with the specified size in bytes.""" - chunk_size = 1024 chunk = b"\0" * chunk_size current_size = 0 @@ -172,14 +167,12 @@ def temp_file_object( @pytest.fixture def file_fixture(): """A fixture that provides a temporary file.""" - with temp_file_object() as temp_file: yield temp_file def upload_file(presigned_url: PresignedPostURL, file_path: Path, file_md5: str): """Uploads the test file to the specified URL""" - with open(file_path, "rb") as test_file: files = {"file": (str(file_path), test_file)} headers = {"ContentMD5": file_md5} @@ -195,8 +188,8 @@ def upload_file(presigned_url: PresignedPostURL, file_path: Path, file_md5: str) def check_part_size(file_path: Path, anticipated_size: int) -> None: """Check if the anticipated part size can be used to upload the specified file - using the maximum number of file parts. Raises an exception otherwise.""" - + using the maximum number of file parts. Raises an exception otherwise. + """ file_size = os.path.getsize(file_path) if file_size / anticipated_size > ObjectStorageProtocol.MAX_FILE_PART_NUMBER: raise RuntimeError( @@ -217,7 +210,6 @@ async def upload_part( part_number: int = 1, ): """Upload the specified content as part to an initialized multipart upload.""" - upload_url = await storage_dao.get_part_upload_url( upload_id=upload_id, bucket_id=bucket_id, @@ -241,7 +233,6 @@ async def upload_part_of_size( Generate a bytes object of the specified size and uploads the part to an initialized multipart upload. 
""" - content = b"\0" * size await upload_part( storage_dao=storage_dao, @@ -255,7 +246,6 @@ async def upload_part_of_size( def upload_part_via_url(*, url: str, size: int): """Upload a file part of given size using the given URL.""" - content = b"\0" * size response = requests.put(url, data=content, timeout=TIMEOUT) response.raise_for_status() @@ -269,7 +259,6 @@ async def multipart_upload_file( part_size: int = ObjectStorageProtocol.DEFAULT_PART_SIZE, ) -> None: """Uploads the test file to the specified URL""" - check_part_size(file_path=file_path, anticipated_size=part_size) print(f" - initiate multipart upload for test object {object_id}") @@ -306,24 +295,22 @@ async def multipart_upload_file( def download_and_check_test_file(presigned_url: str, expected_md5: str): """Download the test file from the specified URL and check its integrity (md5).""" - response = requests.get(presigned_url, timeout=TIMEOUT) response.raise_for_status() observed_md5 = calc_md5(response.content) - assert ( # nosec + assert ( # noqa: S101 observed_md5 == expected_md5 ), "downloaded file has unexpected md5 checksum" async def populate_storage( storage: ObjectStorageProtocol, - bucket_fixtures: List[str], - object_fixtures: List[FileObject], + bucket_fixtures: list[str], + object_fixtures: list[FileObject], ): """Populate Storage with object and bucket fixtures""" - for bucket_fixture in bucket_fixtures: await storage.create_bucket(bucket_fixture) @@ -344,18 +331,16 @@ async def populate_storage( def config_from_localstack_container(container: LocalStackContainer) -> S3Config: """Prepares a S3Config from an instance of a localstack test container.""" - s3_endpoint_url = container.get_url() return S3Config( # nosec s3_endpoint_url=s3_endpoint_url, s3_access_key_id="test", - s3_secret_access_key="test", + s3_secret_access_key=SecretStr("test"), ) async def get_initialized_upload(s3_fixture_: S3Fixture): """Initialize a new empty multipart upload.""" - bucket_id = "mybucketwithupload001" object_id = "myobjecttobeuploaded001" @@ -370,7 +355,6 @@ async def get_initialized_upload(s3_fixture_: S3Fixture): async def prepare_non_completed_upload(s3_fixture_: S3Fixture): """Prepare an upload that has not been marked as completed, yet.""" - upload_id, bucket_id, object_id = await get_initialized_upload(s3_fixture_) with temp_file_object() as file: @@ -399,17 +383,14 @@ async def typical_workflow( use_multipart_upload: bool = True, part_size: int = ObjectStorageProtocol.DEFAULT_PART_SIZE, ): - """ - Run a typical workflow of basic object operations using a S3 service. 
- """ - + """Run a typical workflow of basic object operations using a S3 service.""" print("Run a workflow for testing basic object operations using a S3 service:") print(f" - create new bucket {bucket1_id}") await storage_client.create_bucket(bucket1_id) print(" - confirm bucket creation") - assert await storage_client.does_bucket_exist(bucket1_id) # nosec + assert await storage_client.does_bucket_exist(bucket1_id) # noqa: S101 if use_multipart_upload: await multipart_upload_file( @@ -429,7 +410,7 @@ async def typical_workflow( ) print(" - confirm object upload") - assert await storage_client.does_object_exist( # nosec + assert await storage_client.does_object_exist( # noqa: S101 bucket_id=bucket1_id, object_id=object_id ) @@ -452,10 +433,10 @@ async def typical_workflow( await storage_client.delete_object(bucket_id=bucket1_id, object_id=object_id) print(" - confirm move") - assert not await storage_client.does_object_exist( # nosec + assert not await storage_client.does_object_exist( # noqa: S101 bucket_id=bucket1_id, object_id=object_id ) - assert await storage_client.does_object_exist( # nosec + assert await storage_client.does_object_exist( # noqa: S101 bucket_id=bucket2_id, object_id=object_id ) @@ -463,7 +444,7 @@ async def typical_workflow( await storage_client.delete_bucket(bucket1_id) print(" - confirm bucket deletion") - assert not await storage_client.does_bucket_exist(bucket1_id) # nosec + assert not await storage_client.does_bucket_exist(bucket1_id) # noqa: S101 print(f" - download object from bucket {bucket2_id}") download_url2 = await storage_client.get_object_download_url( diff --git a/src/hexkit/providers/testing/eventpub.py b/src/hexkit/providers/testing/eventpub.py index cc37efdd..4d505956 100644 --- a/src/hexkit/providers/testing/eventpub.py +++ b/src/hexkit/providers/testing/eventpub.py @@ -53,12 +53,10 @@ def __init__(self): def post(self, topic: str, event: Event) -> None: """Queue a new event to a topic.""" - self.topics[topic].append(event) def get(self, topic) -> Event: """Get the next element in the queue corresponding to the specified topic.""" - try: return self.topics[topic].popleft() except IndexError as error: @@ -73,9 +71,7 @@ class InMemEventPublisher(EventPublisherProtocol): """ def __init__(self, event_store: Optional[InMemEventStore] = None): - """ - Initialize with existing event_store or let it create a new one. - """ + """Initialize with existing event_store or let it create a new one.""" self.event_store = event_store if event_store else InMemEventStore() async def _publish_validated( diff --git a/src/hexkit/utils.py b/src/hexkit/utils.py index a1f5e099..75d3385c 100644 --- a/src/hexkit/utils.py +++ b/src/hexkit/utils.py @@ -62,7 +62,6 @@ def calc_part_size(*, file_size: int, preferred_part_size: Optional[int] = None) Raises: ValueError if file size exceeds the maximum of 5 TiB """ - if preferred_part_size is None: preferred_part_size = 8 * 1024**2 @@ -91,7 +90,8 @@ def calc_part_size(*, file_size: int, preferred_part_size: Optional[int] = None) def check_ascii(*str_values: str): """Checks that the provided `str_values` are ASCII compatible, - raises an exception otherwise.""" + raises an exception otherwise. + """ for str_value in str_values: if not str_value.isascii(): raise NonAsciiStrError(str_value=str_value) @@ -103,8 +103,8 @@ def validate_fields_in_model( fields: Collection[str], ) -> None: """Checks that all provided fields are present in the dto_model. 
- Raises IndexFieldsInvalidError otherwise.""" - + Raises IndexFieldsInvalidError otherwise. + """ fields_set = set(fields) existing_fields_set = set(model.schema()["properties"]) diff --git a/tests/fixtures/config.py b/tests/fixtures/config.py index 8ffb2e6c..3f6f237d 100644 --- a/tests/fixtures/config.py +++ b/tests/fixtures/config.py @@ -20,7 +20,7 @@ import re from dataclasses import dataclass from pathlib import Path -from typing import Any, Dict +from typing import Any from pydantic import BaseModel, BaseSettings @@ -37,7 +37,7 @@ class ConfigYamlFixture(BaseModel): """Container for config yaml fixtures""" path: Path - content: Dict[str, Any] + content: dict[str, Any] def read_config_yaml(name: str): @@ -61,14 +61,16 @@ class EnvVarFixture: """Container for env var set. This class can be used as context manager so that the env vars are available within a with block but, after leaving the with block, - the original enviroment is restored.""" + the original enviroment is restored. + """ - env_vars: Dict[str, str] + env_vars: dict[str, str] prefix: str = DEFAULT_CONFIG_PREFIX def __enter__(self): - """makes a backup of the environment and set the - env_vars""" + """Makes a backup of the environment and set the + env_vars + """ # pylint: disable=attribute-defined-outside-init self.env_backup = copy.deepcopy(os.environ) @@ -76,11 +78,12 @@ def __enter__(self): os.environ[f"{self.prefix}_{name}"] = value def __exit__(self, exc_type, exc_val, exc_tb): - """restores the original environment""" - os.environ = self.env_backup + """Restores the original environment""" + os.environ.clear() + os.environ = self.env_backup # noqa: B003 -def read_env_var_sets() -> Dict[str, EnvVarFixture]: +def read_env_var_sets() -> dict[str, EnvVarFixture]: """Read env vars sets and return a list of EnvVarFixtures.""" env_var_dict = utils.read_yaml(BASE_DIR / "config_env_var_sets.yaml") diff --git a/tests/fixtures/dummy_joint.py b/tests/fixtures/dummy_joint.py index 0a97b183..b3186da1 100644 --- a/tests/fixtures/dummy_joint.py +++ b/tests/fixtures/dummy_joint.py @@ -15,8 +15,8 @@ # """Simple joint fixture for testing the event loop fixture override's impact""" +from collections.abc import AsyncGenerator from dataclasses import dataclass -from typing import AsyncGenerator import pytest_asyncio @@ -35,4 +35,5 @@ class JointFixture: @pytest_asyncio.fixture(scope="module") async def joint_fixture(s3_fixture) -> AsyncGenerator[JointFixture, None]: + """Simple joint fixture only wrapping S3 fixture""" yield JointFixture(s3_fixture=s3_fixture) diff --git a/tests/fixtures/inject.py b/tests/fixtures/inject.py index 94482eed..25df86bf 100644 --- a/tests/fixtures/inject.py +++ b/tests/fixtures/inject.py @@ -24,9 +24,7 @@ class ValidResource(dependency_injector.resources.AsyncResource): - """ - An example of an AsyncResource as defined by the `dependency_injector` framework. - """ + """An example of an AsyncResource as defined by the `dependency_injector` framework.""" class Resource: """Returned upon executing the `init` method.""" @@ -41,13 +39,12 @@ async def init(self, foo: str = "foo") -> Resource: return self.Resource(foo=foo) async def shutdown(self, resource: Resource) -> None: # type: ignore + """Cleanup async resource""" resource.in_context = False class ValidSyncResource(dependency_injector.resources.Resource): - """ - An example of an ordinary Resource as defined by the `dependency_injector` framework. 
- """ + """An example of an ordinary Resource as defined by the `dependency_injector` framework.""" class Resource: """Returned upon executing the `init` method.""" @@ -62,6 +59,7 @@ def init(self, foo: str = "foo") -> Resource: return self.Resource(foo=foo) def shutdown(self, resource: Resource) -> None: # type: ignore + """Cleanup async resource""" resource.in_context = False diff --git a/tests/fixtures/utils.py b/tests/fixtures/utils.py index 2fc9b802..3e84e3f5 100644 --- a/tests/fixtures/utils.py +++ b/tests/fixtures/utils.py @@ -31,5 +31,5 @@ def read_yaml(path: Path) -> dict: """Read yaml file and return content as dict.""" - with open(path, "r", encoding="UTF-8") as file: + with open(path, encoding="UTF-8") as file: return yaml.safe_load(file) diff --git a/tests/integration/test_akafka_testutils.py b/tests/integration/test_akafka_testutils.py index c7daaf16..ec0e94fa 100644 --- a/tests/integration/test_akafka_testutils.py +++ b/tests/integration/test_akafka_testutils.py @@ -15,18 +15,18 @@ # """Testing of the Kafka testutils.""" -from typing import Sequence +from collections.abc import Sequence import pytest from kafka import KafkaAdminClient from hexkit.providers.akafka import KafkaConfig, KafkaEventPublisher -from hexkit.providers.akafka.testutils import kafka_fixture # noqa: F401 from hexkit.providers.akafka.testutils import ( ExpectedEvent, KafkaFixture, RecordedEvent, ValidationError, + kafka_fixture, # noqa: F401 ) @@ -83,7 +83,6 @@ async def test_event_recorder( kafka_fixture: KafkaFixture, # noqa: F811 ): """Test event recording using the EventRecorder class.""" - topic = "test_topic" config = KafkaConfig( @@ -112,7 +111,6 @@ async def test_expect_events_happy( """Test successful validation of recorded events with the expect_events method of the KafkaFixture. """ - expected_events = [ ExpectedEvent( payload={"test_content": "Hello"}, type_="test_hello", key="test_key" @@ -219,7 +217,6 @@ async def test_expect_events_mismatch( the methods `start_recording` and `stop_and_check` so that we can nicely locate where the ValidationError is thrown. """ - expected_events = [ ExpectedEvent( payload={"test_content": "Hello"}, type_="test_hello", key="test_key" diff --git a/tests/integration/test_inject.py b/tests/integration/test_inject.py index cd7746c2..d9dd19b0 100644 --- a/tests/integration/test_inject.py +++ b/tests/integration/test_inject.py @@ -44,7 +44,6 @@ async def test_context_constructor_with_decl_container(): Test the context constructor together with the `DeclarativeContainer` from the `dependency_injector` framework. """ - foo = "bar" class Container(dependency_injector.containers.DeclarativeContainer): @@ -75,10 +74,7 @@ class Container(dependency_injector.containers.DeclarativeContainer): ], ) async def test_container_base(provides, constructor, has_context: bool): - """ - Test the ContainerBase and its contextual setup and teardown functionality. - """ - + """Test the ContainerBase and its contextual setup and teardown functionality.""" foo = "bar" class Container(ContainerBase): @@ -99,7 +95,8 @@ class Container(ContainerBase): @pytest.mark.asyncio async def test_container_base_sync_resouce(): """Make sure that using a non async Resource with the ContainerBase results in an - exception.""" + exception. 
+    """

     class Container(ContainerBase):
         test = dependency_injector.providers.Resource(ValidSyncResource, "bar")
@@ -138,24 +135,26 @@ class ExampleConfig(BaseSettings):

     class SyncConfigConsumer:
         """A class that consumes an entire ExampleConfig instance (and not just
-        individual parameters)."""
+        individual parameters).
+        """

         def __init__(self, *, config: ExampleConfig):
             """Takes an ExampleConfig instance and checks their values against the
-            expectation."""
-
+            expectation.
+            """
             self.config = config

     class AsyncConfigConsumer(SyncConfigConsumer):
         """A class that consumes an entire ExampleConfig instance (and not just
-        individual parameters). Is constucted using an async context manager."""
+        individual parameters). Is constructed using an async context manager.
+        """

         @classmethod
         @asynccontextmanager
         async def construct(cls, *, config: ExampleConfig):
             """A constructor with setup and teardown logic.
-            Just there so that we can use the container as an async context manager."""
-
+            Just there so that we can use the container as an async context manager.
+            """
             yield cls(config=config)

     class Container(ContainerBase):
diff --git a/tests/integration/test_mongodb.py b/tests/integration/test_mongodb.py
index d5e51bf7..fc4ca583 100644
--- a/tests/integration/test_mongodb.py
+++ b/tests/integration/test_mongodb.py
@@ -44,6 +44,8 @@ class ExampleCreationDto(BaseModel):
     field_c: bool

     class Config:
+        """Additional config options for model"""
+
         frozen = True


@@ -56,7 +58,6 @@ class ExampleDto(ExampleCreationDto):
 @pytest.mark.asyncio
 async def test_dao_find_all_with_id(mongodb_fixture: MongoDbFixture):  # noqa: F811
     """Test using the id field as part of the mapping in find_all()"""
-
     dao = await mongodb_fixture.dao_factory.get_dao(
         name="example",
         dto_model=ExampleDto,
@@ -110,7 +111,6 @@ async def test_dao_find_all_without_collection(
     mongodb_fixture: MongoDbFixture,  # noqa: F811
 ):
     """Test calling find_all() when there is no collection."""
-
     dao = await mongodb_fixture.dao_factory.get_dao(
         name="does-not-exist-at-all",
         dto_model=ExampleDto,
@@ -140,8 +140,8 @@ async def test_empty_collections(mongodb_fixture: MongoDbFixture):  # noqa: F811

 @pytest.mark.asyncio
 async def test_dao_happy(mongodb_fixture: MongoDbFixture):  # noqa: F811
     """Test the happy path of performing basic CRUD database interactions using
-    the MongoDbDaoFactory in a surrograte ID setting."""
-
+    the MongoDbDaoFactory in a surrogate ID setting.
+ """ dao = await mongodb_fixture.dao_factory.get_dao( name="example", dto_model=ExampleDto, @@ -254,7 +253,6 @@ async def test_dao_get_not_found( mongodb_fixture: MongoDbFixture, # noqa: F811 ): """Tests getting a non existing resource via its ID.""" - dao = await mongodb_fixture.dao_factory.get_dao( name="example", dto_model=ExampleDto, @@ -270,7 +268,6 @@ async def test_dao_update_not_found( mongodb_fixture: MongoDbFixture, # noqa: F811 ): """Tests updating a non existing resource.""" - dao = await mongodb_fixture.dao_factory.get_dao( name="example", dto_model=ExampleDto, @@ -290,7 +287,6 @@ async def test_dao_delete_not_found( mongodb_fixture: MongoDbFixture, # noqa: F811 ): """Tests deleting a non existing resource via its ID.""" - dao = await mongodb_fixture.dao_factory.get_dao( name="example", dto_model=ExampleDto, @@ -306,7 +302,6 @@ async def test_dao_find_invalid_mapping( mongodb_fixture: MongoDbFixture, # noqa: F811 ): """Tests find_one and find_all methods with an invalid mapping.""" - dao = await mongodb_fixture.dao_factory.get_dao( name="example", dto_model=ExampleDto, @@ -326,7 +321,6 @@ async def test_dao_find_no_hits( mongodb_fixture: MongoDbFixture, # noqa: F811 ): """Tests find_one and find_all methods with a mapping that results in no hits.""" - dao = await mongodb_fixture.dao_factory.get_dao( name="example", dto_model=ExampleDto, @@ -346,7 +340,6 @@ async def test_dao_find_one_with_multiple_hits( mongodb_fixture: MongoDbFixture, # noqa: F811 ): """Tests find_one with a mapping that results in multiple hits.""" - dao = await mongodb_fixture.dao_factory.get_dao( name="example", dto_model=ExampleDto, @@ -375,8 +368,8 @@ async def prefixed_count_id_generator( prefix: str, count_offset: int = 1 ) -> AsyncGenerator[str, None]: """A generator that yields IDs by counting upwards und prefixing that counts - with a predefined string.""" - + with a predefined string. + """ for count in itertools.count(start=count_offset): yield f"{prefix}-{count}" diff --git a/tests/integration/test_s3.py b/tests/integration/test_s3.py index dc9a48d8..47ea64c9 100644 --- a/tests/integration/test_s3.py +++ b/tests/integration/test_s3.py @@ -13,11 +13,10 @@ # See the License for the specific language governing permissions and # limitations under the License. -""" -Test S3 storage DAO -""" +"""Test S3 storage DAO""" -from typing import ContextManager, Optional +from contextlib import AbstractContextManager +from typing import Optional import pytest from black import nullcontext @@ -48,7 +47,6 @@ async def test_empty_buckets( s3_fixture: S3Fixture, file_fixture: FileObject # noqa: F811 ): """Make sure the empty_buckets function works""" - # bucket should not exist in the beginning: bucket_id = file_fixture.bucket_id assert not await s3_fixture.storage.does_bucket_exist(bucket_id=bucket_id) @@ -78,10 +76,7 @@ async def test_typical_workflow( use_multipart_upload: bool, s3_fixture: S3Fixture, # noqa: F811 ): - """ - Tests all methods of the ObjectStorageS3 DAO implementation in one long workflow. 
- """ - + """Tests all methods of the ObjectStorageS3 DAO implementation in one long workflow.""" with temp_file_object(size=20 * MEBIBYTE) as file: await typical_workflow( storage_client=s3_fixture.storage, @@ -100,7 +95,6 @@ async def test_object_existence_checks( file_fixture: FileObject, # noqa: F811 ): """Test if the checks for existence of objects work correctly.""" - # object should not exist in the beginning: assert not await s3_fixture.storage.does_object_exist( bucket_id=file_fixture.bucket_id, object_id=file_fixture.object_id @@ -121,7 +115,6 @@ async def test_get_object_size( file_fixture: FileObject, # noqa: F811 ): """Test if the get_object_size method returns the correct size.""" - expected_size = len(file_fixture.content) await s3_fixture.populate_file_objects([file_fixture]) @@ -138,7 +131,6 @@ async def test_list_all_object_ids( file_fixture: FileObject, # noqa: F811 ): """Test if listing all object IDs for a bucket works correctly.""" - file_fixture2 = file_fixture.copy(deep=True) file_fixture2.object_id = "mydefaulttestobject002" @@ -155,7 +147,6 @@ async def test_list_all_object_ids( @pytest.mark.asyncio async def test_bucket_existence_checks(s3_fixture: S3Fixture): # noqa: F811 """Test if the checks for existence of buckets work correctly.""" - bucket_id = EXAMPLE_BUCKETS[0] # bucket should not exist in the beginning: @@ -176,7 +167,6 @@ async def test_object_and_bucket_collisions( Tests whether overwriting (re-creation, re-upload, or copy to existing object) fails with the expected error. """ - await s3_fixture.populate_file_objects([file_fixture]) with pytest.raises(ObjectStorageProtocol.BucketAlreadyExistsError): @@ -207,7 +197,6 @@ async def test_handling_non_existing_file_and_bucket( Tests whether interacting with a non-existing bucket/file object fails with the expected result. """ - non_existing_bucket_id = "mynonexistingbucket001" non_existing_object_id = "mynonexistingobject001" existing_bucket_id = file_fixture.bucket_id @@ -278,10 +267,9 @@ async def test_delete_non_empty_bucket( file_fixture: FileObject, # noqa: F811 ): """Test deleting an non-empty bucket.""" - await s3_fixture.populate_file_objects([file_fixture]) - with nullcontext() if delete_content else pytest.raises( # type: ignore + with nullcontext() if delete_content else pytest.raises( ObjectStorageProtocol.BucketNotEmptyError ): await s3_fixture.storage.delete_bucket( @@ -309,7 +297,6 @@ async def test_using_non_existing_upload( Makes sure that using a non existing upload_id-bucket_id-object_id combination throws the right error. 
""" - # prepare a non-completed upload: real_upload_id, real_bucket_id, real_object_id = await prepare_non_completed_upload( s3_fixture @@ -319,8 +306,8 @@ async def test_using_non_existing_upload( bucket_id = real_bucket_id if bucket_id_correct else "wrong-bucket" object_id = real_object_id if object_id_correct else "wrong-object" - def get_exception_context() -> ContextManager: - return pytest.raises(exception) if exception else nullcontext() # type: ignore + def get_exception_context() -> AbstractContextManager: + return pytest.raises(exception) if exception else nullcontext() # call relevant methods from the provider: with pytest.raises(exception): @@ -350,7 +337,6 @@ async def test_invalid_part_number( s3_fixture: S3Fixture, # noqa: F811 ): """Check that invalid part numbers are cached correctly.""" - upload_id, bucket_id, object_id = await prepare_non_completed_upload(s3_fixture) with pytest.raises(exception) if exception else nullcontext(): # type: ignore @@ -413,10 +399,7 @@ async def test_complete_multipart_upload( exception: Optional[Exception], s3_fixture: S3Fixture, # noqa: F811 ): - """ - Test the complete_multipart_upload method. - """ - + """Test the complete_multipart_upload method.""" upload_id, bucket_id, object_id = await get_initialized_upload(s3_fixture) for part_idx, part_size in enumerate(part_sizes): await upload_part_of_size( @@ -444,10 +427,7 @@ async def test_abort_multipart_upload( empty_upload: bool, s3_fixture: S3Fixture, # noqa: F811 ): - """ - Test the abort_multipart_upload method. - """ - + """Test the abort_multipart_upload method.""" upload_id, bucket_id, object_id = await get_initialized_upload(s3_fixture) async def upload_part_shortcut(part_number): @@ -482,10 +462,7 @@ async def upload_part_shortcut(part_number): @pytest.mark.asyncio async def test_multiple_active_uploads(s3_fixture: S3Fixture): # noqa: F811 - """ - Test that multiple active uploads for the same object are not possible. - """ - + """Test that multiple active uploads for the same object are not possible.""" # initialize an upload: _, bucket_id, object_id = await get_initialized_upload(s3_fixture) @@ -504,7 +481,6 @@ async def test_handling_multiple_coexisting_uploads( Test that the invalid state of multiple uploads coexisting for the same object is correctly handled. """ - # initialize an upload: upload1_id, bucket_id, object_id = await get_initialized_upload(s3_fixture) @@ -560,7 +536,6 @@ async def test_handling_multiple_subsequent_uploads( 2. 
initiate an upload, upload some parts, complete it, then start another upload, uploads some parts, complete it (`abort_first` set to False) """ - # perform first upload: upload1_id, bucket_id, object_id = await get_initialized_upload(s3_fixture) diff --git a/tests/unit/test_akafka.py b/tests/unit/test_akafka.py index 4ebeec50..354cdff5 100644 --- a/tests/unit/test_akafka.py +++ b/tests/unit/test_akafka.py @@ -177,7 +177,7 @@ async def test_kafka_event_subscriber( assert callable(cc_kwargs["value_deserializer"]) # consume one event: - with pytest.raises(exception) if exception else nullcontext(): # type: ignore + with pytest.raises(exception) if exception else nullcontext(): await event_subscriber.run(forever=False) # check if producer was correctly started and stopped: diff --git a/tests/unit/test_config.py b/tests/unit/test_config.py index 394c3430..a5c040b0 100644 --- a/tests/unit/test_config.py +++ b/tests/unit/test_config.py @@ -26,8 +26,8 @@ def test_config_from_yaml(): """Test that config yaml correctly overwrites - default parameters""" - + default parameters + """ config_yaml = config_yamls["basic"] # update config class with content of config yaml @@ -41,7 +41,8 @@ def test_config_from_yaml(): def test_config_from_env(): """Test that env vars correctly overwrites - default parameters""" + default parameters + """ env_var_fixture = env_var_sets["basic_complete"] with env_var_fixture: # update config class with content of config yaml and @@ -56,8 +57,8 @@ def test_config_from_env(): def test_config_from_yaml_and_env(): """Test that config yaml and env vars correctly overwrites - default parameters""" - + default parameters + """ config_yaml = config_yamls["basic"] env_var_fixture = env_var_sets["basic_partly"] @@ -76,7 +77,6 @@ def test_config_from_yaml_and_env(): @pytest.mark.parametrize("cwd", [True, False]) def test_config_from_default_yaml(cwd: bool): """Test that default config yaml from home is correctly read""" - base_dir = Path(os.getcwd()) if cwd else Path.home() prefix = "test_prefix" @@ -99,8 +99,8 @@ def test_config_from_default_yaml(cwd: bool): def test_config_from_default_yaml_via_env(): """Test that default config yaml specified via an environment variable is correctly - read""" - + read + """ prefix = "test_prefix" # set env var: diff --git a/tests/unit/test_dao.py b/tests/unit/test_dao.py index 0d5525ee..79c2994d 100644 --- a/tests/unit/test_dao.py +++ b/tests/unit/test_dao.py @@ -71,12 +71,8 @@ async def _get_dao( id_generator: AsyncGenerator[str, None], ) -> Union[DaoSurrogateId[Dto, DtoCreation], DaoNaturalId[Dto]]: """*To be implemented by the provider. Input validation is done outside of this - method.*""" - ... - - """*To be implemented by the provider. Input validation is done outside of this - method.*""" - + method.* + """ raise NotImplementedError() @@ -89,7 +85,8 @@ class ExampleCreationDto(BaseModel): class ExampleInvalidCreationDto(ExampleCreationDto): """Example for a DTO creation model that is invalid because it contains a - parameter that the main DTO model is missing.""" + parameter that the main DTO model is missing. 
+ """ unexpected_param: str @@ -103,7 +100,6 @@ class ExampleDto(ExampleCreationDto): @pytest.mark.asyncio async def test_get_dto_valid(): """Use the get_dao method of the DaoFactory with valid parameters.""" - dao_factory = FakeDaoFactory() with pytest.raises(NotImplementedError): @@ -119,8 +115,8 @@ async def test_get_dto_valid(): @pytest.mark.asyncio async def test_get_dto_invalid_id(): """Use the get_dao method of the DaoFactory with an invalid ID that is not found in - the provided DTO model.""" - + the provided DTO model. + """ dao_factory = FakeDaoFactory() with pytest.raises(DaoFactoryProtocol.IdFieldNotFoundError): @@ -136,7 +132,6 @@ async def test_get_dto_invalid_id(): ) async def test_get_dto_invalid_creation_model(dto_creation_model: type[BaseModel]): """Use the get_dao method of the DaoFactory with an invalid creation model.""" - dao_factory = FakeDaoFactory() with pytest.raises(DaoFactoryProtocol.CreationModelInvalidError): @@ -151,7 +146,6 @@ async def test_get_dto_invalid_creation_model(dto_creation_model: type[BaseModel @pytest.mark.asyncio async def test_get_dto_invalid_fields_to_index(): """Use the get_dao method of the DaoFactory with an invalid list of fields to index.""" - dao_factory = FakeDaoFactory() with pytest.raises(DaoFactoryProtocol.IndexFieldsInvalidError): diff --git a/tests/unit/test_eventpub.py b/tests/unit/test_eventpub.py index 09ce5067..1da5654a 100644 --- a/tests/unit/test_eventpub.py +++ b/tests/unit/test_eventpub.py @@ -67,7 +67,7 @@ async def test_ascii_val(type_, key, topic, exception): event_publisher = FakePublisher() # publish event using the provider: - with pytest.raises(exception) if exception else nullcontext(): # type: ignore + with pytest.raises(exception) if exception else nullcontext(): await event_publisher.publish( payload=payload, type_=type_, diff --git a/tests/unit/test_eventsub.py b/tests/unit/test_eventsub.py index fbd19dbb..0e2204ce 100644 --- a/tests/unit/test_eventsub.py +++ b/tests/unit/test_eventsub.py @@ -59,7 +59,7 @@ async def test_ascii_val(type_, topic, exception): event_submitter = FakeSubscriber() # publish event using the provider: - with pytest.raises(exception) if exception else nullcontext(): # type: ignore + with pytest.raises(exception) if exception else nullcontext(): await event_submitter.consume( payload=payload, type_=type_, diff --git a/tests/unit/test_inject.py b/tests/unit/test_inject.py index 845b5408..f6dc977e 100644 --- a/tests/unit/test_inject.py +++ b/tests/unit/test_inject.py @@ -56,11 +56,8 @@ async def test_assert_constructable( constructable: type[AsyncContextConstructable], does_raises: bool ): - """ - Test that assert_constructable can distinguish between - """ - - with pytest.raises(NotConstructableError) if does_raises else nullcontext(): # type: ignore + """Test that assert_constructable can distinguish between""" + with pytest.raises(NotConstructableError) if does_raises else nullcontext(): assert_async_constructable(constructable) @@ -85,8 +82,7 @@ async def test_context_constructor_init( Test the initialization of a context constructor with valid and invalid constructables. 
""" - - with pytest.raises(exception) if exception else nullcontext(): # type: ignore + with pytest.raises(exception) if exception else nullcontext(): test = AsyncConstructor(constructable) if not exception: @@ -108,12 +104,11 @@ async def test_context_constructor_setup_teardown( has_context: bool, ): """Test whether init and shutdown correctly works with a context constructor.""" - foo = "bar" test = AsyncConstructor(constructable, foo) - with pytest.raises(exception) if exception else nullcontext(): # type: ignore + with pytest.raises(exception) if exception else nullcontext(): resource = await test.async_() assert isinstance(resource, constructable) test_instance = await test.init() # type: ignore @@ -139,8 +134,8 @@ async def test_context_constructor_setup_teardown( ) def test_get_constructor(provides: type, args, kwargs, constructor_cls: type): """Tests whether the `get_constructor` function chooses the correct constructor - classes for the given `provides` classes.""" - + classes for the given `provides` classes. + """ constructor = get_constructor(provides, *args, **kwargs) assert isinstance(constructor, constructor_cls) diff --git a/tests/unit/test_testing_eventpub.py b/tests/unit/test_testing_eventpub.py index 05f43046..661a574b 100644 --- a/tests/unit/test_testing_eventpub.py +++ b/tests/unit/test_testing_eventpub.py @@ -28,7 +28,6 @@ @pytest.mark.asyncio async def test_in_mem_publisher(): """Test the InMemEventPublisher testing utilities.""" - type_ = "test_type" key = "test_key" topic = "test_topic" diff --git a/tests/unit/test_utils.py b/tests/unit/test_utils.py index b23b6303..a746caf2 100644 --- a/tests/unit/test_utils.py +++ b/tests/unit/test_utils.py @@ -52,7 +52,7 @@ def test_calc_part_size( preferred_part_size: int, file_size: int, expected_part_size: int ): """Test code to dynamically adapt part size""" - with pytest.raises(ValueError) if file_size > 5 * TiB else nullcontext(): # type: ignore + with pytest.raises(ValueError) if file_size > 5 * TiB else nullcontext(): adapted_part_size = calc_part_size( preferred_part_size=preferred_part_size, file_size=file_size ) @@ -88,13 +88,11 @@ class ExampleModel(BaseModel): @pytest.mark.parametrize("fields", ({"param_a"}, {"param_a", "param_b"})) def test_validate_fields_in_model_happy(fields: Collection[str]): """Test validate_fields_in_model with valid parameters.""" - validate_fields_in_model(model=ExampleModel, fields=fields) @pytest.mark.parametrize("fields", ({"param_c"}, {"param_a", "param_c"})) def test_validate_fields_in_model_error(fields: Collection[str]): """Test validate_fields_in_model with invalid parameters.""" - with pytest.raises(FieldNotInModelError): validate_fields_in_model(model=ExampleModel, fields=fields)
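A recurring pattern in the test changes above is the conditional-expectation idiom `with pytest.raises(exception) if exception else nullcontext():`, now used without a trailing `# type: ignore`. The following is a minimal, self-contained sketch of that idiom; the function and test names are hypothetical and not part of hexkit:

from contextlib import nullcontext
from typing import Optional

import pytest


def parse_positive(value: int) -> int:
    """Toy function under test: rejects non-positive values."""
    if value <= 0:
        raise ValueError("value must be positive")
    return value


@pytest.mark.parametrize("value, exception", [(1, None), (0, ValueError)])
def test_parse_positive(value: int, exception: Optional[type[Exception]]):
    """One parametrized test body covers both the happy path and the error case."""
    # expect an exception for invalid input, otherwise run without any expectation:
    with pytest.raises(exception) if exception else nullcontext():
        assert parse_positive(value) == value

This keeps a single parametrized test body for both the success and the failure case instead of duplicating the test.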