
Commit

Update dependencies and requirements (#148)
Cito authored Dec 13, 2024
1 parent f9a3906 commit 7f1279e
Showing 25 changed files with 972 additions and 933 deletions.
4 changes: 2 additions & 2 deletions .devcontainer/docker-compose.yml
@@ -19,14 +19,14 @@ services:
user: vscode

zookeeper:
- image: confluentinc/cp-zookeeper:7.6.1
+ image: confluentinc/cp-zookeeper:7.8.0
environment:
ZOOKEEPER_SERVER_ID: 1
ZOOKEEPER_CLIENT_PORT: 2181
ZOOKEEPER_TICK_TIME: 2000

kafka:
- image: confluentinc/cp-server:7.6.1
+ image: confluentinc/cp-server:7.8.0
restart: always
depends_on:
- zookeeper
4 changes: 2 additions & 2 deletions .pre-commit-config.yaml
@@ -22,7 +22,7 @@ repos:
args: [--check]
pass_filenames: false
- repo: https://github.com/pre-commit/pre-commit-hooks
- rev: v4.4.0
+ rev: v5.0.0
hooks:
- id: trailing-whitespace
- id: end-of-file-fixer
@@ -48,7 +48,7 @@ repos:
- id: no-commit-to-branch
args: [--branch, dev, --branch, int, --branch, main]
- repo: https://github.com/astral-sh/ruff-pre-commit
- rev: v0.7.4
+ rev: v0.8.3
hooks:
- id: ruff
args: [--fix, --exit-non-zero-on-fix, --exclude, scripts]
12 changes: 6 additions & 6 deletions .pyproject_generation/pyproject_custom.toml
@@ -18,17 +18,17 @@ classifiers = [
dependencies = [
"pydantic >=2, <3",
"pydantic_settings >=2, <3",
"PyYAML >=6.0, <7",
"PyYAML >=6, <7",
]

[project.optional-dependencies]
- akafka = ["aiokafka~=0.11.0", "jsonschema >=4.22, <5"]
- s3 = ["boto3 >=1.34.140, <2", "botocore >=1.34.140, <2"]
+ akafka = ["aiokafka~=0.12.0", "jsonschema >=4.23, <5"]
+ s3 = ["boto3 >=1.35.80, <2", "botocore >=1.35.80, <2"]
mongodb = ["motor >=3.5.0, <4"]

- test-akafka = ["hexkit[akafka]", "testcontainers[kafka] >=4.7.1, <5"]
- test-s3 = ["hexkit[s3]", "testcontainers >=4.7.1, <5"]
- test-mongodb = ["hexkit[mongodb]", "testcontainers[mongo] >=4.7.1, <5"]
+ test-akafka = ["hexkit[akafka]", "testcontainers[kafka] >=4.9.0, <5"]
+ test-s3 = ["hexkit[s3]", "testcontainers >=4.9.0, <5"]
+ test-mongodb = ["hexkit[mongodb]", "testcontainers[mongo] >=4.9.0, <5"]
test = ["hexkit[test-akafka,test-s3,test-mongodb]"]

all = ["hexkit[test]"]
2 changes: 2 additions & 0 deletions .pyproject_generation/pyproject_template.toml
@@ -40,6 +40,7 @@ fixable = [
"UP", # e.g. List -> list
"I", # sort imports
"D", # pydocstyle
"RUF022", # sort items in __all__
]
ignore = [
"E111", # indentation with invalid multiple (for formatter)
@@ -59,6 +60,7 @@ ignore = [
"D206", # indent-with-spaces (for formatter)
"D300", # triple-single-quotes (for formatter)
"UP040", # type statement (not yet supported by mypy)
"PLC0206", # Extracting value from dictionary without calling `.items()`
]
select = [
"C90", # McCabe Complexity
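
For context on the two rule IDs above: `RUF022` is added to ruff's `fixable` list, so `ruff --fix` may re-sort `__all__` (the `__all__` reorderings in the source files later in this commit are that fix), while `PLC0206` is added to `ignore`, so dict loops that index back into the mapping are no longer flagged. A minimal sketch of both patterns, with invented names, not taken from this repository:

```python
"""Illustration of the two ruff rules referenced above (invented example code)."""

__all__ = [
    "load_config",
    "DEFAULT_TIMEOUT",  # RUF022: __all__ is not sorted; with the rule fixable,
    # `ruff --fix` rewrites the list into sorted order.
]

DEFAULT_TIMEOUT = 30


def load_config() -> dict[str, int]:
    """Return some invented settings."""
    return {"timeout": DEFAULT_TIMEOUT, "retries": 3}


def summarize(settings: dict[str, int]) -> str:
    """Iterate a dict the way PLC0206 would flag (now ignored by this config)."""
    lines = []
    for key in settings:  # PLC0206: value extracted via settings[key]
        lines.append(f"{key}={settings[key]}")  # instead of settings.items()
    return ", ".join(lines)
```
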
6 changes: 3 additions & 3 deletions lock/requirements-dev.in
@@ -6,6 +6,6 @@
# additional requirements can be listed her
exceptiongroup>=1.2.1 # for Python<3.11
tomli>=2.0.1 # for Python<3.11
- cryptography>=42.0.8
- pytest-asyncio>=0.23.6
- tox>=4.16.0
+ cryptography>=44.0.0
+ pytest-asyncio>=0.25.0
+ tox>=4.23.2
1,009 changes: 529 additions & 480 deletions lock/requirements-dev.txt

Large diffs are not rendered by default.

624 changes: 322 additions & 302 deletions lock/requirements.txt

Large diffs are not rendered by default.

18 changes: 10 additions & 8 deletions pyproject.toml
@@ -28,35 +28,35 @@ description = "A Toolkit for Building Microservices using the Hexagonal Architec
dependencies = [
"pydantic >=2, <3",
"pydantic_settings >=2, <3",
"PyYAML >=6.0, <7",
"PyYAML >=6, <7",
]

[project.license]
text = "Apache 2.0"

[project.optional-dependencies]
akafka = [
"aiokafka~=0.11.0",
"jsonschema >=4.22, <5",
"aiokafka~=0.12.0",
"jsonschema >=4.23, <5",
]
s3 = [
"boto3 >=1.34.140, <2",
"botocore >=1.34.140, <2",
"boto3 >=1.35.80, <2",
"botocore >=1.35.80, <2",
]
mongodb = [
"motor >=3.5.0, <4",
]
test-akafka = [
"hexkit[akafka]",
"testcontainers[kafka] >=4.7.1, <5",
"testcontainers[kafka] >=4.9.0, <5",
]
test-s3 = [
"hexkit[s3]",
"testcontainers >=4.7.1, <5",
"testcontainers >=4.9.0, <5",
]
test-mongodb = [
"hexkit[mongodb]",
"testcontainers[mongo] >=4.7.1, <5",
"testcontainers[mongo] >=4.9.0, <5",
]
test = [
"hexkit[test-akafka,test-s3,test-mongodb]",
@@ -95,6 +95,7 @@ fixable = [
"UP",
"I",
"D",
"RUF022",
]
ignore = [
"E111",
@@ -114,6 +115,7 @@ ignore = [
"D206",
"D300",
"UP040",
"PLC0206",
]
select = [
"C90",
8 changes: 4 additions & 4 deletions src/hexkit/correlation.py
@@ -28,13 +28,13 @@
correlation_id_var: ContextVar[str] = ContextVar("correlation_id", default="")

__all__ = [
"set_correlation_id",
"set_new_correlation_id",
"CorrelationIdContextError",
"InvalidCorrelationIdError",
"get_correlation_id",
"new_correlation_id",
"set_correlation_id",
"set_new_correlation_id",
"validate_correlation_id",
"CorrelationIdContextError",
"InvalidCorrelationIdError",
]


8 changes: 4 additions & 4 deletions src/hexkit/protocols/dao.py
@@ -34,12 +34,12 @@
from hexkit.utils import FieldNotInModelError, validate_fields_in_model

__all__ = [
"ResourceNotFoundError",
"ResourceAlreadyExistsError",
"FindError",
"MultipleHitsFoundError",
"Dao",
"DaoFactoryProtocol",
"FindError",
"MultipleHitsFoundError",
"ResourceAlreadyExistsError",
"ResourceNotFoundError",
"UUID4Field",
]

2 changes: 1 addition & 1 deletion src/hexkit/protocols/objstorage.py
@@ -22,7 +22,7 @@
from abc import ABC, abstractmethod
from typing import NamedTuple, Optional

__all__ = ["PresignedPostURL", "ObjectStorageProtocol"]
__all__ = ["ObjectStorageProtocol", "PresignedPostURL"]

DEFAULT_URL_EXPIRATION_PERIOD = 24 * 60 * 60 # default expiration time 24 hours

2 changes: 1 addition & 1 deletion src/hexkit/providers/akafka/__init__.py
@@ -27,8 +27,8 @@
)

__all__ = [
"KafkaConfig",
"KafkaEventPublisher",
"KafkaEventSubscriber",
"KafkaConfig",
"KafkaOutboxSubscriber",
]
2 changes: 1 addition & 1 deletion src/hexkit/providers/akafka/config.py
@@ -64,7 +64,7 @@ class KafkaConfig(BaseSettings):
default="", description="Optional filename containing the client private key."
)
kafka_ssl_password: SecretStr = Field(
default="",
default=SecretStr(""),
description="Optional password to be used for the client private key.",
)
generate_correlation_id: bool = Field(
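
The change above wraps the default in `SecretStr` so it matches the field's declared type directly rather than relying on coercion of a plain string. A minimal sketch of the pattern, assuming pydantic v2 and pydantic-settings; the class and field names below are invented:

```python
from pydantic import Field, SecretStr
from pydantic_settings import BaseSettings


class DemoKafkaConfig(BaseSettings):
    """Invented settings class mirroring the field definition above."""

    ssl_password: SecretStr = Field(
        # The default is already a SecretStr instance, matching the annotation.
        default=SecretStr(""),
        description="Optional password to be used for the client private key.",
    )


config = DemoKafkaConfig(ssl_password="hunter2")
print(config.ssl_password)                     # masked in str/repr output, e.g. '**********'
print(config.ssl_password.get_secret_value())  # 'hunter2' - explicit access only
```
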
8 changes: 4 additions & 4 deletions src/hexkit/providers/akafka/provider/__init__.py
@@ -32,13 +32,13 @@
)

__all__ = [
"KafkaEventPublisher",
"ConsumerEvent",
"ExtractedEventInfo",
"KafkaDLQSubscriber",
"KafkaEventPublisher",
"KafkaEventSubscriber",
"ConsumerEvent",
"headers_as_dict",
"KafkaOutboxSubscriber",
"KafkaDLQSubscriber",
"headers_as_dict",
"process_dlq_event",
"validate_dlq_headers",
]
17 changes: 8 additions & 9 deletions src/hexkit/providers/akafka/provider/eventsub.py
@@ -490,11 +490,10 @@ async def _consume_event(self, event: ConsumerEvent) -> None:
await self._consumer.commit()

async def run(self, forever: bool = True) -> None:
"""
Start consuming events and passing them down to the translator.
By default, it blocks forever.
However, you can set `forever` to `False` to make it return after handling one
event.
"""Start consuming events and passing them down to the translator.
By default, this method blocks forever. However, you can set `forever`
to `False` to make it return after handling one event.
"""
if forever:
async for event in self._consumer:
@@ -717,11 +716,11 @@ async def _ignore_event(self, event: ConsumerEvent) -> None:
await self._consumer.commit()

async def run(self, ignore: bool = False) -> None:
"""
Handles one event and returns.
"""Handle one event and return.
If `ignore` is True, the event will be ignored outright.
Otherwise, `_process_dlq_event` will be used to validate and determine what to
do with the event.
Otherwise, `_process_dlq_event` will be used to validate
and determine what to do with the event.
"""
event = await self._consumer.__anext__()
if ignore:
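
The reworded docstrings describe the two consumption modes of `run()`. A purely illustrative sketch (not part of this commit); how the subscriber instance is constructed is assumed and elided here:

```python
import asyncio
import contextlib


async def handle_one_event(subscriber) -> None:
    """Process exactly one event, e.g. in a test.

    `subscriber` is assumed to be an already constructed KafkaEventSubscriber.
    """
    await subscriber.run(forever=False)  # returns after a single event is handled


async def consume_for(subscriber, seconds: float) -> None:
    """Let the blocking default run for a bounded time, then cancel it."""
    task = asyncio.create_task(subscriber.run())  # forever=True: blocks indefinitely
    await asyncio.sleep(seconds)
    task.cancel()
    with contextlib.suppress(asyncio.CancelledError):
        await task
```
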
94 changes: 29 additions & 65 deletions src/hexkit/providers/akafka/testcontainer.py
@@ -16,27 +16,22 @@

"""Improved Kafka test containers."""

- import tarfile
- import time
- from io import BytesIO
from textwrap import dedent
from typing import Literal, Optional

- from testcontainers.core.container import DockerContainer
- from testcontainers.core.waiting_utils import wait_for_logs
+ from testcontainers.kafka import KafkaContainer

__all__ = ["KafkaSSLContainer"]

DEFAULT_IMAGE = "confluentinc/cp-kafka:7.6.1"
DEFAULT_IMAGE = "confluentinc/cp-kafka:7.8.0"

DEFAULT_PORT = 9093 # default port for the Kafka container
BROKER_PORT = 9092 # auxiliary port for inter broker listener


- class KafkaSSLContainer(DockerContainer):
+ class KafkaSSLContainer(KafkaContainer):
"""Kafka container that supports SSL (or actually TLS)."""

- TC_START_SCRIPT = "/tc-start.sh"
SECRETS_PATH = "/etc/kafka/secrets"

def __init__( # noqa: C901, PLR0912, PLR0913
@@ -58,24 +53,18 @@ def __init__( # noqa: C901, PLR0912, PLR0913
"trusted" must contain the trusted certificates. In "client_auth" you can
specify whether authentication is requested, required or not needed at all.
"""
- super().__init__(image, **kwargs)
- env = self.with_env
self.port = port
+ super().__init__(image, port, **kwargs)
ssl = bool(cert or trusted or client_auth)
protocol = "SSL" if ssl else "PLAINTEXT"
self.protocol = protocol
- self.with_exposed_ports(port)
self.broker_port = DEFAULT_PORT if port == BROKER_PORT else BROKER_PORT
listeners = f"{protocol}://0.0.0.0:{port},BROKER://0.0.0.0:{self.broker_port}"
protocol_map = f"BROKER:PLAINTEXT,{protocol}:{protocol}"
env("KAFKA_LISTENERS", listeners)
env("KAFKA_INTER_BROKER_LISTENER_NAME", "BROKER")
env("KAFKA_LISTENER_SECURITY_PROTOCOL_MAP", protocol_map)
env("KAFKA_BROKER_ID", "1")
env("KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR", "1")
env("KAFKA_OFFSETS_TOPIC_NUM_PARTITIONS", "1")
env("KAFKA_LOG_FLUSH_INTERVAL_MESSAGES", "10000000")
env("KAFKA_GROUP_INITIAL_REBALANCE_DELAY_MS", "0")
self.listeners = (
f"{protocol}://0.0.0.0:{port},BROKER://0.0.0.0:{self.broker_port}"
)
self.security_protocol_map = f"BROKER:PLAINTEXT,{protocol}:{protocol}"
env = self.with_env
env("KAFKA_LISTENERS", self.listeners)
env("KAFKA_LISTENER_SECURITY_PROTOCOL_MAP", self.security_protocol_map)
if ssl:
if cert:
cert = cert.strip().replace("\n", "\\n")
@@ -106,52 +95,27 @@ def __init__( # noqa: C901, PLR0912, PLR0913
env("KAFKA_SSL_CLIENT_AUTH", client_auth)
env("KAFKA_SSL_ENDPOINT_IDENTIFICATION_ALGORITHM", " ")

- def get_bootstrap_server(self) -> str:
- """Get the Kafka bootstrap server."""
- host = self.get_container_host_ip()
- port = self.get_exposed_port(self.port)
- return f"{host}:{port}"
-
- def start(self, timeout: Optional[float] = 30) -> "KafkaSSLContainer":
- """Start the Docker container."""
- script = self.TC_START_SCRIPT
- command = f'sh -c "while [ ! -f {script} ]; do sleep 0.1; done; sh {script}"'
- self.with_command(command)
- super().start()
- self.tc_start()
- wait_for_logs(self, r".*\[KafkaServer id=\d+\] started.*", timeout=timeout)
- return self
-
def tc_start(self) -> None:
"""Start the test container."""
protocol = self.protocol
host = self.get_container_host_ip()
port = self.get_exposed_port(self.port)
listeners = f"{protocol}://{host}:{port},BROKER://127.0.0.1:{self.broker_port}"
script = f"""
#!/bin/bash
c=/etc/confluent/docker
. $c/bash-config
export KAFKA_ADVERTISED_LISTENERS={listeners}
export KAFKA_ZOOKEEPER_CONNECT=localhost:2181
p=zookeeper.properties
echo "clientPort=2181" > $p
echo "dataDir=/var/lib/zookeeper/data" >> $p
echo "dataLogDir=/var/lib/zookeeper/log" >> $p
zookeeper-server-start $p &
# workaround for https://github.com/confluentinc/kafka-images/issues/244
sed -i -E '/^if .*LISTENERS.*SSL:/,/^fi/d' $c/configure
$c/configure && $c/launch
"""
self.create_file(dedent(script).strip().encode("utf-8"), self.TC_START_SCRIPT)

def create_file(self, content: bytes, path: str) -> None:
"""Create a file inside the container."""
with BytesIO() as archive:
with tarfile.TarFile(fileobj=archive, mode="w") as tar:
tarinfo = tarfile.TarInfo(name=path)
tarinfo.size = len(content)
tarinfo.mtime = time.time()
tar.addfile(tarinfo, BytesIO(content))
archive.seek(0)
self.get_wrapped_container().put_archive("/", archive)
+ data = (
+ dedent(
+ f"""
+ #!/bin/bash
+ {self.boot_command}
+ export KAFKA_ADVERTISED_LISTENERS={listeners}
+ c=/etc/confluent/docker
+ . $c/bash-config
+ # workaround for https://github.com/confluentinc/kafka-images/issues/244
+ sed -i -E '/^if .*LISTENERS.*SSL:/,/^fi/d' $c/configure
+ $c/configure
+ $c/launch
+ """
+ )
+ .strip()
+ .encode("utf-8")
+ )
+ self.create_file(data, KafkaContainer.TC_START_SCRIPT)
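
With the custom start/bootstrap/create_file plumbing removed, the class now leans on testcontainers' `KafkaContainer` base for container lifecycle handling. A hedged usage sketch (not from this commit); the constructor arguments follow the parameters visible in the diff above, the real signature may differ, and the certificate strings are placeholders:

```python
from hexkit.providers.akafka.testcontainer import KafkaSSLContainer

# Placeholder PEM material - real tests would generate proper certificates and keys.
SERVER_CERT = "-----BEGIN CERTIFICATE-----\n...\n-----END CERTIFICATE-----"
TRUSTED_CA = "-----BEGIN CERTIFICATE-----\n...\n-----END CERTIFICATE-----"

# `cert`, `trusted` and `client_auth` are the parameters referenced in the diff above.
with KafkaSSLContainer(
    cert=SERVER_CERT,
    trusted=TRUSTED_CA,
    client_auth="required",
) as kafka:
    # Start/stop and bootstrap-server lookup come from the testcontainers base classes.
    bootstrap = kafka.get_bootstrap_server()
    print(bootstrap)  # e.g. "localhost:<mapped port>"
```
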
