From a0081f3ce4630484027c3d7930a4aa05cbf2a641 Mon Sep 17 00:00:00 2001 From: walzph Date: Wed, 13 Sep 2023 18:34:13 +0200 Subject: [PATCH 01/29] Add security_protocol, ssl_context to KafkaConfig --- src/hexkit/providers/akafka/provider.py | 15 +++++++++++++++ 1 file changed, 15 insertions(+) diff --git a/src/hexkit/providers/akafka/provider.py b/src/hexkit/providers/akafka/provider.py index c1b40b24..41b49357 100644 --- a/src/hexkit/providers/akafka/provider.py +++ b/src/hexkit/providers/akafka/provider.py @@ -24,6 +24,7 @@ import json import logging from contextlib import asynccontextmanager +import ssl from typing import Any, Callable, Protocol, TypeVar from aiokafka import AIOKafkaConsumer, AIOKafkaProducer @@ -70,6 +71,16 @@ class KafkaConfig(BaseSettings): examples=[["localhost:9092"]], description="A list of connection strings to connect to Kafka bootstrap servers.", ) + security_protocol: str = Field( + "PLAINTEXT", + description="Protocol used to communicate with brokers. " + + "Valid values are: PLAINTEXT, SSL." + ) + ssl_context: ssl.SSLContext = Field( + None, + description="pre-configured SSLContext for wrapping socket connections. " + + "Directly passed into asyncio’s create_connection(). Default: None" + ) class EventTypeNotFoundError(RuntimeError): @@ -149,6 +160,8 @@ async def construct( producer = kafka_producer_cls( bootstrap_servers=",".join(config.kafka_servers), + security_protocol=config.security_protocol, + ssl_context=config.ssl_context, client_id=client_id, key_serializer=lambda key: key.encode("ascii"), value_serializer=lambda event_value: json.dumps(event_value).encode( @@ -297,6 +310,8 @@ async def construct( consumer = kafka_consumer_cls( *topics, bootstrap_servers=",".join(config.kafka_servers), + security_protocol=config.security_protocol, + ssl_context=config.ssl_context, client_id=client_id, group_id=config.service_name, auto_offset_reset="earliest", From 9dc45e12af68847c640d7af4d892ad2c94d9422f Mon Sep 17 00:00:00 2001 From: walzph Date: Wed, 20 Sep 2023 09:30:19 +0200 Subject: [PATCH 02/29] Change field type and unravel ssl_context fields --- src/hexkit/providers/akafka/provider.py | 57 +++++++++++++++++++++---- 1 file changed, 49 insertions(+), 8 deletions(-) diff --git a/src/hexkit/providers/akafka/provider.py b/src/hexkit/providers/akafka/provider.py index 41b49357..1d95a922 100644 --- a/src/hexkit/providers/akafka/provider.py +++ b/src/hexkit/providers/akafka/provider.py @@ -25,11 +25,12 @@ import logging from contextlib import asynccontextmanager import ssl -from typing import Any, Callable, Protocol, TypeVar +from typing import Any, Callable, Protocol, TypeVar, Optional from aiokafka import AIOKafkaConsumer, AIOKafkaProducer from pydantic import Field from pydantic_settings import BaseSettings +from aiokafka.helpers import create_ssl_context from hexkit.base import InboundProviderBase from hexkit.custom_types import Ascii, JsonObject @@ -71,15 +72,40 @@ class KafkaConfig(BaseSettings): examples=[["localhost:9092"]], description="A list of connection strings to connect to Kafka bootstrap servers.", ) - security_protocol: str = Field( + security_protocol: Literal["PLAINTEXT", "SSL"] = Field( "PLAINTEXT", description="Protocol used to communicate with brokers. " - + "Valid values are: PLAINTEXT, SSL." + + "Valid values are: PLAINTEXT, SSL.", ) - ssl_context: ssl.SSLContext = Field( + ssl_cafile: str = Field( None, - description="pre-configured SSLContext for wrapping socket connections. " - + "Directly passed into asyncio’s create_connection(). 
Default: None" + description="""Certificate Authority file path containing certificates + used to sign broker certificates. If CA not specified (by either + cafile, capath, cadata) default system CA will be used if found by + OpenSSL. For more information see + :meth:`~ssl.SSLContext.load_verify_locations`. + Default: :data:`None`""", + ) + ssl_certfile: str = Field( + None, + description="""optional filename of file in PEM format containing + the client certificate, as well as any CA certificates needed to + establish the certificate's authenticity. For more information see + :meth:`~ssl.SSLContext.load_cert_chain`. + Default: :data:`None`.""", + ) + ssl_keyfile: str = Field( + None, + description=""""optional filename containing the client private key. + For more information see :meth:`~ssl.SSLContext.load_cert_chain`. + Default: :data:`None`.""", + ) + ssl_password: str = Field( + None, + description="""optional password to be used when loading the + certificate chain. For more information see + :meth:`~ssl.SSLContext.load_cert_chain`. + Default: :data:`None`.""", ) @@ -95,6 +121,21 @@ def generate_client_id(*, service_name: str, instance_id: str) -> str: return f"{service_name}.{instance_id}" +def generate_ssl_context(config: KafkaConfig) -> Optional[ssl.SSLContext]: + """ + Generate ssl_context for connecting to Kafka broker via an encrypted SSL connection + """ + if config.security_protocol == "SSL": + return create_ssl_context( + cafile=config.ssl_cafile, # CA used to sign certificate. + # `CARoot` of JKS store container + certfile=config.ssl_certfile, # Signed certificate + keyfile=config.ssl_keyfile, # Private Key file of `certfile` certificate + password=config.ssl_password, + ) + return None + + class KafkaProducerCompatible(Protocol): """A python duck type protocol describing an AIOKafkaProducer or equivalent.""" @@ -161,7 +202,7 @@ async def construct( producer = kafka_producer_cls( bootstrap_servers=",".join(config.kafka_servers), security_protocol=config.security_protocol, - ssl_context=config.ssl_context, + ssl_context=generate_ssl_context(config), client_id=client_id, key_serializer=lambda key: key.encode("ascii"), value_serializer=lambda event_value: json.dumps(event_value).encode( @@ -311,7 +352,7 @@ async def construct( *topics, bootstrap_servers=",".join(config.kafka_servers), security_protocol=config.security_protocol, - ssl_context=config.ssl_context, + ssl_context=generate_ssl_context(config), client_id=client_id, group_id=config.service_name, auto_offset_reset="earliest", From df3a8e37640022a0b6fc5706c3c7c227065cbdc9 Mon Sep 17 00:00:00 2001 From: walzph Date: Mon, 2 Oct 2023 09:36:37 +0000 Subject: [PATCH 03/29] Fix comments --- src/hexkit/providers/akafka/provider.py | 23 ++++++++++++++--------- 1 file changed, 14 insertions(+), 9 deletions(-) diff --git a/src/hexkit/providers/akafka/provider.py b/src/hexkit/providers/akafka/provider.py index 1d95a922..42795795 100644 --- a/src/hexkit/providers/akafka/provider.py +++ b/src/hexkit/providers/akafka/provider.py @@ -23,9 +23,9 @@ import json import logging -from contextlib import asynccontextmanager import ssl -from typing import Any, Callable, Protocol, TypeVar, Optional +from contextlib import asynccontextmanager +from typing import Any, Callable, Optional, Protocol, TypeVar from aiokafka import AIOKafkaConsumer, AIOKafkaProducer from pydantic import Field @@ -125,15 +125,16 @@ def generate_ssl_context(config: KafkaConfig) -> Optional[ssl.SSLContext]: """ Generate ssl_context for connecting to Kafka broker via an 
encrypted SSL connection """ - if config.security_protocol == "SSL": - return create_ssl_context( - cafile=config.ssl_cafile, # CA used to sign certificate. - # `CARoot` of JKS store container - certfile=config.ssl_certfile, # Signed certificate - keyfile=config.ssl_keyfile, # Private Key file of `certfile` certificate + return ( + create_ssl_context( + cafile=config.ssl_cafile, + certfile=config.ssl_certfile, + keyfile=config.ssl_keyfile, password=config.ssl_password, ) - return None + if config.security_protocol == "SSL" + else None + ) class KafkaProducerCompatible(Protocol): @@ -143,6 +144,8 @@ def __init__( self, *, bootstrap_servers: str, + security_protocol: str, + ssl_context: Optional[ssl.SSLContext], client_id: str, key_serializer: Callable[[Any], bytes], value_serializer: Callable[[Any], bytes], @@ -274,6 +277,8 @@ def __init__( # noqa: PLR0913 self, *topics: Ascii, bootstrap_servers: str, + security_protocol: str, + ssl_context: Optional[ssl.SSLContext], client_id: str, group_id: str, auto_offset_reset: Literal["earliest"], From d487be40188da6ae6f5ea3d9f62397fde8eea7ec Mon Sep 17 00:00:00 2001 From: KerstenBreuer Date: Fri, 6 Oct 2023 15:20:37 +0000 Subject: [PATCH 04/29] update to template --- migrate_v3.py | 240 ++++++++++++++++++++++++ src/hexkit/providers/akafka/provider.py | 4 +- 2 files changed, 241 insertions(+), 3 deletions(-) create mode 100755 migrate_v3.py diff --git a/migrate_v3.py b/migrate_v3.py new file mode 100755 index 00000000..0ccbb9ab --- /dev/null +++ b/migrate_v3.py @@ -0,0 +1,240 @@ +#!/usr/bin/env python3 + +import configparser +import os +import re +from importlib import import_module + +from scripts.get_package_name import get_package_name +from scripts.update_template_files import update_files + + +def get_version_from_init(filepath: str) -> str: + """Pull `__version__` from the top `__init__.py`.""" + with open(filepath, encoding="utf-8") as file: + for line in file: + if line.startswith("__version__"): + # Split the string on the equals sign and strip spaces and quotes + return line.split("=")[1].strip().strip('"') + raise RuntimeError("Unable to get version from __init__") + + +def get_config_from_setup(is_microservice: bool) -> dict[str, str]: + """Get all applicable config from `setup.cfg`: + + 1. metadata.description + 2. metadata.url + 3. options.install_requires + 4. options.extras_require + 5. 
options.entry_points.console_scripts + """ + parser = configparser.ConfigParser() + parser.read("./setup.cfg") + metadata = parser["metadata"] + options = parser["options"] + extras_require = "options.extras_require" + entrypoints = "options.entry_points" + + config = {} + config["description"] = metadata["description"] + config["url"] = metadata["url"] + + dependencies = options["install_requires"].strip() + if is_microservice: + dependencies = dependencies.replace("==", ">=") + config["dependencies"] = [dependency for dependency in dependencies.split("\n")] + + if not is_microservice and extras_require in parser: + extras = [] + for extra, deps in parser[extras_require].items(): + deps = [dep.replace("==", ">=") for dep in deps] + + if not deps: + continue + deps_formatted = [f'"{dep}"' for dep in deps] + extra_formatted = f'{extra} = [{", ".join(deps_formatted)}]' + extras.append(extra_formatted) + if extras: + config["optional-dependencies"] = extras + + if entrypoints in parser: + config["scripts"] = {} + scripts = parser[entrypoints]["console_scripts"].strip().split("\n") + for script in scripts: + name, value = script.split("=") + config["scripts"][name.strip()] = value.strip() + + return config + + +def update_init(package_name: str, version: str): + """Replace static version value in __init__.py with retrieval code.""" + with open(f"./src/{package_name}/__init__.py", encoding="utf-8") as init_read: + content = init_read.read() + + content = re.sub( + pattern=rf"__version__\s*=\s*\"{version}\"", + repl="from importlib.metadata import version\n\n__version__ = version(__package__)", + string=content, + ) + with open(f"./src/{package_name}/__init__.py", "w", encoding="utf-8") as init_write: + init_write.write(content) + + +def update_structure(package_name: str): + """Move the code into `/src/`.""" + os.renames(f"./{package_name}", f"./src/{package_name}") + + +def pre_process_pyproject(): + """Remove comments from pyproject so tomli can write values okay""" + with open("pyproject.toml", encoding="utf-8") as file: + pre_processed = file.read() + + pre_processed = re.sub("# please adapt to package name", "", pre_processed) + + with open("pyproject.toml", "w", encoding="utf-8") as file: + file.write(pre_processed) + + +def update_pyproject_toml(package_name: str, version: str, config: dict[str, str]): + """Update the project metadata.""" + os.system("pip install tomli") + tomli = import_module("tomli") + with open("pyproject.toml", "rb") as pyproject_toml: + pyproject = tomli.load(pyproject_toml) + + pyproject["project"]["name"] = package_name + pyproject["project"]["version"] = version + pyproject["project"]["description"] = config["description"] + pyproject["project"]["dependencies"] = config["dependencies"] + if "optional-dependencies" in config: + pyproject["project"]["optional-dependencies"] = config["optional-dependencies"] + pyproject["project"]["urls"] = {"Repository": config["url"]} + + # write the final output + os.system("pip install tomli_w") + tomli_w = import_module("tomli_w") + with open("pyproject.toml", "wb") as modified_pyproject_toml: + tomli_w.dump(pyproject, modified_pyproject_toml) + + +def collapse_list_string(match: re.Match) -> str: + """Remove whitespace and trailing comma for one-element list""" + i, j = match.span() + substr = match.string[i:j] + formatted = re.sub(r",\s*|\s*", "", substr) + return formatted + + +def post_process_pyproject(): + """Collapse single-element lists""" + with open("pyproject.toml", encoding="utf-8") as file: + post_processed = 
file.read() + + arr_pattern = r"\[\s*(?:\".*?\",){1}\s*]" + + post_processed = re.sub( + pattern=arr_pattern, + repl=collapse_list_string, + string=post_processed, + ) + + with open("pyproject.toml", "w", encoding="utf-8") as file: + file.write(post_processed) + + +def update_template_files(): + """Run `scripts/update_template_files.py`.""" + os.system("scripts/update_template_files.py") + + +def prompt(msg: str): + """Prompt user to do something before continuing.""" + while ( + input(f"{msg} (type 'done' to continue, CTRL+Z to quit)\n> ").strip().lower() + != "done" + ): + pass + + +def delete(filename: str): + """Remove the given file.""" + try: + os.remove(filename) + except: + print(f"Skipped deleting {filename} because it doesn't exist") + + +def install_new_tools(package_name: str): + """Install the packages needed to run the new scripts.""" + # grab the deps.py and lock_deps.py scripts because I forgot to update .static_files + update_files(["scripts/script_utils/deps.py", "scripts/script_utils/lock_deps.py"]) + os.system("pip install httpx") + os.system("pip install pip-tools") + os.system("pip install stringcase") + os.system(f"pip uninstall {package_name}") + os.system("pip install -e .") + + +def ask_yes_no(msg: str) -> bool: + """Ask user yes or no.""" + while (answer := input(f"{msg}\n> ").strip().lower()) not in ("y", "n"): + pass + + return answer == "y" + + +def fix_requirements_dev(): + """Copy contents of `requirements-dev.txt` over to `.in` version + + Also make sure that dependencies are uncapped here. + """ + with open("requirements-dev.txt", encoding="utf-8") as file: + initial_content = file.read() + + updated_content = re.sub("common.txt", "common.in", initial_content) + + if initial_content == updated_content: + updated_content += "\n-r requirements-dev-common.in" + + updated_content = re.sub("==", ">=", updated_content) + + with open("requirements-dev.txt", "w", encoding="utf-8") as file: + file.write(updated_content) + + os.rename("./requirements-dev.txt", "./requirements-dev.in") + + +def main(): + """Apply all needed changes to bring the repo in line with latest template version.""" + is_microservice = ask_yes_no("Is this a microservice (i.e. not a library?) (y/n)") + package_name = get_package_name() + version = "0.11.0" + + update_structure(package_name) + update_init(package_name, version) + + delete("./requirements.txt") + delete("./requirements-dev.in") + delete("./requirements-dev-common.txt") + fix_requirements_dev() + + delete("./.pre-commit-config.yaml") + update_files([".pre-commit-config.yaml"]) + + install_new_tools(package_name) + os.system("scripts/list_outdated_dependencies.py") + prompt("Review the list above. Consider updating any outdated dependencies now.") + + print("\nBuilding lock files. 
This could take a few minutes...") + os.system("scripts/update_lock.py") + + print("\nUpdating pre-commit-hook versions...") + os.system("scripts/update_hook_revs.py") + + print("Done - rebuild the dev container") + + +if __name__ == "__main__": + main() diff --git a/src/hexkit/providers/akafka/provider.py b/src/hexkit/providers/akafka/provider.py index 42795795..9469fc89 100644 --- a/src/hexkit/providers/akafka/provider.py +++ b/src/hexkit/providers/akafka/provider.py @@ -122,9 +122,7 @@ def generate_client_id(*, service_name: str, instance_id: str) -> str: def generate_ssl_context(config: KafkaConfig) -> Optional[ssl.SSLContext]: - """ - Generate ssl_context for connecting to Kafka broker via an encrypted SSL connection - """ + """Generate ssl_context for connecting to Kafka broker via an encrypted SSL connection""" return ( create_ssl_context( cafile=config.ssl_cafile, From 1085ed74a284eb224115a85f8134d996983239ad Mon Sep 17 00:00:00 2001 From: KerstenBreuer Date: Sat, 7 Oct 2023 12:35:56 +0000 Subject: [PATCH 05/29] update to template --- migrate_v3.py | 240 ----------------------- src/hexkit/providers/akafka/provider.py | 2 +- src/hexkit/providers/akafka/testutils.py | 2 +- 3 files changed, 2 insertions(+), 242 deletions(-) delete mode 100755 migrate_v3.py diff --git a/migrate_v3.py b/migrate_v3.py deleted file mode 100755 index 0ccbb9ab..00000000 --- a/migrate_v3.py +++ /dev/null @@ -1,240 +0,0 @@ -#!/usr/bin/env python3 - -import configparser -import os -import re -from importlib import import_module - -from scripts.get_package_name import get_package_name -from scripts.update_template_files import update_files - - -def get_version_from_init(filepath: str) -> str: - """Pull `__version__` from the top `__init__.py`.""" - with open(filepath, encoding="utf-8") as file: - for line in file: - if line.startswith("__version__"): - # Split the string on the equals sign and strip spaces and quotes - return line.split("=")[1].strip().strip('"') - raise RuntimeError("Unable to get version from __init__") - - -def get_config_from_setup(is_microservice: bool) -> dict[str, str]: - """Get all applicable config from `setup.cfg`: - - 1. metadata.description - 2. metadata.url - 3. options.install_requires - 4. options.extras_require - 5. 
options.entry_points.console_scripts - """ - parser = configparser.ConfigParser() - parser.read("./setup.cfg") - metadata = parser["metadata"] - options = parser["options"] - extras_require = "options.extras_require" - entrypoints = "options.entry_points" - - config = {} - config["description"] = metadata["description"] - config["url"] = metadata["url"] - - dependencies = options["install_requires"].strip() - if is_microservice: - dependencies = dependencies.replace("==", ">=") - config["dependencies"] = [dependency for dependency in dependencies.split("\n")] - - if not is_microservice and extras_require in parser: - extras = [] - for extra, deps in parser[extras_require].items(): - deps = [dep.replace("==", ">=") for dep in deps] - - if not deps: - continue - deps_formatted = [f'"{dep}"' for dep in deps] - extra_formatted = f'{extra} = [{", ".join(deps_formatted)}]' - extras.append(extra_formatted) - if extras: - config["optional-dependencies"] = extras - - if entrypoints in parser: - config["scripts"] = {} - scripts = parser[entrypoints]["console_scripts"].strip().split("\n") - for script in scripts: - name, value = script.split("=") - config["scripts"][name.strip()] = value.strip() - - return config - - -def update_init(package_name: str, version: str): - """Replace static version value in __init__.py with retrieval code.""" - with open(f"./src/{package_name}/__init__.py", encoding="utf-8") as init_read: - content = init_read.read() - - content = re.sub( - pattern=rf"__version__\s*=\s*\"{version}\"", - repl="from importlib.metadata import version\n\n__version__ = version(__package__)", - string=content, - ) - with open(f"./src/{package_name}/__init__.py", "w", encoding="utf-8") as init_write: - init_write.write(content) - - -def update_structure(package_name: str): - """Move the code into `/src/`.""" - os.renames(f"./{package_name}", f"./src/{package_name}") - - -def pre_process_pyproject(): - """Remove comments from pyproject so tomli can write values okay""" - with open("pyproject.toml", encoding="utf-8") as file: - pre_processed = file.read() - - pre_processed = re.sub("# please adapt to package name", "", pre_processed) - - with open("pyproject.toml", "w", encoding="utf-8") as file: - file.write(pre_processed) - - -def update_pyproject_toml(package_name: str, version: str, config: dict[str, str]): - """Update the project metadata.""" - os.system("pip install tomli") - tomli = import_module("tomli") - with open("pyproject.toml", "rb") as pyproject_toml: - pyproject = tomli.load(pyproject_toml) - - pyproject["project"]["name"] = package_name - pyproject["project"]["version"] = version - pyproject["project"]["description"] = config["description"] - pyproject["project"]["dependencies"] = config["dependencies"] - if "optional-dependencies" in config: - pyproject["project"]["optional-dependencies"] = config["optional-dependencies"] - pyproject["project"]["urls"] = {"Repository": config["url"]} - - # write the final output - os.system("pip install tomli_w") - tomli_w = import_module("tomli_w") - with open("pyproject.toml", "wb") as modified_pyproject_toml: - tomli_w.dump(pyproject, modified_pyproject_toml) - - -def collapse_list_string(match: re.Match) -> str: - """Remove whitespace and trailing comma for one-element list""" - i, j = match.span() - substr = match.string[i:j] - formatted = re.sub(r",\s*|\s*", "", substr) - return formatted - - -def post_process_pyproject(): - """Collapse single-element lists""" - with open("pyproject.toml", encoding="utf-8") as file: - post_processed = 
file.read() - - arr_pattern = r"\[\s*(?:\".*?\",){1}\s*]" - - post_processed = re.sub( - pattern=arr_pattern, - repl=collapse_list_string, - string=post_processed, - ) - - with open("pyproject.toml", "w", encoding="utf-8") as file: - file.write(post_processed) - - -def update_template_files(): - """Run `scripts/update_template_files.py`.""" - os.system("scripts/update_template_files.py") - - -def prompt(msg: str): - """Prompt user to do something before continuing.""" - while ( - input(f"{msg} (type 'done' to continue, CTRL+Z to quit)\n> ").strip().lower() - != "done" - ): - pass - - -def delete(filename: str): - """Remove the given file.""" - try: - os.remove(filename) - except: - print(f"Skipped deleting {filename} because it doesn't exist") - - -def install_new_tools(package_name: str): - """Install the packages needed to run the new scripts.""" - # grab the deps.py and lock_deps.py scripts because I forgot to update .static_files - update_files(["scripts/script_utils/deps.py", "scripts/script_utils/lock_deps.py"]) - os.system("pip install httpx") - os.system("pip install pip-tools") - os.system("pip install stringcase") - os.system(f"pip uninstall {package_name}") - os.system("pip install -e .") - - -def ask_yes_no(msg: str) -> bool: - """Ask user yes or no.""" - while (answer := input(f"{msg}\n> ").strip().lower()) not in ("y", "n"): - pass - - return answer == "y" - - -def fix_requirements_dev(): - """Copy contents of `requirements-dev.txt` over to `.in` version - - Also make sure that dependencies are uncapped here. - """ - with open("requirements-dev.txt", encoding="utf-8") as file: - initial_content = file.read() - - updated_content = re.sub("common.txt", "common.in", initial_content) - - if initial_content == updated_content: - updated_content += "\n-r requirements-dev-common.in" - - updated_content = re.sub("==", ">=", updated_content) - - with open("requirements-dev.txt", "w", encoding="utf-8") as file: - file.write(updated_content) - - os.rename("./requirements-dev.txt", "./requirements-dev.in") - - -def main(): - """Apply all needed changes to bring the repo in line with latest template version.""" - is_microservice = ask_yes_no("Is this a microservice (i.e. not a library?) (y/n)") - package_name = get_package_name() - version = "0.11.0" - - update_structure(package_name) - update_init(package_name, version) - - delete("./requirements.txt") - delete("./requirements-dev.in") - delete("./requirements-dev-common.txt") - fix_requirements_dev() - - delete("./.pre-commit-config.yaml") - update_files([".pre-commit-config.yaml"]) - - install_new_tools(package_name) - os.system("scripts/list_outdated_dependencies.py") - prompt("Review the list above. Consider updating any outdated dependencies now.") - - print("\nBuilding lock files. 
This could take a few minutes...") - os.system("scripts/update_lock.py") - - print("\nUpdating pre-commit-hook versions...") - os.system("scripts/update_hook_revs.py") - - print("Done - rebuild the dev container") - - -if __name__ == "__main__": - main() diff --git a/src/hexkit/providers/akafka/provider.py b/src/hexkit/providers/akafka/provider.py index 9469fc89..c863139e 100644 --- a/src/hexkit/providers/akafka/provider.py +++ b/src/hexkit/providers/akafka/provider.py @@ -138,7 +138,7 @@ def generate_ssl_context(config: KafkaConfig) -> Optional[ssl.SSLContext]: class KafkaProducerCompatible(Protocol): """A python duck type protocol describing an AIOKafkaProducer or equivalent.""" - def __init__( + def __init__( # noqa: PLR0913 self, *, bootstrap_servers: str, diff --git a/src/hexkit/providers/akafka/testutils.py b/src/hexkit/providers/akafka/testutils.py index 7d0a3442..340f462e 100644 --- a/src/hexkit/providers/akafka/testutils.py +++ b/src/hexkit/providers/akafka/testutils.py @@ -405,7 +405,7 @@ async def kafka_fixture_function() -> AsyncGenerator[KafkaFixture, None]: """ with KafkaContainer(image="confluentinc/cp-kafka:5.4.9-1-deb8") as kafka: kafka_servers = [kafka.get_bootstrap_server()] - config = KafkaConfig( + config = KafkaConfig( # type: ignore service_name="test_publisher", service_instance_id="001", kafka_servers=kafka_servers, From 731170f7d63cfd70e9f45e0185cacad4eac08b39 Mon Sep 17 00:00:00 2001 From: KerstenBreuer Date: Sat, 7 Oct 2023 12:48:15 +0000 Subject: [PATCH 06/29] fix mypy --- examples/stream_calc/sc_tests/integration/test_event_api.py | 4 ++-- examples/stream_calc/stream_calc/main.py | 4 +++- tests/integration/test_akafka.py | 4 ++-- tests/integration/test_akafka_testutils.py | 6 +++--- tests/unit/test_akafka.py | 4 ++-- 5 files changed, 12 insertions(+), 10 deletions(-) diff --git a/examples/stream_calc/sc_tests/integration/test_event_api.py b/examples/stream_calc/sc_tests/integration/test_event_api.py index 217b457c..34606ce1 100644 --- a/examples/stream_calc/sc_tests/integration/test_event_api.py +++ b/examples/stream_calc/sc_tests/integration/test_event_api.py @@ -33,7 +33,7 @@ from stream_calc.config import Config from stream_calc.main import main -DEFAULT_CONFIG = Config() +DEFAULT_CONFIG = Config() # type: ignore class Event(NamedTuple): @@ -207,7 +207,7 @@ async def test_receive_calc_publish(cases: list[Case] = deepcopy(CASES)): # run the stream_calc app: # (for each problem separately to avoid running forever) - config = Config(kafka_servers=[kafka_server]) + config = Config(kafka_servers=[kafka_server]) # type: ignore for _ in cases: await main(config=config, run_forever=False) diff --git a/examples/stream_calc/stream_calc/main.py b/examples/stream_calc/stream_calc/main.py index 73b8cf4e..3dbe1484 100644 --- a/examples/stream_calc/stream_calc/main.py +++ b/examples/stream_calc/stream_calc/main.py @@ -38,7 +38,9 @@ def get_container(config: Config) -> Container: return container -async def main(*, config: Config = Config(), run_forever: bool = True) -> None: +async def main( + *, config: Config = Config(), run_forever: bool = True # type: ignore +) -> None: """ Coroutine to run the stream calculator. 
diff --git a/tests/integration/test_akafka.py b/tests/integration/test_akafka.py index 5d245326..aa1049c8 100644 --- a/tests/integration/test_akafka.py +++ b/tests/integration/test_akafka.py @@ -41,7 +41,7 @@ async def test_kafka_event_publisher(kafka_fixture: KafkaFixture): # noqa: F811 key = "test_key" topic = "test_topic" - config = KafkaConfig( + config = KafkaConfig( # type: ignore service_name="test_publisher", service_instance_id="1", kafka_servers=kafka_fixture.kafka_servers, @@ -79,7 +79,7 @@ async def test_kafka_event_subscriber(kafka_fixture: KafkaFixture): # noqa: F81 ) # setup the provider: - config = KafkaConfig( + config = KafkaConfig( # type: ignore service_name="event_subscriber", service_instance_id="1", kafka_servers=kafka_fixture.kafka_servers, diff --git a/tests/integration/test_akafka_testutils.py b/tests/integration/test_akafka_testutils.py index b504205d..9e54c16a 100644 --- a/tests/integration/test_akafka_testutils.py +++ b/tests/integration/test_akafka_testutils.py @@ -100,7 +100,7 @@ async def test_event_recorder( """Test event recording using the EventRecorder class.""" topic = "test_topic" - config = KafkaConfig( + config = KafkaConfig( # type: ignore service_name="test_publisher", service_instance_id="1", kafka_servers=kafka_fixture.kafka_servers, @@ -142,7 +142,7 @@ async def test_expect_events_happy( ] topic = "test_topic" - config = KafkaConfig( + config = KafkaConfig( # type: ignore service_name="test_publisher", service_instance_id="1", kafka_servers=kafka_fixture.kafka_servers, @@ -240,7 +240,7 @@ async def test_expect_events_mismatch( ] topic = "test_topic" - config = KafkaConfig( + config = KafkaConfig( # type: ignore service_name="test_publisher", service_instance_id="1", kafka_servers=kafka_fixture.kafka_servers, diff --git a/tests/unit/test_akafka.py b/tests/unit/test_akafka.py index 354cdff5..98ce958f 100644 --- a/tests/unit/test_akafka.py +++ b/tests/unit/test_akafka.py @@ -45,7 +45,7 @@ async def test_kafka_event_publisher(): producer_class = Mock(return_value=producer) # publish event using the provider: - config = KafkaConfig( + config = KafkaConfig( # type: ignore service_name="test_publisher", service_instance_id="1", kafka_servers=["my-fake-kafka-server"], @@ -156,7 +156,7 @@ async def test_kafka_event_subscriber( translator.types_of_interest = types_of_interest # setup the provider: - config = KafkaConfig( + config = KafkaConfig( # type: ignore service_name=service_name, service_instance_id="1", kafka_servers=["my-fake-kafka-server"], From 2cd86411937ea5d87d2e2d43621a980df9199f6f Mon Sep 17 00:00:00 2001 From: Seyit Zor Date: Thu, 2 Nov 2023 09:41:58 +0000 Subject: [PATCH 07/29] WIP test --- pyproject.toml | 2 + requirements-dev.txt | 144 +++++++++++++++++++- requirements.txt | 163 ++++++++++++++++++++++- src/hexkit/providers/akafka/provider.py | 10 +- src/hexkit/providers/akafka/testutils.py | 94 ++++++++++++- tests/integration/test_akafka.py | 6 + 6 files changed, 411 insertions(+), 8 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 4f6c68fe..8bab22dd 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -28,6 +28,8 @@ dependencies = [ "pydantic_settings >=2, <3", "PyYAML >=6.0, <7", "dependency-injector >=4.41.0, <5", + "pyjks >=20, <21", + "pyOpenSSL >=23.3.0, <24" ] [project.optional-dependencies] diff --git a/requirements-dev.txt b/requirements-dev.txt index e7c617f1..1fea0a05 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -2,7 +2,7 @@ # This file is autogenerated by pip-compile with Python 3.9 # by the 
following command: # -# pip-compile --all-extras --generate-hashes --output-file=/workspace/requirements-dev.txt /tmp/tmpye1zf3r4/pyproject.toml /workspace/requirements-dev.in +# pip-compile --all-extras --generate-hashes --output-file=/workspace/requirements-dev.txt /tmp/tmp3yb_4wf4/pyproject.toml /workspace/requirements-dev.in # aiokafka==0.8.1 \ --hash=sha256:1e24839088fd6d3ff481cc09a48ea487b997328df11630bc0a1b88255edbcfe9 \ @@ -95,6 +95,60 @@ certifi==2023.7.22 \ # httpcore # httpx # requests +cffi==1.16.0 \ + --hash=sha256:0c9ef6ff37e974b73c25eecc13952c55bceed9112be2d9d938ded8e856138bcc \ + --hash=sha256:131fd094d1065b19540c3d72594260f118b231090295d8c34e19a7bbcf2e860a \ + --hash=sha256:1b8ebc27c014c59692bb2664c7d13ce7a6e9a629be20e54e7271fa696ff2b417 \ + --hash=sha256:2c56b361916f390cd758a57f2e16233eb4f64bcbeee88a4881ea90fca14dc6ab \ + --hash=sha256:2d92b25dbf6cae33f65005baf472d2c245c050b1ce709cc4588cdcdd5495b520 \ + --hash=sha256:31d13b0f99e0836b7ff893d37af07366ebc90b678b6664c955b54561fc36ef36 \ + --hash=sha256:32c68ef735dbe5857c810328cb2481e24722a59a2003018885514d4c09af9743 \ + --hash=sha256:3686dffb02459559c74dd3d81748269ffb0eb027c39a6fc99502de37d501faa8 \ + --hash=sha256:582215a0e9adbe0e379761260553ba11c58943e4bbe9c36430c4ca6ac74b15ed \ + --hash=sha256:5b50bf3f55561dac5438f8e70bfcdfd74543fd60df5fa5f62d94e5867deca684 \ + --hash=sha256:5bf44d66cdf9e893637896c7faa22298baebcd18d1ddb6d2626a6e39793a1d56 \ + --hash=sha256:6602bc8dc6f3a9e02b6c22c4fc1e47aa50f8f8e6d3f78a5e16ac33ef5fefa324 \ + --hash=sha256:673739cb539f8cdaa07d92d02efa93c9ccf87e345b9a0b556e3ecc666718468d \ + --hash=sha256:68678abf380b42ce21a5f2abde8efee05c114c2fdb2e9eef2efdb0257fba1235 \ + --hash=sha256:68e7c44931cc171c54ccb702482e9fc723192e88d25a0e133edd7aff8fcd1f6e \ + --hash=sha256:6b3d6606d369fc1da4fd8c357d026317fbb9c9b75d36dc16e90e84c26854b088 \ + --hash=sha256:748dcd1e3d3d7cd5443ef03ce8685043294ad6bd7c02a38d1bd367cfd968e000 \ + --hash=sha256:7651c50c8c5ef7bdb41108b7b8c5a83013bfaa8a935590c5d74627c047a583c7 \ + --hash=sha256:7b78010e7b97fef4bee1e896df8a4bbb6712b7f05b7ef630f9d1da00f6444d2e \ + --hash=sha256:7e61e3e4fa664a8588aa25c883eab612a188c725755afff6289454d6362b9673 \ + --hash=sha256:80876338e19c951fdfed6198e70bc88f1c9758b94578d5a7c4c91a87af3cf31c \ + --hash=sha256:8895613bcc094d4a1b2dbe179d88d7fb4a15cee43c052e8885783fac397d91fe \ + --hash=sha256:88e2b3c14bdb32e440be531ade29d3c50a1a59cd4e51b1dd8b0865c54ea5d2e2 \ + --hash=sha256:8f8e709127c6c77446a8c0a8c8bf3c8ee706a06cd44b1e827c3e6a2ee6b8c098 \ + --hash=sha256:9cb4a35b3642fc5c005a6755a5d17c6c8b6bcb6981baf81cea8bfbc8903e8ba8 \ + --hash=sha256:9f90389693731ff1f659e55c7d1640e2ec43ff725cc61b04b2f9c6d8d017df6a \ + --hash=sha256:a09582f178759ee8128d9270cd1344154fd473bb77d94ce0aeb2a93ebf0feaf0 \ + --hash=sha256:a6a14b17d7e17fa0d207ac08642c8820f84f25ce17a442fd15e27ea18d67c59b \ + --hash=sha256:a72e8961a86d19bdb45851d8f1f08b041ea37d2bd8d4fd19903bc3083d80c896 \ + --hash=sha256:abd808f9c129ba2beda4cfc53bde801e5bcf9d6e0f22f095e45327c038bfe68e \ + --hash=sha256:ac0f5edd2360eea2f1daa9e26a41db02dd4b0451b48f7c318e217ee092a213e9 \ + --hash=sha256:b29ebffcf550f9da55bec9e02ad430c992a87e5f512cd63388abb76f1036d8d2 \ + --hash=sha256:b2ca4e77f9f47c55c194982e10f058db063937845bb2b7a86c84a6cfe0aefa8b \ + --hash=sha256:b7be2d771cdba2942e13215c4e340bfd76398e9227ad10402a8767ab1865d2e6 \ + --hash=sha256:b84834d0cf97e7d27dd5b7f3aca7b6e9263c56308ab9dc8aae9784abb774d404 \ + --hash=sha256:b86851a328eedc692acf81fb05444bdf1891747c25af7529e39ddafaf68a4f3f \ + 
--hash=sha256:bcb3ef43e58665bbda2fb198698fcae6776483e0c4a631aa5647806c25e02cc0 \ + --hash=sha256:c0f31130ebc2d37cdd8e44605fb5fa7ad59049298b3f745c74fa74c62fbfcfc4 \ + --hash=sha256:c6a164aa47843fb1b01e941d385aab7215563bb8816d80ff3a363a9f8448a8dc \ + --hash=sha256:d8a9d3ebe49f084ad71f9269834ceccbf398253c9fac910c4fd7053ff1386936 \ + --hash=sha256:db8e577c19c0fda0beb7e0d4e09e0ba74b1e4c092e0e40bfa12fe05b6f6d75ba \ + --hash=sha256:dc9b18bf40cc75f66f40a7379f6a9513244fe33c0e8aa72e2d56b0196a7ef872 \ + --hash=sha256:e09f3ff613345df5e8c3667da1d918f9149bd623cd9070c983c013792a9a62eb \ + --hash=sha256:e4108df7fe9b707191e55f33efbcb2d81928e10cea45527879a4749cbe472614 \ + --hash=sha256:e6024675e67af929088fda399b2094574609396b1decb609c55fa58b028a32a1 \ + --hash=sha256:e70f54f1796669ef691ca07d046cd81a29cb4deb1e5f942003f401c0c4a2695d \ + --hash=sha256:e715596e683d2ce000574bae5d07bd522c781a822866c20495e52520564f0969 \ + --hash=sha256:e760191dd42581e023a68b758769e2da259b5d52e3103c6060ddc02c9edb8d7b \ + --hash=sha256:ed86a35631f7bfbb28e108dd96773b9d5a6ce4811cf6ea468bb6a359b256b1e4 \ + --hash=sha256:ee07e47c12890ef248766a6e55bd38ebfb2bb8edd4142d56db91b21ea68b7627 \ + --hash=sha256:fa3a0128b152627161ce47201262d3140edb5a5c3da88d73a1b790a959126956 \ + --hash=sha256:fcc8eb6d5902bb1cf6dc4f187ee3ea80a1eba0a89aba40a5cb20a5087d961357 + # via cryptography cfgv==3.4.0 \ --hash=sha256:b7265b1f29fd3316bfcd2b330d63d024f2bfd8bcb8b0272f8e19a504856c48f9 \ --hash=sha256:e52591d4c5f5dead8e0f673fb16db7949d2cfb3f7da4582893288f0ded8fe560 @@ -255,6 +309,31 @@ coverage[toml]==7.3.2 \ # via # coverage # pytest-cov +cryptography==41.0.5 \ + --hash=sha256:0c327cac00f082013c7c9fb6c46b7cc9fa3c288ca702c74773968173bda421bf \ + --hash=sha256:0d2a6a598847c46e3e321a7aef8af1436f11c27f1254933746304ff014664d84 \ + --hash=sha256:227ec057cd32a41c6651701abc0328135e472ed450f47c2766f23267b792a88e \ + --hash=sha256:22892cc830d8b2c89ea60148227631bb96a7da0c1b722f2aac8824b1b7c0b6b8 \ + --hash=sha256:392cb88b597247177172e02da6b7a63deeff1937fa6fec3bbf902ebd75d97ec7 \ + --hash=sha256:3be3ca726e1572517d2bef99a818378bbcf7d7799d5372a46c79c29eb8d166c1 \ + --hash=sha256:573eb7128cbca75f9157dcde974781209463ce56b5804983e11a1c462f0f4e88 \ + --hash=sha256:580afc7b7216deeb87a098ef0674d6ee34ab55993140838b14c9b83312b37b86 \ + --hash=sha256:5a70187954ba7292c7876734183e810b728b4f3965fbe571421cb2434d279179 \ + --hash=sha256:73801ac9736741f220e20435f84ecec75ed70eda90f781a148f1bad546963d81 \ + --hash=sha256:7d208c21e47940369accfc9e85f0de7693d9a5d843c2509b3846b2db170dfd20 \ + --hash=sha256:8254962e6ba1f4d2090c44daf50a547cd5f0bf446dc658a8e5f8156cae0d8548 \ + --hash=sha256:88417bff20162f635f24f849ab182b092697922088b477a7abd6664ddd82291d \ + --hash=sha256:a48e74dad1fb349f3dc1d449ed88e0017d792997a7ad2ec9587ed17405667e6d \ + --hash=sha256:b948e09fe5fb18517d99994184854ebd50b57248736fd4c720ad540560174ec5 \ + --hash=sha256:c707f7afd813478e2019ae32a7c49cd932dd60ab2d2a93e796f68236b7e1fbf1 \ + --hash=sha256:d38e6031e113b7421db1de0c1b1f7739564a88f1684c6b89234fbf6c11b75147 \ + --hash=sha256:d3977f0e276f6f5bf245c403156673db103283266601405376f075c849a0b936 \ + --hash=sha256:da6a0ff8f1016ccc7477e6339e1d50ce5f59b88905585f77193ebd5068f1e797 \ + --hash=sha256:e270c04f4d9b5671ebcc792b3ba5d4488bf7c42c3c241a3748e2599776f29696 \ + --hash=sha256:e886098619d3815e0ad5790c973afeee2c0e6e04b4da90b88e6bd06e2a0b1b72 \ + --hash=sha256:ec3b055ff8f1dce8e6ef28f626e0972981475173d7973d63f271b29c8a2897da \ + --hash=sha256:fba1e91467c65fe64a82c689dc6cf58151158993b13eb7a7f3f4b7f395636723 + # via pyopenssl 
dependency-injector==4.41.0 \ --hash=sha256:02620454ee8101f77a317f3229935ce687480883d72a40858ff4b0c87c935cce \ --hash=sha256:059fbb48333148143e8667a5323d162628dfe27c386bd0ed3deeecfc390338bf \ @@ -390,6 +469,10 @@ iniconfig==2.0.0 \ --hash=sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3 \ --hash=sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374 # via pytest +javaobj-py3==0.4.3 \ + --hash=sha256:38f74db3a57e9998a9774e3614afb95cb396f139f29b3fdb130c5af554435259 \ + --hash=sha256:f6ac64cab49e282cf8171d4c479de413dedbbb1a69c64499648185f974080db3 + # via pyjks jmespath==1.0.1 \ --hash=sha256:02e2e4cc71b5bcab88332eebf907519190dd9e6e82107fa7f83b1003a6252980 \ --hash=sha256:90261b206d6defd58fdd5e85f478bf633a2901798906be2ad389150c5c60edbe @@ -490,6 +573,54 @@ pre-commit==3.4.0 \ --hash=sha256:6bbd5129a64cad4c0dfaeeb12cd8f7ea7e15b77028d985341478c8af3c759522 \ --hash=sha256:96d529a951f8b677f730a7212442027e8ba53f9b04d217c4c67dc56c393ad945 # via -r /workspace/requirements-dev-common.in +pyasn1==0.5.0 \ + --hash=sha256:87a2121042a1ac9358cabcaf1d07680ff97ee6404333bacca15f76aa8ad01a57 \ + --hash=sha256:97b7290ca68e62a832558ec3976f15cbf911bf5d7c7039d8b861c2a0ece69fde + # via + # pyasn1-modules + # pyjks +pyasn1-modules==0.3.0 \ + --hash=sha256:5bd01446b736eb9d31512a30d46c1ac3395d676c6f3cafa4c03eb54b9925631c \ + --hash=sha256:d3ccd6ed470d9ffbc716be08bd90efbd44d0734bc9303818f7336070984a162d + # via pyjks +pycparser==2.21 \ + --hash=sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9 \ + --hash=sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206 + # via cffi +pycryptodomex==3.19.0 \ + --hash=sha256:09c9401dc06fb3d94cb1ec23b4ea067a25d1f4c6b7b118ff5631d0b5daaab3cc \ + --hash=sha256:0b2f1982c5bc311f0aab8c293524b861b485d76f7c9ab2c3ac9a25b6f7655975 \ + --hash=sha256:136b284e9246b4ccf4f752d435c80f2c44fc2321c198505de1d43a95a3453b3c \ + --hash=sha256:1789d89f61f70a4cd5483d4dfa8df7032efab1118f8b9894faae03c967707865 \ + --hash=sha256:2126bc54beccbede6eade00e647106b4f4c21e5201d2b0a73e9e816a01c50905 \ + --hash=sha256:258c4233a3fe5a6341780306a36c6fb072ef38ce676a6d41eec3e591347919e8 \ + --hash=sha256:263de9a96d2fcbc9f5bd3a279f14ea0d5f072adb68ebd324987576ec25da084d \ + --hash=sha256:50cb18d4dd87571006fd2447ccec85e6cec0136632a550aa29226ba075c80644 \ + --hash=sha256:5b883e1439ab63af976656446fb4839d566bb096f15fc3c06b5a99cde4927188 \ + --hash=sha256:5d73e9fa3fe830e7b6b42afc49d8329b07a049a47d12e0ef9225f2fd220f19b2 \ + --hash=sha256:61056a1fd3254f6f863de94c233b30dd33bc02f8c935b2000269705f1eeeffa4 \ + --hash=sha256:67c8eb79ab33d0fbcb56842992298ddb56eb6505a72369c20f60bc1d2b6fb002 \ + --hash=sha256:6e45bb4635b3c4e0a00ca9df75ef6295838c85c2ac44ad882410cb631ed1eeaa \ + --hash=sha256:7cb51096a6a8d400724104db8a7e4f2206041a1f23e58924aa3d8d96bcb48338 \ + --hash=sha256:800a2b05cfb83654df80266692f7092eeefe2a314fa7901dcefab255934faeec \ + --hash=sha256:8df69e41f7e7015a90b94d1096ec3d8e0182e73449487306709ec27379fff761 \ + --hash=sha256:917033016ecc23c8933205585a0ab73e20020fdf671b7cd1be788a5c4039840b \ + --hash=sha256:a12144d785518f6491ad334c75ccdc6ad52ea49230b4237f319dbb7cef26f464 \ + --hash=sha256:a3866d68e2fc345162b1b9b83ef80686acfe5cec0d134337f3b03950a0a8bf56 \ + --hash=sha256:a588a1cb7781da9d5e1c84affd98c32aff9c89771eac8eaa659d2760666f7139 \ + --hash=sha256:a77b79852175064c822b047fee7cf5a1f434f06ad075cc9986aa1c19a0c53eb0 \ + --hash=sha256:af83a554b3f077564229865c45af0791be008ac6469ef0098152139e6bd4b5b6 \ + 
--hash=sha256:b801216c48c0886742abf286a9a6b117e248ca144d8ceec1f931ce2dd0c9cb40 \ + --hash=sha256:bfb040b5dda1dff1e197d2ef71927bd6b8bfcb9793bc4dfe0bb6df1e691eaacb \ + --hash=sha256:c01678aee8ac0c1a461cbc38ad496f953f9efcb1fa19f5637cbeba7544792a53 \ + --hash=sha256:c74eb1f73f788facece7979ce91594dc177e1a9b5d5e3e64697dd58299e5cb4d \ + --hash=sha256:c9a68a2f7bd091ccea54ad3be3e9d65eded813e6d79fdf4cc3604e26cdd6384f \ + --hash=sha256:d4dd3b381ff5a5907a3eb98f5f6d32c64d319a840278ceea1dcfcc65063856f3 \ + --hash=sha256:e8e5ecbd4da4157889fce8ba49da74764dd86c891410bfd6b24969fa46edda51 \ + --hash=sha256:eb2fc0ec241bf5e5ef56c8fbec4a2634d631e4c4f616a59b567947a0f35ad83c \ + --hash=sha256:edbe083c299835de7e02c8aa0885cb904a75087d35e7bab75ebe5ed336e8c3e2 \ + --hash=sha256:ff64fd720def623bf64d8776f8d0deada1cc1bf1ec3c1f9d6f5bb5bd098d034f + # via pyjks pydantic==2.4.2 \ --hash=sha256:94f336138093a5d7f426aac732dcfe7ab4eb4da243c88f891d65deb4a2556ee7 \ --hash=sha256:bc3ddf669d234f4220e6e1c4d96b061abe0998185a8d7855c0126782b7abc8c1 @@ -608,6 +739,10 @@ pydantic-settings==2.0.3 \ --hash=sha256:962dc3672495aad6ae96a4390fac7e593591e144625e5112d359f8f67fb75945 \ --hash=sha256:ddd907b066622bd67603b75e2ff791875540dc485b7307c4fffc015719da8625 # via hexkit (pyproject.toml) +pyjks==20.0.0 \ + --hash=sha256:0378cec15fb11b2ed27ba54dad9fd987d48e6f62f49fcff138f5f7a8b312b044 \ + --hash=sha256:394dee142ecff6b1adc36f64356e5584732f1859575aa03b9cf5d5541a9e3460 + # via hexkit (pyproject.toml) pymongo==4.5.0 \ --hash=sha256:076afa0a4a96ca9f77fec0e4a0d241200b3b3a1766f8d7be9a905ecf59a7416b \ --hash=sha256:08819da7864f9b8d4a95729b2bea5fffed08b63d3b9c15b4fea47de655766cf5 \ @@ -693,6 +828,10 @@ pymongo==4.5.0 \ # via # motor # testcontainers +pyopenssl==23.3.0 \ + --hash=sha256:6756834481d9ed5470f4a9393455154bc92fe7a64b7bc6ee2c804e78c52099b2 \ + --hash=sha256:6b2cba5cc46e822750ec3e5a81ee12819850b11303630d575e98108a079c2b12 + # via hexkit (pyproject.toml) pyproject-hooks==1.0.0 \ --hash=sha256:283c11acd6b928d2f6a7c73fa0d01cb2bdc5f07c57a2eeb6e83d5e56b97976f8 \ --hash=sha256:f271b298b97f5955d53fb12b72c1fb1948c22c1a6b70b315c54cedaca0264ef5 @@ -978,6 +1117,9 @@ tornado==6.3.3 \ --hash=sha256:ceb917a50cd35882b57600709dd5421a418c29ddc852da8bcdab1f0db33406b0 \ --hash=sha256:e7d8db41c0181c80d76c982aacc442c0783a2c54d6400fe028954201a2e032fe # via snakeviz +twofish==0.3.0 \ + --hash=sha256:b09d8bb50d33b23ff34cafb1f9209f858f752935c6a5c901efb92a41acb830fa + # via pyjks typer==0.9.0 \ --hash=sha256:50922fd79aea2f4751a8e0408ff10d2662bd0c8bbfa84755a699f3bada2978b2 \ --hash=sha256:5d96d986a21493606a358cae4461bd8cdf83cbf33a5aa950ae629ca3b51467ee diff --git a/requirements.txt b/requirements.txt index bbf5063f..02c22ca1 100644 --- a/requirements.txt +++ b/requirements.txt @@ -2,7 +2,7 @@ # This file is autogenerated by pip-compile with Python 3.9 # by the following command: # -# pip-compile --all-extras --constraint=/workspace/requirements-dev.txt --generate-hashes --output-file=/workspace/requirements.txt /tmp/tmpye1zf3r4/pyproject.toml +# pip-compile --all-extras --constraint=/workspace/requirements-dev.txt --generate-hashes --output-file=/workspace/requirements.txt /tmp/tmp3yb_4wf4/pyproject.toml # aiokafka==0.8.1 \ --hash=sha256:1e24839088fd6d3ff481cc09a48ea487b997328df11630bc0a1b88255edbcfe9 \ @@ -72,6 +72,62 @@ certifi==2023.7.22 \ # via # -c /workspace/requirements-dev.txt # requests +cffi==1.16.0 \ + --hash=sha256:0c9ef6ff37e974b73c25eecc13952c55bceed9112be2d9d938ded8e856138bcc \ + 
--hash=sha256:131fd094d1065b19540c3d72594260f118b231090295d8c34e19a7bbcf2e860a \ + --hash=sha256:1b8ebc27c014c59692bb2664c7d13ce7a6e9a629be20e54e7271fa696ff2b417 \ + --hash=sha256:2c56b361916f390cd758a57f2e16233eb4f64bcbeee88a4881ea90fca14dc6ab \ + --hash=sha256:2d92b25dbf6cae33f65005baf472d2c245c050b1ce709cc4588cdcdd5495b520 \ + --hash=sha256:31d13b0f99e0836b7ff893d37af07366ebc90b678b6664c955b54561fc36ef36 \ + --hash=sha256:32c68ef735dbe5857c810328cb2481e24722a59a2003018885514d4c09af9743 \ + --hash=sha256:3686dffb02459559c74dd3d81748269ffb0eb027c39a6fc99502de37d501faa8 \ + --hash=sha256:582215a0e9adbe0e379761260553ba11c58943e4bbe9c36430c4ca6ac74b15ed \ + --hash=sha256:5b50bf3f55561dac5438f8e70bfcdfd74543fd60df5fa5f62d94e5867deca684 \ + --hash=sha256:5bf44d66cdf9e893637896c7faa22298baebcd18d1ddb6d2626a6e39793a1d56 \ + --hash=sha256:6602bc8dc6f3a9e02b6c22c4fc1e47aa50f8f8e6d3f78a5e16ac33ef5fefa324 \ + --hash=sha256:673739cb539f8cdaa07d92d02efa93c9ccf87e345b9a0b556e3ecc666718468d \ + --hash=sha256:68678abf380b42ce21a5f2abde8efee05c114c2fdb2e9eef2efdb0257fba1235 \ + --hash=sha256:68e7c44931cc171c54ccb702482e9fc723192e88d25a0e133edd7aff8fcd1f6e \ + --hash=sha256:6b3d6606d369fc1da4fd8c357d026317fbb9c9b75d36dc16e90e84c26854b088 \ + --hash=sha256:748dcd1e3d3d7cd5443ef03ce8685043294ad6bd7c02a38d1bd367cfd968e000 \ + --hash=sha256:7651c50c8c5ef7bdb41108b7b8c5a83013bfaa8a935590c5d74627c047a583c7 \ + --hash=sha256:7b78010e7b97fef4bee1e896df8a4bbb6712b7f05b7ef630f9d1da00f6444d2e \ + --hash=sha256:7e61e3e4fa664a8588aa25c883eab612a188c725755afff6289454d6362b9673 \ + --hash=sha256:80876338e19c951fdfed6198e70bc88f1c9758b94578d5a7c4c91a87af3cf31c \ + --hash=sha256:8895613bcc094d4a1b2dbe179d88d7fb4a15cee43c052e8885783fac397d91fe \ + --hash=sha256:88e2b3c14bdb32e440be531ade29d3c50a1a59cd4e51b1dd8b0865c54ea5d2e2 \ + --hash=sha256:8f8e709127c6c77446a8c0a8c8bf3c8ee706a06cd44b1e827c3e6a2ee6b8c098 \ + --hash=sha256:9cb4a35b3642fc5c005a6755a5d17c6c8b6bcb6981baf81cea8bfbc8903e8ba8 \ + --hash=sha256:9f90389693731ff1f659e55c7d1640e2ec43ff725cc61b04b2f9c6d8d017df6a \ + --hash=sha256:a09582f178759ee8128d9270cd1344154fd473bb77d94ce0aeb2a93ebf0feaf0 \ + --hash=sha256:a6a14b17d7e17fa0d207ac08642c8820f84f25ce17a442fd15e27ea18d67c59b \ + --hash=sha256:a72e8961a86d19bdb45851d8f1f08b041ea37d2bd8d4fd19903bc3083d80c896 \ + --hash=sha256:abd808f9c129ba2beda4cfc53bde801e5bcf9d6e0f22f095e45327c038bfe68e \ + --hash=sha256:ac0f5edd2360eea2f1daa9e26a41db02dd4b0451b48f7c318e217ee092a213e9 \ + --hash=sha256:b29ebffcf550f9da55bec9e02ad430c992a87e5f512cd63388abb76f1036d8d2 \ + --hash=sha256:b2ca4e77f9f47c55c194982e10f058db063937845bb2b7a86c84a6cfe0aefa8b \ + --hash=sha256:b7be2d771cdba2942e13215c4e340bfd76398e9227ad10402a8767ab1865d2e6 \ + --hash=sha256:b84834d0cf97e7d27dd5b7f3aca7b6e9263c56308ab9dc8aae9784abb774d404 \ + --hash=sha256:b86851a328eedc692acf81fb05444bdf1891747c25af7529e39ddafaf68a4f3f \ + --hash=sha256:bcb3ef43e58665bbda2fb198698fcae6776483e0c4a631aa5647806c25e02cc0 \ + --hash=sha256:c0f31130ebc2d37cdd8e44605fb5fa7ad59049298b3f745c74fa74c62fbfcfc4 \ + --hash=sha256:c6a164aa47843fb1b01e941d385aab7215563bb8816d80ff3a363a9f8448a8dc \ + --hash=sha256:d8a9d3ebe49f084ad71f9269834ceccbf398253c9fac910c4fd7053ff1386936 \ + --hash=sha256:db8e577c19c0fda0beb7e0d4e09e0ba74b1e4c092e0e40bfa12fe05b6f6d75ba \ + --hash=sha256:dc9b18bf40cc75f66f40a7379f6a9513244fe33c0e8aa72e2d56b0196a7ef872 \ + --hash=sha256:e09f3ff613345df5e8c3667da1d918f9149bd623cd9070c983c013792a9a62eb \ + 
--hash=sha256:e4108df7fe9b707191e55f33efbcb2d81928e10cea45527879a4749cbe472614 \ + --hash=sha256:e6024675e67af929088fda399b2094574609396b1decb609c55fa58b028a32a1 \ + --hash=sha256:e70f54f1796669ef691ca07d046cd81a29cb4deb1e5f942003f401c0c4a2695d \ + --hash=sha256:e715596e683d2ce000574bae5d07bd522c781a822866c20495e52520564f0969 \ + --hash=sha256:e760191dd42581e023a68b758769e2da259b5d52e3103c6060ddc02c9edb8d7b \ + --hash=sha256:ed86a35631f7bfbb28e108dd96773b9d5a6ce4811cf6ea468bb6a359b256b1e4 \ + --hash=sha256:ee07e47c12890ef248766a6e55bd38ebfb2bb8edd4142d56db91b21ea68b7627 \ + --hash=sha256:fa3a0128b152627161ce47201262d3140edb5a5c3da88d73a1b790a959126956 \ + --hash=sha256:fcc8eb6d5902bb1cf6dc4f187ee3ea80a1eba0a89aba40a5cb20a5087d961357 + # via + # -c /workspace/requirements-dev.txt + # cryptography charset-normalizer==3.3.0 \ --hash=sha256:02673e456dc5ab13659f85196c534dc596d4ef260e4d86e856c3b2773ce09843 \ --hash=sha256:02af06682e3590ab952599fbadac535ede5d60d78848e555aa58d0c0abbde786 \ @@ -166,6 +222,33 @@ charset-normalizer==3.3.0 \ # via # -c /workspace/requirements-dev.txt # requests +cryptography==41.0.5 \ + --hash=sha256:0c327cac00f082013c7c9fb6c46b7cc9fa3c288ca702c74773968173bda421bf \ + --hash=sha256:0d2a6a598847c46e3e321a7aef8af1436f11c27f1254933746304ff014664d84 \ + --hash=sha256:227ec057cd32a41c6651701abc0328135e472ed450f47c2766f23267b792a88e \ + --hash=sha256:22892cc830d8b2c89ea60148227631bb96a7da0c1b722f2aac8824b1b7c0b6b8 \ + --hash=sha256:392cb88b597247177172e02da6b7a63deeff1937fa6fec3bbf902ebd75d97ec7 \ + --hash=sha256:3be3ca726e1572517d2bef99a818378bbcf7d7799d5372a46c79c29eb8d166c1 \ + --hash=sha256:573eb7128cbca75f9157dcde974781209463ce56b5804983e11a1c462f0f4e88 \ + --hash=sha256:580afc7b7216deeb87a098ef0674d6ee34ab55993140838b14c9b83312b37b86 \ + --hash=sha256:5a70187954ba7292c7876734183e810b728b4f3965fbe571421cb2434d279179 \ + --hash=sha256:73801ac9736741f220e20435f84ecec75ed70eda90f781a148f1bad546963d81 \ + --hash=sha256:7d208c21e47940369accfc9e85f0de7693d9a5d843c2509b3846b2db170dfd20 \ + --hash=sha256:8254962e6ba1f4d2090c44daf50a547cd5f0bf446dc658a8e5f8156cae0d8548 \ + --hash=sha256:88417bff20162f635f24f849ab182b092697922088b477a7abd6664ddd82291d \ + --hash=sha256:a48e74dad1fb349f3dc1d449ed88e0017d792997a7ad2ec9587ed17405667e6d \ + --hash=sha256:b948e09fe5fb18517d99994184854ebd50b57248736fd4c720ad540560174ec5 \ + --hash=sha256:c707f7afd813478e2019ae32a7c49cd932dd60ab2d2a93e796f68236b7e1fbf1 \ + --hash=sha256:d38e6031e113b7421db1de0c1b1f7739564a88f1684c6b89234fbf6c11b75147 \ + --hash=sha256:d3977f0e276f6f5bf245c403156673db103283266601405376f075c849a0b936 \ + --hash=sha256:da6a0ff8f1016ccc7477e6339e1d50ce5f59b88905585f77193ebd5068f1e797 \ + --hash=sha256:e270c04f4d9b5671ebcc792b3ba5d4488bf7c42c3c241a3748e2599776f29696 \ + --hash=sha256:e886098619d3815e0ad5790c973afeee2c0e6e04b4da90b88e6bd06e2a0b1b72 \ + --hash=sha256:ec3b055ff8f1dce8e6ef28f626e0972981475173d7973d63f271b29c8a2897da \ + --hash=sha256:fba1e91467c65fe64a82c689dc6cf58151158993b13eb7a7f3f4b7f395636723 + # via + # -c /workspace/requirements-dev.txt + # pyopenssl dependency-injector==4.41.0 \ --hash=sha256:02620454ee8101f77a317f3229935ce687480883d72a40858ff4b0c87c935cce \ --hash=sha256:059fbb48333148143e8667a5323d162628dfe27c386bd0ed3deeecfc390338bf \ @@ -264,6 +347,12 @@ idna==3.4 \ # via # -c /workspace/requirements-dev.txt # requests +javaobj-py3==0.4.3 \ + --hash=sha256:38f74db3a57e9998a9774e3614afb95cb396f139f29b3fdb130c5af554435259 \ + 
--hash=sha256:f6ac64cab49e282cf8171d4c479de413dedbbb1a69c64499648185f974080db3 + # via + # -c /workspace/requirements-dev.txt + # pyjks jmespath==1.0.1 \ --hash=sha256:02e2e4cc71b5bcab88332eebf907519190dd9e6e82107fa7f83b1003a6252980 \ --hash=sha256:90261b206d6defd58fdd5e85f478bf633a2901798906be2ad389150c5c60edbe @@ -304,6 +393,61 @@ packaging==23.2 \ # aiokafka # deprecation # docker +pyasn1==0.5.0 \ + --hash=sha256:87a2121042a1ac9358cabcaf1d07680ff97ee6404333bacca15f76aa8ad01a57 \ + --hash=sha256:97b7290ca68e62a832558ec3976f15cbf911bf5d7c7039d8b861c2a0ece69fde + # via + # -c /workspace/requirements-dev.txt + # pyasn1-modules + # pyjks +pyasn1-modules==0.3.0 \ + --hash=sha256:5bd01446b736eb9d31512a30d46c1ac3395d676c6f3cafa4c03eb54b9925631c \ + --hash=sha256:d3ccd6ed470d9ffbc716be08bd90efbd44d0734bc9303818f7336070984a162d + # via + # -c /workspace/requirements-dev.txt + # pyjks +pycparser==2.21 \ + --hash=sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9 \ + --hash=sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206 + # via + # -c /workspace/requirements-dev.txt + # cffi +pycryptodomex==3.19.0 \ + --hash=sha256:09c9401dc06fb3d94cb1ec23b4ea067a25d1f4c6b7b118ff5631d0b5daaab3cc \ + --hash=sha256:0b2f1982c5bc311f0aab8c293524b861b485d76f7c9ab2c3ac9a25b6f7655975 \ + --hash=sha256:136b284e9246b4ccf4f752d435c80f2c44fc2321c198505de1d43a95a3453b3c \ + --hash=sha256:1789d89f61f70a4cd5483d4dfa8df7032efab1118f8b9894faae03c967707865 \ + --hash=sha256:2126bc54beccbede6eade00e647106b4f4c21e5201d2b0a73e9e816a01c50905 \ + --hash=sha256:258c4233a3fe5a6341780306a36c6fb072ef38ce676a6d41eec3e591347919e8 \ + --hash=sha256:263de9a96d2fcbc9f5bd3a279f14ea0d5f072adb68ebd324987576ec25da084d \ + --hash=sha256:50cb18d4dd87571006fd2447ccec85e6cec0136632a550aa29226ba075c80644 \ + --hash=sha256:5b883e1439ab63af976656446fb4839d566bb096f15fc3c06b5a99cde4927188 \ + --hash=sha256:5d73e9fa3fe830e7b6b42afc49d8329b07a049a47d12e0ef9225f2fd220f19b2 \ + --hash=sha256:61056a1fd3254f6f863de94c233b30dd33bc02f8c935b2000269705f1eeeffa4 \ + --hash=sha256:67c8eb79ab33d0fbcb56842992298ddb56eb6505a72369c20f60bc1d2b6fb002 \ + --hash=sha256:6e45bb4635b3c4e0a00ca9df75ef6295838c85c2ac44ad882410cb631ed1eeaa \ + --hash=sha256:7cb51096a6a8d400724104db8a7e4f2206041a1f23e58924aa3d8d96bcb48338 \ + --hash=sha256:800a2b05cfb83654df80266692f7092eeefe2a314fa7901dcefab255934faeec \ + --hash=sha256:8df69e41f7e7015a90b94d1096ec3d8e0182e73449487306709ec27379fff761 \ + --hash=sha256:917033016ecc23c8933205585a0ab73e20020fdf671b7cd1be788a5c4039840b \ + --hash=sha256:a12144d785518f6491ad334c75ccdc6ad52ea49230b4237f319dbb7cef26f464 \ + --hash=sha256:a3866d68e2fc345162b1b9b83ef80686acfe5cec0d134337f3b03950a0a8bf56 \ + --hash=sha256:a588a1cb7781da9d5e1c84affd98c32aff9c89771eac8eaa659d2760666f7139 \ + --hash=sha256:a77b79852175064c822b047fee7cf5a1f434f06ad075cc9986aa1c19a0c53eb0 \ + --hash=sha256:af83a554b3f077564229865c45af0791be008ac6469ef0098152139e6bd4b5b6 \ + --hash=sha256:b801216c48c0886742abf286a9a6b117e248ca144d8ceec1f931ce2dd0c9cb40 \ + --hash=sha256:bfb040b5dda1dff1e197d2ef71927bd6b8bfcb9793bc4dfe0bb6df1e691eaacb \ + --hash=sha256:c01678aee8ac0c1a461cbc38ad496f953f9efcb1fa19f5637cbeba7544792a53 \ + --hash=sha256:c74eb1f73f788facece7979ce91594dc177e1a9b5d5e3e64697dd58299e5cb4d \ + --hash=sha256:c9a68a2f7bd091ccea54ad3be3e9d65eded813e6d79fdf4cc3604e26cdd6384f \ + --hash=sha256:d4dd3b381ff5a5907a3eb98f5f6d32c64d319a840278ceea1dcfcc65063856f3 \ + 
--hash=sha256:e8e5ecbd4da4157889fce8ba49da74764dd86c891410bfd6b24969fa46edda51 \ + --hash=sha256:eb2fc0ec241bf5e5ef56c8fbec4a2634d631e4c4f616a59b567947a0f35ad83c \ + --hash=sha256:edbe083c299835de7e02c8aa0885cb904a75087d35e7bab75ebe5ed336e8c3e2 \ + --hash=sha256:ff64fd720def623bf64d8776f8d0deada1cc1bf1ec3c1f9d6f5bb5bd098d034f + # via + # -c /workspace/requirements-dev.txt + # pyjks pydantic==2.4.2 \ --hash=sha256:94f336138093a5d7f426aac732dcfe7ab4eb4da243c88f891d65deb4a2556ee7 \ --hash=sha256:bc3ddf669d234f4220e6e1c4d96b061abe0998185a8d7855c0126782b7abc8c1 @@ -427,6 +571,12 @@ pydantic-settings==2.0.3 \ # via # -c /workspace/requirements-dev.txt # hexkit (pyproject.toml) +pyjks==20.0.0 \ + --hash=sha256:0378cec15fb11b2ed27ba54dad9fd987d48e6f62f49fcff138f5f7a8b312b044 \ + --hash=sha256:394dee142ecff6b1adc36f64356e5584732f1859575aa03b9cf5d5541a9e3460 + # via + # -c /workspace/requirements-dev.txt + # hexkit (pyproject.toml) pymongo==4.5.0 \ --hash=sha256:076afa0a4a96ca9f77fec0e4a0d241200b3b3a1766f8d7be9a905ecf59a7416b \ --hash=sha256:08819da7864f9b8d4a95729b2bea5fffed08b63d3b9c15b4fea47de655766cf5 \ @@ -513,6 +663,12 @@ pymongo==4.5.0 \ # -c /workspace/requirements-dev.txt # motor # testcontainers +pyopenssl==23.3.0 \ + --hash=sha256:6756834481d9ed5470f4a9393455154bc92fe7a64b7bc6ee2c804e78c52099b2 \ + --hash=sha256:6b2cba5cc46e822750ec3e5a81ee12819850b11303630d575e98108a079c2b12 + # via + # -c /workspace/requirements-dev.txt + # hexkit (pyproject.toml) python-dateutil==2.8.2 \ --hash=sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86 \ --hash=sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9 @@ -715,6 +871,11 @@ testcontainers[kafka,mongo]==3.7.1 \ # -c /workspace/requirements-dev.txt # hexkit (pyproject.toml) # testcontainers +twofish==0.3.0 \ + --hash=sha256:b09d8bb50d33b23ff34cafb1f9209f858f752935c6a5c901efb92a41acb830fa + # via + # -c /workspace/requirements-dev.txt + # pyjks typing-extensions==4.8.0 \ --hash=sha256:8f92fc8806f9a6b641eaa5318da32b44d401efaac0f6678c9bc448ba3605faa0 \ --hash=sha256:df8e4339e9cb77357558cbdbceca33c303714cf861d1eef15e1070055ae8b7ef diff --git a/src/hexkit/providers/akafka/provider.py b/src/hexkit/providers/akafka/provider.py index c863139e..32a65e24 100644 --- a/src/hexkit/providers/akafka/provider.py +++ b/src/hexkit/providers/akafka/provider.py @@ -28,9 +28,9 @@ from typing import Any, Callable, Optional, Protocol, TypeVar from aiokafka import AIOKafkaConsumer, AIOKafkaProducer +from aiokafka.helpers import create_ssl_context from pydantic import Field from pydantic_settings import BaseSettings -from aiokafka.helpers import create_ssl_context from hexkit.base import InboundProviderBase from hexkit.custom_types import Ascii, JsonObject @@ -78,7 +78,7 @@ class KafkaConfig(BaseSettings): + "Valid values are: PLAINTEXT, SSL.", ) ssl_cafile: str = Field( - None, + "", description="""Certificate Authority file path containing certificates used to sign broker certificates. If CA not specified (by either cafile, capath, cadata) default system CA will be used if found by @@ -87,7 +87,7 @@ class KafkaConfig(BaseSettings): Default: :data:`None`""", ) ssl_certfile: str = Field( - None, + "", description="""optional filename of file in PEM format containing the client certificate, as well as any CA certificates needed to establish the certificate's authenticity. 
For more information see @@ -95,13 +95,13 @@ class KafkaConfig(BaseSettings): Default: :data:`None`.""", ) ssl_keyfile: str = Field( - None, + "", description=""""optional filename containing the client private key. For more information see :meth:`~ssl.SSLContext.load_cert_chain`. Default: :data:`None`.""", ) ssl_password: str = Field( - None, + "", description="""optional password to be used when loading the certificate chain. For more information see :meth:`~ssl.SSLContext.load_cert_chain`. diff --git a/src/hexkit/providers/akafka/testutils.py b/src/hexkit/providers/akafka/testutils.py index 340f462e..9ee6c65f 100644 --- a/src/hexkit/providers/akafka/testutils.py +++ b/src/hexkit/providers/akafka/testutils.py @@ -19,16 +19,20 @@ Please note, only use for testing purposes. """ import json +import os from collections.abc import AsyncGenerator, Sequence from contextlib import asynccontextmanager from dataclasses import dataclass from functools import partial +from pathlib import Path from typing import Optional, Union +import jks import pytest_asyncio from aiokafka import AIOKafkaConsumer, TopicPartition from kafka import KafkaAdminClient from kafka.errors import KafkaError +from OpenSSL import crypto from testcontainers.kafka import KafkaContainer from hexkit.custom_types import Ascii, JsonObject, PytestScope @@ -398,12 +402,100 @@ async def expect_events( ) +def generate_ssl_certificates(): + """Generate ssl keys""" + key = crypto.PKey() + key.generate_key(crypto.TYPE_RSA, 2048) + + # generate a self signed certificate + cert = crypto.X509() + cert.get_subject().CN = "my.server.example.com" + cert.set_serial_number(473289472) + cert.gmtime_adj_notBefore(0) + cert.gmtime_adj_notAfter(365 * 24 * 60 * 60) + cert.set_issuer(cert.get_subject()) + cert.set_pubkey(key) + cert.sign(key, "sha256") + + # dumping the key and cert to ASN1 + dumped_cert = crypto.dump_certificate(crypto.FILETYPE_ASN1, cert) + dumped_key = crypto.dump_privatekey(crypto.FILETYPE_ASN1, key) + + # creating a private key entry + pke = jks.PrivateKeyEntry.new( + "self signed cert", [dumped_cert], dumped_key, "rsa_raw" + ) + + # if we want the private key entry to have a unique password, we can encrypt it beforehand + # if it is not ecrypted when saved, it will be encrypted with the same password as the keystore + # pke.encrypt('my_private_key_password') + + # os.mkdir("ssl") + ssl_dir = Path("/tmp/ssl") # noqa: S108 + Path(ssl_dir).mkdir(exist_ok=True) + cert_path = os.path.abspath(f"{ssl_dir}/kafka-server-cert.crt") + key_path = os.path.abspath(f"{ssl_dir}/kafka-server-private.key") + jks_path = os.path.abspath(f"{ssl_dir}/kafka-server.keystore.jks") + cred_path = os.path.abspath(f"{ssl_dir}/password.txt") + keystore_password = "password" # noqa: S105 + + with open(cert_path, "w") as f: + f.write(crypto.dump_certificate(crypto.FILETYPE_PEM, cert).decode("utf-8")) + + with open(key_path, "w") as f: + f.write(crypto.dump_privatekey(crypto.FILETYPE_PEM, key).decode("utf-8")) + + with open(cred_path, "w") as f: + f.write(keystore_password) + + # creating a jks keystore with the private key, and saving it + keystore = jks.KeyStore.new("jks", [pke]) + keystore.save(jks_path, keystore_password) + + return { + "ssl_dir": ssl_dir, + "ssl_certfile": cert_path, + "ssl_keyfile": key_path, + "ssl_cafile": cert_path, + "ssl_password": cred_path, + "keystore_location": jks_path, + } + + +class KafkaSSLContainer(KafkaContainer): + """KafkaSSLContainer""" + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + 
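# Generate a self-signed certificate and a JKS keystore on the host
+ # (via generate_ssl_certificates above) and mount that directory into
+ # the broker container as /etc/kafka/secrets below.
+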
self.ssl_config = generate_ssl_certificates() + + self.with_volume_mapping(str(self.ssl_config["ssl_dir"]), "/etc/kafka/secrets") + + self.with_env("KAFKA_LISTENERS", "SSL://0.0.0.0:9093,BROKER://0.0.0.0:9092") + self.with_env( + "KAFKA_LISTENER_SECURITY_PROTOCOL_MAP", "SSL:SSL,BROKER:PLAINTEXT" + ) + self.with_env("KAFKA_SECURITY_PROTOCOL", "SSL") + + self.with_env("KAFKA_SSL_KEYSTORE_FILENAME", "kafka-server.keystore.jks") + self.with_env("KAFKA_SSL_KEYSTORE_CREDENTIALS", "password.txt") + self.with_env("KAFKA_SSL_KEY_CREDENTIALS", "password.txt") + + self.with_env("KAFKA_SSL_TRUSTSTORE_FILENAME", "kafka-server.keystore.jks") + self.with_env("KAFKA_SSL_TRUSTSTORE_CREDENTIALS", "password.txt") + + self.with_env("KAFKA_SSL_ENDPOINT_IDENTIFICATION_ALGORITHM", " ") + self.with_env("KAFKA_SSL_CLIENT_AUTH", "required") + + self.with_env("KAFKA_INTER_BROKER_LISTENER_NAME", "SSL") + + async def kafka_fixture_function() -> AsyncGenerator[KafkaFixture, None]: """Pytest fixture for tests depending on the Kafka-base providers. **Do not call directly** Instead, use get_kafka_fixture() """ - with KafkaContainer(image="confluentinc/cp-kafka:5.4.9-1-deb8") as kafka: + with KafkaSSLContainer(image="confluentinc/cp-kafka:5.4.9-1-deb8") as kafka: kafka_servers = [kafka.get_bootstrap_server()] config = KafkaConfig( # type: ignore service_name="test_publisher", diff --git a/tests/integration/test_akafka.py b/tests/integration/test_akafka.py index aa1049c8..c55d8c32 100644 --- a/tests/integration/test_akafka.py +++ b/tests/integration/test_akafka.py @@ -83,7 +83,13 @@ async def test_kafka_event_subscriber(kafka_fixture: KafkaFixture): # noqa: F81 service_name="event_subscriber", service_instance_id="1", kafka_servers=kafka_fixture.kafka_servers, + security_protocol="SSL", + ssl_cafile="/tmp/ssl/kafka-server-cert.crt", + ssl_certfile="/tmp/ssl/kafka-server-cert.crt", + ssl_keyfile="/tmp/ssl/kafka-server-private.key", + ssl_password="password", ) + async with KafkaEventSubscriber.construct( config=config, translator=translator, From 77da1d59fa0e8d282d6cff4b500d16eb5e67886c Mon Sep 17 00:00:00 2001 From: walzph Date: Thu, 2 Nov 2023 10:26:27 +0000 Subject: [PATCH 08/29] Add ca, server certs for ssl testing --- .gitignore | 3 + src/hexkit/providers/akafka/testutils.py | 109 +++++++++++++++-------- tests/integration/test_akafka.py | 8 +- 3 files changed, 80 insertions(+), 40 deletions(-) diff --git a/.gitignore b/.gitignore index a71cfd2a..40dbd167 100644 --- a/.gitignore +++ b/.gitignore @@ -137,3 +137,6 @@ dmypy.json .DS_Store desktop.ini thumbs.db + +# SSL keystore +.ssl/ diff --git a/src/hexkit/providers/akafka/testutils.py b/src/hexkit/providers/akafka/testutils.py index 9ee6c65f..d4206a1f 100644 --- a/src/hexkit/providers/akafka/testutils.py +++ b/src/hexkit/providers/akafka/testutils.py @@ -30,6 +30,7 @@ import jks import pytest_asyncio from aiokafka import AIOKafkaConsumer, TopicPartition +from cryptography import x509 from kafka import KafkaAdminClient from kafka.errors import KafkaError from OpenSSL import crypto @@ -402,63 +403,97 @@ async def expect_events( ) -def generate_ssl_certificates(): +def generate_ssl_certificates(): # noqa: PLR0915 """Generate ssl keys""" - key = crypto.PKey() - key.generate_key(crypto.TYPE_RSA, 2048) + ca_key = crypto.PKey() + ca_key.generate_key(crypto.TYPE_RSA, 2048) # generate a self signed certificate - cert = crypto.X509() - cert.get_subject().CN = "my.server.example.com" - cert.set_serial_number(473289472) - cert.gmtime_adj_notBefore(0) - cert.gmtime_adj_notAfter(365 * 
24 * 60 * 60) - cert.set_issuer(cert.get_subject()) - cert.set_pubkey(key) - cert.sign(key, "sha256") - - # dumping the key and cert to ASN1 - dumped_cert = crypto.dump_certificate(crypto.FILETYPE_ASN1, cert) - dumped_key = crypto.dump_privatekey(crypto.FILETYPE_ASN1, key) + ca_cert = crypto.X509() + ca_cert.get_subject().CN = "GHGA Test Certificate Authority" + ca_cert.set_serial_number(x509.random_serial_number()) + ca_cert.gmtime_adj_notBefore(0) + ca_cert.gmtime_adj_notAfter(365 * 24 * 60 * 60) + ca_cert.set_issuer(ca_cert.get_subject()) + ca_cert.set_pubkey(ca_key) + ca_cert.sign(ca_key, "sha256") + + server_key = crypto.PKey() + server_key.generate_key(crypto.TYPE_RSA, 2048) + + # generate a self signed certificate + server_cert = crypto.X509() + server_cert.get_subject().CN = "Broker 1" + server_cert.set_serial_number(x509.random_serial_number()) + server_cert.gmtime_adj_notBefore(0) + server_cert.gmtime_adj_notAfter(365 * 24 * 60 * 60) + server_cert.set_issuer(ca_cert.get_subject()) + server_cert.set_pubkey(server_key) + server_cert.sign(ca_key, "sha256") + + # dumping the ca key and cert to ASN1 + dumped_ca_cert = crypto.dump_certificate(crypto.FILETYPE_ASN1, ca_cert) + dumped_ca_key = crypto.dump_privatekey(crypto.FILETYPE_ASN1, ca_key) + # dumping the server key and cert to ASN1 + dumped_server_cert = crypto.dump_certificate(crypto.FILETYPE_ASN1, server_cert) + dumped_server_key = crypto.dump_privatekey(crypto.FILETYPE_ASN1, server_key) # creating a private key entry - pke = jks.PrivateKeyEntry.new( - "self signed cert", [dumped_cert], dumped_key, "rsa_raw" + ca_pke = jks.PrivateKeyEntry.new( + "self signed cert", [dumped_ca_cert], dumped_ca_key, "rsa_raw" ) + # creating a private key entry + server_pke = jks.PrivateKeyEntry.new( + "self signed cert", [dumped_server_cert], dumped_server_key, "rsa_raw" + ) # if we want the private key entry to have a unique password, we can encrypt it beforehand # if it is not ecrypted when saved, it will be encrypted with the same password as the keystore - # pke.encrypt('my_private_key_password') + # pke.encrypt("") # os.mkdir("ssl") - ssl_dir = Path("/tmp/ssl") # noqa: S108 + ssl_dir = Path("/workspace/.ssl") Path(ssl_dir).mkdir(exist_ok=True) - cert_path = os.path.abspath(f"{ssl_dir}/kafka-server-cert.crt") - key_path = os.path.abspath(f"{ssl_dir}/kafka-server-private.key") - jks_path = os.path.abspath(f"{ssl_dir}/kafka-server.keystore.jks") + ca_cert_path = os.path.abspath(f"{ssl_dir}/ca-cert.crt") + ca_key_path = os.path.abspath(f"{ssl_dir}/ca-private.key") + ca_jks_path = os.path.abspath(f"{ssl_dir}/ca.truststore.jks") + server_cert_path = os.path.abspath(f"{ssl_dir}/server-cert.crt") + server_key_path = os.path.abspath(f"{ssl_dir}/server-private.key") + server_jks_path = os.path.abspath(f"{ssl_dir}/server.keystore.jks") cred_path = os.path.abspath(f"{ssl_dir}/password.txt") keystore_password = "password" # noqa: S105 - with open(cert_path, "w") as f: - f.write(crypto.dump_certificate(crypto.FILETYPE_PEM, cert).decode("utf-8")) + with open(ca_cert_path, "w") as f: + f.write(crypto.dump_certificate(crypto.FILETYPE_PEM, ca_cert).decode("utf-8")) - with open(key_path, "w") as f: - f.write(crypto.dump_privatekey(crypto.FILETYPE_PEM, key).decode("utf-8")) + with open(ca_key_path, "w") as f: + f.write(crypto.dump_privatekey(crypto.FILETYPE_PEM, ca_key).decode("utf-8")) + with open(server_cert_path, "w") as f: + f.write( + crypto.dump_certificate(crypto.FILETYPE_PEM, server_cert).decode("utf-8") + ) + + with open(server_key_path, "w") as f: + 
f.write(crypto.dump_privatekey(crypto.FILETYPE_PEM, server_key).decode("utf-8")) with open(cred_path, "w") as f: f.write(keystore_password) # creating a jks keystore with the private key, and saving it - keystore = jks.KeyStore.new("jks", [pke]) - keystore.save(jks_path, keystore_password) + ca_keystore = jks.KeyStore.new("jks", [ca_pke]) + ca_keystore.save(ca_jks_path, keystore_password) + + # creating a jks keystore with the private key, and saving it + server_keystore = jks.KeyStore.new("jks", [server_pke]) + server_keystore.save(server_jks_path, keystore_password) return { "ssl_dir": ssl_dir, - "ssl_certfile": cert_path, - "ssl_keyfile": key_path, - "ssl_cafile": cert_path, + "ssl_certfile": ca_cert_path, + "ssl_keyfile": ca_key_path, + "ssl_cafile": ca_cert_path, "ssl_password": cred_path, - "keystore_location": jks_path, + "keystore_location": ca_jks_path, } @@ -469,7 +504,9 @@ def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) self.ssl_config = generate_ssl_certificates() - self.with_volume_mapping(str(self.ssl_config["ssl_dir"]), "/etc/kafka/secrets") + self.with_volume_mapping( + "/Users/w620-admin/workspace/ghga/hexkit/.ssl", "/etc/kafka/secrets" + ) self.with_env("KAFKA_LISTENERS", "SSL://0.0.0.0:9093,BROKER://0.0.0.0:9092") self.with_env( @@ -477,15 +514,15 @@ def __init__(self, *args, **kwargs): ) self.with_env("KAFKA_SECURITY_PROTOCOL", "SSL") - self.with_env("KAFKA_SSL_KEYSTORE_FILENAME", "kafka-server.keystore.jks") + self.with_env("KAFKA_SSL_KEYSTORE_FILENAME", "server.keystore.jks") self.with_env("KAFKA_SSL_KEYSTORE_CREDENTIALS", "password.txt") self.with_env("KAFKA_SSL_KEY_CREDENTIALS", "password.txt") - self.with_env("KAFKA_SSL_TRUSTSTORE_FILENAME", "kafka-server.keystore.jks") + self.with_env("KAFKA_SSL_TRUSTSTORE_FILENAME", "ca.truststore.jks") self.with_env("KAFKA_SSL_TRUSTSTORE_CREDENTIALS", "password.txt") self.with_env("KAFKA_SSL_ENDPOINT_IDENTIFICATION_ALGORITHM", " ") - self.with_env("KAFKA_SSL_CLIENT_AUTH", "required") + self.with_env("KAFKA_SSL_CLIENT_AUTH", "requested") self.with_env("KAFKA_INTER_BROKER_LISTENER_NAME", "SSL") diff --git a/tests/integration/test_akafka.py b/tests/integration/test_akafka.py index c55d8c32..fd1dfaa8 100644 --- a/tests/integration/test_akafka.py +++ b/tests/integration/test_akafka.py @@ -84,10 +84,10 @@ async def test_kafka_event_subscriber(kafka_fixture: KafkaFixture): # noqa: F81 service_instance_id="1", kafka_servers=kafka_fixture.kafka_servers, security_protocol="SSL", - ssl_cafile="/tmp/ssl/kafka-server-cert.crt", - ssl_certfile="/tmp/ssl/kafka-server-cert.crt", - ssl_keyfile="/tmp/ssl/kafka-server-private.key", - ssl_password="password", + ssl_cafile="/workspace/.ssl/ca-cert.crt" + # # ssl_certfile="/workspace/.ssl/kafka-server-cert.crt", + # # ssl_keyfile="/workspace/.ssl/kafka-server-private.key", + # ssl_password="password", ) async with KafkaEventSubscriber.construct( From 9b1b55af601cfb22393dbe155bbb304ac30794d5 Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Tue, 7 Nov 2023 08:48:07 +0000 Subject: [PATCH 09/29] Update gitignore from template --- .gitignore | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/.gitignore b/.gitignore index 40dbd167..d9ba6145 100644 --- a/.gitignore +++ b/.gitignore @@ -133,10 +133,13 @@ dmypy.json # ignore VS Code settings: .vscode/ +# key stores +*.key +*.rnd +.keystore +.ssl/ + # desktop settings and thumbnails .DS_Store desktop.ini thumbs.db - -# SSL keystore -.ssl/ From e9042ce6f31f42491f701f3ae2752c9bd117a70e Mon Sep 17 00:00:00 
2001 From: Christoph Zwerschke Date: Tue, 7 Nov 2023 13:04:22 +0000 Subject: [PATCH 10/29] Revert server cert creation --- pyproject.toml | 2 - requirements-dev.txt | 144 +------------------- requirements.txt | 163 +---------------------- src/hexkit/providers/akafka/testutils.py | 131 +----------------- tests/integration/test_akafka.py | 6 - 5 files changed, 3 insertions(+), 443 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 8bab22dd..4f6c68fe 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -28,8 +28,6 @@ dependencies = [ "pydantic_settings >=2, <3", "PyYAML >=6.0, <7", "dependency-injector >=4.41.0, <5", - "pyjks >=20, <21", - "pyOpenSSL >=23.3.0, <24" ] [project.optional-dependencies] diff --git a/requirements-dev.txt b/requirements-dev.txt index 1fea0a05..e7c617f1 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -2,7 +2,7 @@ # This file is autogenerated by pip-compile with Python 3.9 # by the following command: # -# pip-compile --all-extras --generate-hashes --output-file=/workspace/requirements-dev.txt /tmp/tmp3yb_4wf4/pyproject.toml /workspace/requirements-dev.in +# pip-compile --all-extras --generate-hashes --output-file=/workspace/requirements-dev.txt /tmp/tmpye1zf3r4/pyproject.toml /workspace/requirements-dev.in # aiokafka==0.8.1 \ --hash=sha256:1e24839088fd6d3ff481cc09a48ea487b997328df11630bc0a1b88255edbcfe9 \ @@ -95,60 +95,6 @@ certifi==2023.7.22 \ # httpcore # httpx # requests -cffi==1.16.0 \ - --hash=sha256:0c9ef6ff37e974b73c25eecc13952c55bceed9112be2d9d938ded8e856138bcc \ - --hash=sha256:131fd094d1065b19540c3d72594260f118b231090295d8c34e19a7bbcf2e860a \ - --hash=sha256:1b8ebc27c014c59692bb2664c7d13ce7a6e9a629be20e54e7271fa696ff2b417 \ - --hash=sha256:2c56b361916f390cd758a57f2e16233eb4f64bcbeee88a4881ea90fca14dc6ab \ - --hash=sha256:2d92b25dbf6cae33f65005baf472d2c245c050b1ce709cc4588cdcdd5495b520 \ - --hash=sha256:31d13b0f99e0836b7ff893d37af07366ebc90b678b6664c955b54561fc36ef36 \ - --hash=sha256:32c68ef735dbe5857c810328cb2481e24722a59a2003018885514d4c09af9743 \ - --hash=sha256:3686dffb02459559c74dd3d81748269ffb0eb027c39a6fc99502de37d501faa8 \ - --hash=sha256:582215a0e9adbe0e379761260553ba11c58943e4bbe9c36430c4ca6ac74b15ed \ - --hash=sha256:5b50bf3f55561dac5438f8e70bfcdfd74543fd60df5fa5f62d94e5867deca684 \ - --hash=sha256:5bf44d66cdf9e893637896c7faa22298baebcd18d1ddb6d2626a6e39793a1d56 \ - --hash=sha256:6602bc8dc6f3a9e02b6c22c4fc1e47aa50f8f8e6d3f78a5e16ac33ef5fefa324 \ - --hash=sha256:673739cb539f8cdaa07d92d02efa93c9ccf87e345b9a0b556e3ecc666718468d \ - --hash=sha256:68678abf380b42ce21a5f2abde8efee05c114c2fdb2e9eef2efdb0257fba1235 \ - --hash=sha256:68e7c44931cc171c54ccb702482e9fc723192e88d25a0e133edd7aff8fcd1f6e \ - --hash=sha256:6b3d6606d369fc1da4fd8c357d026317fbb9c9b75d36dc16e90e84c26854b088 \ - --hash=sha256:748dcd1e3d3d7cd5443ef03ce8685043294ad6bd7c02a38d1bd367cfd968e000 \ - --hash=sha256:7651c50c8c5ef7bdb41108b7b8c5a83013bfaa8a935590c5d74627c047a583c7 \ - --hash=sha256:7b78010e7b97fef4bee1e896df8a4bbb6712b7f05b7ef630f9d1da00f6444d2e \ - --hash=sha256:7e61e3e4fa664a8588aa25c883eab612a188c725755afff6289454d6362b9673 \ - --hash=sha256:80876338e19c951fdfed6198e70bc88f1c9758b94578d5a7c4c91a87af3cf31c \ - --hash=sha256:8895613bcc094d4a1b2dbe179d88d7fb4a15cee43c052e8885783fac397d91fe \ - --hash=sha256:88e2b3c14bdb32e440be531ade29d3c50a1a59cd4e51b1dd8b0865c54ea5d2e2 \ - --hash=sha256:8f8e709127c6c77446a8c0a8c8bf3c8ee706a06cd44b1e827c3e6a2ee6b8c098 \ - --hash=sha256:9cb4a35b3642fc5c005a6755a5d17c6c8b6bcb6981baf81cea8bfbc8903e8ba8 \ - 
--hash=sha256:9f90389693731ff1f659e55c7d1640e2ec43ff725cc61b04b2f9c6d8d017df6a \ - --hash=sha256:a09582f178759ee8128d9270cd1344154fd473bb77d94ce0aeb2a93ebf0feaf0 \ - --hash=sha256:a6a14b17d7e17fa0d207ac08642c8820f84f25ce17a442fd15e27ea18d67c59b \ - --hash=sha256:a72e8961a86d19bdb45851d8f1f08b041ea37d2bd8d4fd19903bc3083d80c896 \ - --hash=sha256:abd808f9c129ba2beda4cfc53bde801e5bcf9d6e0f22f095e45327c038bfe68e \ - --hash=sha256:ac0f5edd2360eea2f1daa9e26a41db02dd4b0451b48f7c318e217ee092a213e9 \ - --hash=sha256:b29ebffcf550f9da55bec9e02ad430c992a87e5f512cd63388abb76f1036d8d2 \ - --hash=sha256:b2ca4e77f9f47c55c194982e10f058db063937845bb2b7a86c84a6cfe0aefa8b \ - --hash=sha256:b7be2d771cdba2942e13215c4e340bfd76398e9227ad10402a8767ab1865d2e6 \ - --hash=sha256:b84834d0cf97e7d27dd5b7f3aca7b6e9263c56308ab9dc8aae9784abb774d404 \ - --hash=sha256:b86851a328eedc692acf81fb05444bdf1891747c25af7529e39ddafaf68a4f3f \ - --hash=sha256:bcb3ef43e58665bbda2fb198698fcae6776483e0c4a631aa5647806c25e02cc0 \ - --hash=sha256:c0f31130ebc2d37cdd8e44605fb5fa7ad59049298b3f745c74fa74c62fbfcfc4 \ - --hash=sha256:c6a164aa47843fb1b01e941d385aab7215563bb8816d80ff3a363a9f8448a8dc \ - --hash=sha256:d8a9d3ebe49f084ad71f9269834ceccbf398253c9fac910c4fd7053ff1386936 \ - --hash=sha256:db8e577c19c0fda0beb7e0d4e09e0ba74b1e4c092e0e40bfa12fe05b6f6d75ba \ - --hash=sha256:dc9b18bf40cc75f66f40a7379f6a9513244fe33c0e8aa72e2d56b0196a7ef872 \ - --hash=sha256:e09f3ff613345df5e8c3667da1d918f9149bd623cd9070c983c013792a9a62eb \ - --hash=sha256:e4108df7fe9b707191e55f33efbcb2d81928e10cea45527879a4749cbe472614 \ - --hash=sha256:e6024675e67af929088fda399b2094574609396b1decb609c55fa58b028a32a1 \ - --hash=sha256:e70f54f1796669ef691ca07d046cd81a29cb4deb1e5f942003f401c0c4a2695d \ - --hash=sha256:e715596e683d2ce000574bae5d07bd522c781a822866c20495e52520564f0969 \ - --hash=sha256:e760191dd42581e023a68b758769e2da259b5d52e3103c6060ddc02c9edb8d7b \ - --hash=sha256:ed86a35631f7bfbb28e108dd96773b9d5a6ce4811cf6ea468bb6a359b256b1e4 \ - --hash=sha256:ee07e47c12890ef248766a6e55bd38ebfb2bb8edd4142d56db91b21ea68b7627 \ - --hash=sha256:fa3a0128b152627161ce47201262d3140edb5a5c3da88d73a1b790a959126956 \ - --hash=sha256:fcc8eb6d5902bb1cf6dc4f187ee3ea80a1eba0a89aba40a5cb20a5087d961357 - # via cryptography cfgv==3.4.0 \ --hash=sha256:b7265b1f29fd3316bfcd2b330d63d024f2bfd8bcb8b0272f8e19a504856c48f9 \ --hash=sha256:e52591d4c5f5dead8e0f673fb16db7949d2cfb3f7da4582893288f0ded8fe560 @@ -309,31 +255,6 @@ coverage[toml]==7.3.2 \ # via # coverage # pytest-cov -cryptography==41.0.5 \ - --hash=sha256:0c327cac00f082013c7c9fb6c46b7cc9fa3c288ca702c74773968173bda421bf \ - --hash=sha256:0d2a6a598847c46e3e321a7aef8af1436f11c27f1254933746304ff014664d84 \ - --hash=sha256:227ec057cd32a41c6651701abc0328135e472ed450f47c2766f23267b792a88e \ - --hash=sha256:22892cc830d8b2c89ea60148227631bb96a7da0c1b722f2aac8824b1b7c0b6b8 \ - --hash=sha256:392cb88b597247177172e02da6b7a63deeff1937fa6fec3bbf902ebd75d97ec7 \ - --hash=sha256:3be3ca726e1572517d2bef99a818378bbcf7d7799d5372a46c79c29eb8d166c1 \ - --hash=sha256:573eb7128cbca75f9157dcde974781209463ce56b5804983e11a1c462f0f4e88 \ - --hash=sha256:580afc7b7216deeb87a098ef0674d6ee34ab55993140838b14c9b83312b37b86 \ - --hash=sha256:5a70187954ba7292c7876734183e810b728b4f3965fbe571421cb2434d279179 \ - --hash=sha256:73801ac9736741f220e20435f84ecec75ed70eda90f781a148f1bad546963d81 \ - --hash=sha256:7d208c21e47940369accfc9e85f0de7693d9a5d843c2509b3846b2db170dfd20 \ - --hash=sha256:8254962e6ba1f4d2090c44daf50a547cd5f0bf446dc658a8e5f8156cae0d8548 \ - 
--hash=sha256:88417bff20162f635f24f849ab182b092697922088b477a7abd6664ddd82291d \ - --hash=sha256:a48e74dad1fb349f3dc1d449ed88e0017d792997a7ad2ec9587ed17405667e6d \ - --hash=sha256:b948e09fe5fb18517d99994184854ebd50b57248736fd4c720ad540560174ec5 \ - --hash=sha256:c707f7afd813478e2019ae32a7c49cd932dd60ab2d2a93e796f68236b7e1fbf1 \ - --hash=sha256:d38e6031e113b7421db1de0c1b1f7739564a88f1684c6b89234fbf6c11b75147 \ - --hash=sha256:d3977f0e276f6f5bf245c403156673db103283266601405376f075c849a0b936 \ - --hash=sha256:da6a0ff8f1016ccc7477e6339e1d50ce5f59b88905585f77193ebd5068f1e797 \ - --hash=sha256:e270c04f4d9b5671ebcc792b3ba5d4488bf7c42c3c241a3748e2599776f29696 \ - --hash=sha256:e886098619d3815e0ad5790c973afeee2c0e6e04b4da90b88e6bd06e2a0b1b72 \ - --hash=sha256:ec3b055ff8f1dce8e6ef28f626e0972981475173d7973d63f271b29c8a2897da \ - --hash=sha256:fba1e91467c65fe64a82c689dc6cf58151158993b13eb7a7f3f4b7f395636723 - # via pyopenssl dependency-injector==4.41.0 \ --hash=sha256:02620454ee8101f77a317f3229935ce687480883d72a40858ff4b0c87c935cce \ --hash=sha256:059fbb48333148143e8667a5323d162628dfe27c386bd0ed3deeecfc390338bf \ @@ -469,10 +390,6 @@ iniconfig==2.0.0 \ --hash=sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3 \ --hash=sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374 # via pytest -javaobj-py3==0.4.3 \ - --hash=sha256:38f74db3a57e9998a9774e3614afb95cb396f139f29b3fdb130c5af554435259 \ - --hash=sha256:f6ac64cab49e282cf8171d4c479de413dedbbb1a69c64499648185f974080db3 - # via pyjks jmespath==1.0.1 \ --hash=sha256:02e2e4cc71b5bcab88332eebf907519190dd9e6e82107fa7f83b1003a6252980 \ --hash=sha256:90261b206d6defd58fdd5e85f478bf633a2901798906be2ad389150c5c60edbe @@ -573,54 +490,6 @@ pre-commit==3.4.0 \ --hash=sha256:6bbd5129a64cad4c0dfaeeb12cd8f7ea7e15b77028d985341478c8af3c759522 \ --hash=sha256:96d529a951f8b677f730a7212442027e8ba53f9b04d217c4c67dc56c393ad945 # via -r /workspace/requirements-dev-common.in -pyasn1==0.5.0 \ - --hash=sha256:87a2121042a1ac9358cabcaf1d07680ff97ee6404333bacca15f76aa8ad01a57 \ - --hash=sha256:97b7290ca68e62a832558ec3976f15cbf911bf5d7c7039d8b861c2a0ece69fde - # via - # pyasn1-modules - # pyjks -pyasn1-modules==0.3.0 \ - --hash=sha256:5bd01446b736eb9d31512a30d46c1ac3395d676c6f3cafa4c03eb54b9925631c \ - --hash=sha256:d3ccd6ed470d9ffbc716be08bd90efbd44d0734bc9303818f7336070984a162d - # via pyjks -pycparser==2.21 \ - --hash=sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9 \ - --hash=sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206 - # via cffi -pycryptodomex==3.19.0 \ - --hash=sha256:09c9401dc06fb3d94cb1ec23b4ea067a25d1f4c6b7b118ff5631d0b5daaab3cc \ - --hash=sha256:0b2f1982c5bc311f0aab8c293524b861b485d76f7c9ab2c3ac9a25b6f7655975 \ - --hash=sha256:136b284e9246b4ccf4f752d435c80f2c44fc2321c198505de1d43a95a3453b3c \ - --hash=sha256:1789d89f61f70a4cd5483d4dfa8df7032efab1118f8b9894faae03c967707865 \ - --hash=sha256:2126bc54beccbede6eade00e647106b4f4c21e5201d2b0a73e9e816a01c50905 \ - --hash=sha256:258c4233a3fe5a6341780306a36c6fb072ef38ce676a6d41eec3e591347919e8 \ - --hash=sha256:263de9a96d2fcbc9f5bd3a279f14ea0d5f072adb68ebd324987576ec25da084d \ - --hash=sha256:50cb18d4dd87571006fd2447ccec85e6cec0136632a550aa29226ba075c80644 \ - --hash=sha256:5b883e1439ab63af976656446fb4839d566bb096f15fc3c06b5a99cde4927188 \ - --hash=sha256:5d73e9fa3fe830e7b6b42afc49d8329b07a049a47d12e0ef9225f2fd220f19b2 \ - --hash=sha256:61056a1fd3254f6f863de94c233b30dd33bc02f8c935b2000269705f1eeeffa4 \ - 
--hash=sha256:67c8eb79ab33d0fbcb56842992298ddb56eb6505a72369c20f60bc1d2b6fb002 \ - --hash=sha256:6e45bb4635b3c4e0a00ca9df75ef6295838c85c2ac44ad882410cb631ed1eeaa \ - --hash=sha256:7cb51096a6a8d400724104db8a7e4f2206041a1f23e58924aa3d8d96bcb48338 \ - --hash=sha256:800a2b05cfb83654df80266692f7092eeefe2a314fa7901dcefab255934faeec \ - --hash=sha256:8df69e41f7e7015a90b94d1096ec3d8e0182e73449487306709ec27379fff761 \ - --hash=sha256:917033016ecc23c8933205585a0ab73e20020fdf671b7cd1be788a5c4039840b \ - --hash=sha256:a12144d785518f6491ad334c75ccdc6ad52ea49230b4237f319dbb7cef26f464 \ - --hash=sha256:a3866d68e2fc345162b1b9b83ef80686acfe5cec0d134337f3b03950a0a8bf56 \ - --hash=sha256:a588a1cb7781da9d5e1c84affd98c32aff9c89771eac8eaa659d2760666f7139 \ - --hash=sha256:a77b79852175064c822b047fee7cf5a1f434f06ad075cc9986aa1c19a0c53eb0 \ - --hash=sha256:af83a554b3f077564229865c45af0791be008ac6469ef0098152139e6bd4b5b6 \ - --hash=sha256:b801216c48c0886742abf286a9a6b117e248ca144d8ceec1f931ce2dd0c9cb40 \ - --hash=sha256:bfb040b5dda1dff1e197d2ef71927bd6b8bfcb9793bc4dfe0bb6df1e691eaacb \ - --hash=sha256:c01678aee8ac0c1a461cbc38ad496f953f9efcb1fa19f5637cbeba7544792a53 \ - --hash=sha256:c74eb1f73f788facece7979ce91594dc177e1a9b5d5e3e64697dd58299e5cb4d \ - --hash=sha256:c9a68a2f7bd091ccea54ad3be3e9d65eded813e6d79fdf4cc3604e26cdd6384f \ - --hash=sha256:d4dd3b381ff5a5907a3eb98f5f6d32c64d319a840278ceea1dcfcc65063856f3 \ - --hash=sha256:e8e5ecbd4da4157889fce8ba49da74764dd86c891410bfd6b24969fa46edda51 \ - --hash=sha256:eb2fc0ec241bf5e5ef56c8fbec4a2634d631e4c4f616a59b567947a0f35ad83c \ - --hash=sha256:edbe083c299835de7e02c8aa0885cb904a75087d35e7bab75ebe5ed336e8c3e2 \ - --hash=sha256:ff64fd720def623bf64d8776f8d0deada1cc1bf1ec3c1f9d6f5bb5bd098d034f - # via pyjks pydantic==2.4.2 \ --hash=sha256:94f336138093a5d7f426aac732dcfe7ab4eb4da243c88f891d65deb4a2556ee7 \ --hash=sha256:bc3ddf669d234f4220e6e1c4d96b061abe0998185a8d7855c0126782b7abc8c1 @@ -739,10 +608,6 @@ pydantic-settings==2.0.3 \ --hash=sha256:962dc3672495aad6ae96a4390fac7e593591e144625e5112d359f8f67fb75945 \ --hash=sha256:ddd907b066622bd67603b75e2ff791875540dc485b7307c4fffc015719da8625 # via hexkit (pyproject.toml) -pyjks==20.0.0 \ - --hash=sha256:0378cec15fb11b2ed27ba54dad9fd987d48e6f62f49fcff138f5f7a8b312b044 \ - --hash=sha256:394dee142ecff6b1adc36f64356e5584732f1859575aa03b9cf5d5541a9e3460 - # via hexkit (pyproject.toml) pymongo==4.5.0 \ --hash=sha256:076afa0a4a96ca9f77fec0e4a0d241200b3b3a1766f8d7be9a905ecf59a7416b \ --hash=sha256:08819da7864f9b8d4a95729b2bea5fffed08b63d3b9c15b4fea47de655766cf5 \ @@ -828,10 +693,6 @@ pymongo==4.5.0 \ # via # motor # testcontainers -pyopenssl==23.3.0 \ - --hash=sha256:6756834481d9ed5470f4a9393455154bc92fe7a64b7bc6ee2c804e78c52099b2 \ - --hash=sha256:6b2cba5cc46e822750ec3e5a81ee12819850b11303630d575e98108a079c2b12 - # via hexkit (pyproject.toml) pyproject-hooks==1.0.0 \ --hash=sha256:283c11acd6b928d2f6a7c73fa0d01cb2bdc5f07c57a2eeb6e83d5e56b97976f8 \ --hash=sha256:f271b298b97f5955d53fb12b72c1fb1948c22c1a6b70b315c54cedaca0264ef5 @@ -1117,9 +978,6 @@ tornado==6.3.3 \ --hash=sha256:ceb917a50cd35882b57600709dd5421a418c29ddc852da8bcdab1f0db33406b0 \ --hash=sha256:e7d8db41c0181c80d76c982aacc442c0783a2c54d6400fe028954201a2e032fe # via snakeviz -twofish==0.3.0 \ - --hash=sha256:b09d8bb50d33b23ff34cafb1f9209f858f752935c6a5c901efb92a41acb830fa - # via pyjks typer==0.9.0 \ --hash=sha256:50922fd79aea2f4751a8e0408ff10d2662bd0c8bbfa84755a699f3bada2978b2 \ --hash=sha256:5d96d986a21493606a358cae4461bd8cdf83cbf33a5aa950ae629ca3b51467ee diff --git 
a/requirements.txt b/requirements.txt index 02c22ca1..bbf5063f 100644 --- a/requirements.txt +++ b/requirements.txt @@ -2,7 +2,7 @@ # This file is autogenerated by pip-compile with Python 3.9 # by the following command: # -# pip-compile --all-extras --constraint=/workspace/requirements-dev.txt --generate-hashes --output-file=/workspace/requirements.txt /tmp/tmp3yb_4wf4/pyproject.toml +# pip-compile --all-extras --constraint=/workspace/requirements-dev.txt --generate-hashes --output-file=/workspace/requirements.txt /tmp/tmpye1zf3r4/pyproject.toml # aiokafka==0.8.1 \ --hash=sha256:1e24839088fd6d3ff481cc09a48ea487b997328df11630bc0a1b88255edbcfe9 \ @@ -72,62 +72,6 @@ certifi==2023.7.22 \ # via # -c /workspace/requirements-dev.txt # requests -cffi==1.16.0 \ - --hash=sha256:0c9ef6ff37e974b73c25eecc13952c55bceed9112be2d9d938ded8e856138bcc \ - --hash=sha256:131fd094d1065b19540c3d72594260f118b231090295d8c34e19a7bbcf2e860a \ - --hash=sha256:1b8ebc27c014c59692bb2664c7d13ce7a6e9a629be20e54e7271fa696ff2b417 \ - --hash=sha256:2c56b361916f390cd758a57f2e16233eb4f64bcbeee88a4881ea90fca14dc6ab \ - --hash=sha256:2d92b25dbf6cae33f65005baf472d2c245c050b1ce709cc4588cdcdd5495b520 \ - --hash=sha256:31d13b0f99e0836b7ff893d37af07366ebc90b678b6664c955b54561fc36ef36 \ - --hash=sha256:32c68ef735dbe5857c810328cb2481e24722a59a2003018885514d4c09af9743 \ - --hash=sha256:3686dffb02459559c74dd3d81748269ffb0eb027c39a6fc99502de37d501faa8 \ - --hash=sha256:582215a0e9adbe0e379761260553ba11c58943e4bbe9c36430c4ca6ac74b15ed \ - --hash=sha256:5b50bf3f55561dac5438f8e70bfcdfd74543fd60df5fa5f62d94e5867deca684 \ - --hash=sha256:5bf44d66cdf9e893637896c7faa22298baebcd18d1ddb6d2626a6e39793a1d56 \ - --hash=sha256:6602bc8dc6f3a9e02b6c22c4fc1e47aa50f8f8e6d3f78a5e16ac33ef5fefa324 \ - --hash=sha256:673739cb539f8cdaa07d92d02efa93c9ccf87e345b9a0b556e3ecc666718468d \ - --hash=sha256:68678abf380b42ce21a5f2abde8efee05c114c2fdb2e9eef2efdb0257fba1235 \ - --hash=sha256:68e7c44931cc171c54ccb702482e9fc723192e88d25a0e133edd7aff8fcd1f6e \ - --hash=sha256:6b3d6606d369fc1da4fd8c357d026317fbb9c9b75d36dc16e90e84c26854b088 \ - --hash=sha256:748dcd1e3d3d7cd5443ef03ce8685043294ad6bd7c02a38d1bd367cfd968e000 \ - --hash=sha256:7651c50c8c5ef7bdb41108b7b8c5a83013bfaa8a935590c5d74627c047a583c7 \ - --hash=sha256:7b78010e7b97fef4bee1e896df8a4bbb6712b7f05b7ef630f9d1da00f6444d2e \ - --hash=sha256:7e61e3e4fa664a8588aa25c883eab612a188c725755afff6289454d6362b9673 \ - --hash=sha256:80876338e19c951fdfed6198e70bc88f1c9758b94578d5a7c4c91a87af3cf31c \ - --hash=sha256:8895613bcc094d4a1b2dbe179d88d7fb4a15cee43c052e8885783fac397d91fe \ - --hash=sha256:88e2b3c14bdb32e440be531ade29d3c50a1a59cd4e51b1dd8b0865c54ea5d2e2 \ - --hash=sha256:8f8e709127c6c77446a8c0a8c8bf3c8ee706a06cd44b1e827c3e6a2ee6b8c098 \ - --hash=sha256:9cb4a35b3642fc5c005a6755a5d17c6c8b6bcb6981baf81cea8bfbc8903e8ba8 \ - --hash=sha256:9f90389693731ff1f659e55c7d1640e2ec43ff725cc61b04b2f9c6d8d017df6a \ - --hash=sha256:a09582f178759ee8128d9270cd1344154fd473bb77d94ce0aeb2a93ebf0feaf0 \ - --hash=sha256:a6a14b17d7e17fa0d207ac08642c8820f84f25ce17a442fd15e27ea18d67c59b \ - --hash=sha256:a72e8961a86d19bdb45851d8f1f08b041ea37d2bd8d4fd19903bc3083d80c896 \ - --hash=sha256:abd808f9c129ba2beda4cfc53bde801e5bcf9d6e0f22f095e45327c038bfe68e \ - --hash=sha256:ac0f5edd2360eea2f1daa9e26a41db02dd4b0451b48f7c318e217ee092a213e9 \ - --hash=sha256:b29ebffcf550f9da55bec9e02ad430c992a87e5f512cd63388abb76f1036d8d2 \ - --hash=sha256:b2ca4e77f9f47c55c194982e10f058db063937845bb2b7a86c84a6cfe0aefa8b \ - 
--hash=sha256:b7be2d771cdba2942e13215c4e340bfd76398e9227ad10402a8767ab1865d2e6 \ - --hash=sha256:b84834d0cf97e7d27dd5b7f3aca7b6e9263c56308ab9dc8aae9784abb774d404 \ - --hash=sha256:b86851a328eedc692acf81fb05444bdf1891747c25af7529e39ddafaf68a4f3f \ - --hash=sha256:bcb3ef43e58665bbda2fb198698fcae6776483e0c4a631aa5647806c25e02cc0 \ - --hash=sha256:c0f31130ebc2d37cdd8e44605fb5fa7ad59049298b3f745c74fa74c62fbfcfc4 \ - --hash=sha256:c6a164aa47843fb1b01e941d385aab7215563bb8816d80ff3a363a9f8448a8dc \ - --hash=sha256:d8a9d3ebe49f084ad71f9269834ceccbf398253c9fac910c4fd7053ff1386936 \ - --hash=sha256:db8e577c19c0fda0beb7e0d4e09e0ba74b1e4c092e0e40bfa12fe05b6f6d75ba \ - --hash=sha256:dc9b18bf40cc75f66f40a7379f6a9513244fe33c0e8aa72e2d56b0196a7ef872 \ - --hash=sha256:e09f3ff613345df5e8c3667da1d918f9149bd623cd9070c983c013792a9a62eb \ - --hash=sha256:e4108df7fe9b707191e55f33efbcb2d81928e10cea45527879a4749cbe472614 \ - --hash=sha256:e6024675e67af929088fda399b2094574609396b1decb609c55fa58b028a32a1 \ - --hash=sha256:e70f54f1796669ef691ca07d046cd81a29cb4deb1e5f942003f401c0c4a2695d \ - --hash=sha256:e715596e683d2ce000574bae5d07bd522c781a822866c20495e52520564f0969 \ - --hash=sha256:e760191dd42581e023a68b758769e2da259b5d52e3103c6060ddc02c9edb8d7b \ - --hash=sha256:ed86a35631f7bfbb28e108dd96773b9d5a6ce4811cf6ea468bb6a359b256b1e4 \ - --hash=sha256:ee07e47c12890ef248766a6e55bd38ebfb2bb8edd4142d56db91b21ea68b7627 \ - --hash=sha256:fa3a0128b152627161ce47201262d3140edb5a5c3da88d73a1b790a959126956 \ - --hash=sha256:fcc8eb6d5902bb1cf6dc4f187ee3ea80a1eba0a89aba40a5cb20a5087d961357 - # via - # -c /workspace/requirements-dev.txt - # cryptography charset-normalizer==3.3.0 \ --hash=sha256:02673e456dc5ab13659f85196c534dc596d4ef260e4d86e856c3b2773ce09843 \ --hash=sha256:02af06682e3590ab952599fbadac535ede5d60d78848e555aa58d0c0abbde786 \ @@ -222,33 +166,6 @@ charset-normalizer==3.3.0 \ # via # -c /workspace/requirements-dev.txt # requests -cryptography==41.0.5 \ - --hash=sha256:0c327cac00f082013c7c9fb6c46b7cc9fa3c288ca702c74773968173bda421bf \ - --hash=sha256:0d2a6a598847c46e3e321a7aef8af1436f11c27f1254933746304ff014664d84 \ - --hash=sha256:227ec057cd32a41c6651701abc0328135e472ed450f47c2766f23267b792a88e \ - --hash=sha256:22892cc830d8b2c89ea60148227631bb96a7da0c1b722f2aac8824b1b7c0b6b8 \ - --hash=sha256:392cb88b597247177172e02da6b7a63deeff1937fa6fec3bbf902ebd75d97ec7 \ - --hash=sha256:3be3ca726e1572517d2bef99a818378bbcf7d7799d5372a46c79c29eb8d166c1 \ - --hash=sha256:573eb7128cbca75f9157dcde974781209463ce56b5804983e11a1c462f0f4e88 \ - --hash=sha256:580afc7b7216deeb87a098ef0674d6ee34ab55993140838b14c9b83312b37b86 \ - --hash=sha256:5a70187954ba7292c7876734183e810b728b4f3965fbe571421cb2434d279179 \ - --hash=sha256:73801ac9736741f220e20435f84ecec75ed70eda90f781a148f1bad546963d81 \ - --hash=sha256:7d208c21e47940369accfc9e85f0de7693d9a5d843c2509b3846b2db170dfd20 \ - --hash=sha256:8254962e6ba1f4d2090c44daf50a547cd5f0bf446dc658a8e5f8156cae0d8548 \ - --hash=sha256:88417bff20162f635f24f849ab182b092697922088b477a7abd6664ddd82291d \ - --hash=sha256:a48e74dad1fb349f3dc1d449ed88e0017d792997a7ad2ec9587ed17405667e6d \ - --hash=sha256:b948e09fe5fb18517d99994184854ebd50b57248736fd4c720ad540560174ec5 \ - --hash=sha256:c707f7afd813478e2019ae32a7c49cd932dd60ab2d2a93e796f68236b7e1fbf1 \ - --hash=sha256:d38e6031e113b7421db1de0c1b1f7739564a88f1684c6b89234fbf6c11b75147 \ - --hash=sha256:d3977f0e276f6f5bf245c403156673db103283266601405376f075c849a0b936 \ - --hash=sha256:da6a0ff8f1016ccc7477e6339e1d50ce5f59b88905585f77193ebd5068f1e797 \ - 
--hash=sha256:e270c04f4d9b5671ebcc792b3ba5d4488bf7c42c3c241a3748e2599776f29696 \ - --hash=sha256:e886098619d3815e0ad5790c973afeee2c0e6e04b4da90b88e6bd06e2a0b1b72 \ - --hash=sha256:ec3b055ff8f1dce8e6ef28f626e0972981475173d7973d63f271b29c8a2897da \ - --hash=sha256:fba1e91467c65fe64a82c689dc6cf58151158993b13eb7a7f3f4b7f395636723 - # via - # -c /workspace/requirements-dev.txt - # pyopenssl dependency-injector==4.41.0 \ --hash=sha256:02620454ee8101f77a317f3229935ce687480883d72a40858ff4b0c87c935cce \ --hash=sha256:059fbb48333148143e8667a5323d162628dfe27c386bd0ed3deeecfc390338bf \ @@ -347,12 +264,6 @@ idna==3.4 \ # via # -c /workspace/requirements-dev.txt # requests -javaobj-py3==0.4.3 \ - --hash=sha256:38f74db3a57e9998a9774e3614afb95cb396f139f29b3fdb130c5af554435259 \ - --hash=sha256:f6ac64cab49e282cf8171d4c479de413dedbbb1a69c64499648185f974080db3 - # via - # -c /workspace/requirements-dev.txt - # pyjks jmespath==1.0.1 \ --hash=sha256:02e2e4cc71b5bcab88332eebf907519190dd9e6e82107fa7f83b1003a6252980 \ --hash=sha256:90261b206d6defd58fdd5e85f478bf633a2901798906be2ad389150c5c60edbe @@ -393,61 +304,6 @@ packaging==23.2 \ # aiokafka # deprecation # docker -pyasn1==0.5.0 \ - --hash=sha256:87a2121042a1ac9358cabcaf1d07680ff97ee6404333bacca15f76aa8ad01a57 \ - --hash=sha256:97b7290ca68e62a832558ec3976f15cbf911bf5d7c7039d8b861c2a0ece69fde - # via - # -c /workspace/requirements-dev.txt - # pyasn1-modules - # pyjks -pyasn1-modules==0.3.0 \ - --hash=sha256:5bd01446b736eb9d31512a30d46c1ac3395d676c6f3cafa4c03eb54b9925631c \ - --hash=sha256:d3ccd6ed470d9ffbc716be08bd90efbd44d0734bc9303818f7336070984a162d - # via - # -c /workspace/requirements-dev.txt - # pyjks -pycparser==2.21 \ - --hash=sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9 \ - --hash=sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206 - # via - # -c /workspace/requirements-dev.txt - # cffi -pycryptodomex==3.19.0 \ - --hash=sha256:09c9401dc06fb3d94cb1ec23b4ea067a25d1f4c6b7b118ff5631d0b5daaab3cc \ - --hash=sha256:0b2f1982c5bc311f0aab8c293524b861b485d76f7c9ab2c3ac9a25b6f7655975 \ - --hash=sha256:136b284e9246b4ccf4f752d435c80f2c44fc2321c198505de1d43a95a3453b3c \ - --hash=sha256:1789d89f61f70a4cd5483d4dfa8df7032efab1118f8b9894faae03c967707865 \ - --hash=sha256:2126bc54beccbede6eade00e647106b4f4c21e5201d2b0a73e9e816a01c50905 \ - --hash=sha256:258c4233a3fe5a6341780306a36c6fb072ef38ce676a6d41eec3e591347919e8 \ - --hash=sha256:263de9a96d2fcbc9f5bd3a279f14ea0d5f072adb68ebd324987576ec25da084d \ - --hash=sha256:50cb18d4dd87571006fd2447ccec85e6cec0136632a550aa29226ba075c80644 \ - --hash=sha256:5b883e1439ab63af976656446fb4839d566bb096f15fc3c06b5a99cde4927188 \ - --hash=sha256:5d73e9fa3fe830e7b6b42afc49d8329b07a049a47d12e0ef9225f2fd220f19b2 \ - --hash=sha256:61056a1fd3254f6f863de94c233b30dd33bc02f8c935b2000269705f1eeeffa4 \ - --hash=sha256:67c8eb79ab33d0fbcb56842992298ddb56eb6505a72369c20f60bc1d2b6fb002 \ - --hash=sha256:6e45bb4635b3c4e0a00ca9df75ef6295838c85c2ac44ad882410cb631ed1eeaa \ - --hash=sha256:7cb51096a6a8d400724104db8a7e4f2206041a1f23e58924aa3d8d96bcb48338 \ - --hash=sha256:800a2b05cfb83654df80266692f7092eeefe2a314fa7901dcefab255934faeec \ - --hash=sha256:8df69e41f7e7015a90b94d1096ec3d8e0182e73449487306709ec27379fff761 \ - --hash=sha256:917033016ecc23c8933205585a0ab73e20020fdf671b7cd1be788a5c4039840b \ - --hash=sha256:a12144d785518f6491ad334c75ccdc6ad52ea49230b4237f319dbb7cef26f464 \ - --hash=sha256:a3866d68e2fc345162b1b9b83ef80686acfe5cec0d134337f3b03950a0a8bf56 \ - 
--hash=sha256:a588a1cb7781da9d5e1c84affd98c32aff9c89771eac8eaa659d2760666f7139 \ - --hash=sha256:a77b79852175064c822b047fee7cf5a1f434f06ad075cc9986aa1c19a0c53eb0 \ - --hash=sha256:af83a554b3f077564229865c45af0791be008ac6469ef0098152139e6bd4b5b6 \ - --hash=sha256:b801216c48c0886742abf286a9a6b117e248ca144d8ceec1f931ce2dd0c9cb40 \ - --hash=sha256:bfb040b5dda1dff1e197d2ef71927bd6b8bfcb9793bc4dfe0bb6df1e691eaacb \ - --hash=sha256:c01678aee8ac0c1a461cbc38ad496f953f9efcb1fa19f5637cbeba7544792a53 \ - --hash=sha256:c74eb1f73f788facece7979ce91594dc177e1a9b5d5e3e64697dd58299e5cb4d \ - --hash=sha256:c9a68a2f7bd091ccea54ad3be3e9d65eded813e6d79fdf4cc3604e26cdd6384f \ - --hash=sha256:d4dd3b381ff5a5907a3eb98f5f6d32c64d319a840278ceea1dcfcc65063856f3 \ - --hash=sha256:e8e5ecbd4da4157889fce8ba49da74764dd86c891410bfd6b24969fa46edda51 \ - --hash=sha256:eb2fc0ec241bf5e5ef56c8fbec4a2634d631e4c4f616a59b567947a0f35ad83c \ - --hash=sha256:edbe083c299835de7e02c8aa0885cb904a75087d35e7bab75ebe5ed336e8c3e2 \ - --hash=sha256:ff64fd720def623bf64d8776f8d0deada1cc1bf1ec3c1f9d6f5bb5bd098d034f - # via - # -c /workspace/requirements-dev.txt - # pyjks pydantic==2.4.2 \ --hash=sha256:94f336138093a5d7f426aac732dcfe7ab4eb4da243c88f891d65deb4a2556ee7 \ --hash=sha256:bc3ddf669d234f4220e6e1c4d96b061abe0998185a8d7855c0126782b7abc8c1 @@ -571,12 +427,6 @@ pydantic-settings==2.0.3 \ # via # -c /workspace/requirements-dev.txt # hexkit (pyproject.toml) -pyjks==20.0.0 \ - --hash=sha256:0378cec15fb11b2ed27ba54dad9fd987d48e6f62f49fcff138f5f7a8b312b044 \ - --hash=sha256:394dee142ecff6b1adc36f64356e5584732f1859575aa03b9cf5d5541a9e3460 - # via - # -c /workspace/requirements-dev.txt - # hexkit (pyproject.toml) pymongo==4.5.0 \ --hash=sha256:076afa0a4a96ca9f77fec0e4a0d241200b3b3a1766f8d7be9a905ecf59a7416b \ --hash=sha256:08819da7864f9b8d4a95729b2bea5fffed08b63d3b9c15b4fea47de655766cf5 \ @@ -663,12 +513,6 @@ pymongo==4.5.0 \ # -c /workspace/requirements-dev.txt # motor # testcontainers -pyopenssl==23.3.0 \ - --hash=sha256:6756834481d9ed5470f4a9393455154bc92fe7a64b7bc6ee2c804e78c52099b2 \ - --hash=sha256:6b2cba5cc46e822750ec3e5a81ee12819850b11303630d575e98108a079c2b12 - # via - # -c /workspace/requirements-dev.txt - # hexkit (pyproject.toml) python-dateutil==2.8.2 \ --hash=sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86 \ --hash=sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9 @@ -871,11 +715,6 @@ testcontainers[kafka,mongo]==3.7.1 \ # -c /workspace/requirements-dev.txt # hexkit (pyproject.toml) # testcontainers -twofish==0.3.0 \ - --hash=sha256:b09d8bb50d33b23ff34cafb1f9209f858f752935c6a5c901efb92a41acb830fa - # via - # -c /workspace/requirements-dev.txt - # pyjks typing-extensions==4.8.0 \ --hash=sha256:8f92fc8806f9a6b641eaa5318da32b44d401efaac0f6678c9bc448ba3605faa0 \ --hash=sha256:df8e4339e9cb77357558cbdbceca33c303714cf861d1eef15e1070055ae8b7ef diff --git a/src/hexkit/providers/akafka/testutils.py b/src/hexkit/providers/akafka/testutils.py index d4206a1f..340f462e 100644 --- a/src/hexkit/providers/akafka/testutils.py +++ b/src/hexkit/providers/akafka/testutils.py @@ -19,21 +19,16 @@ Please note, only use for testing purposes. 
""" import json -import os from collections.abc import AsyncGenerator, Sequence from contextlib import asynccontextmanager from dataclasses import dataclass from functools import partial -from pathlib import Path from typing import Optional, Union -import jks import pytest_asyncio from aiokafka import AIOKafkaConsumer, TopicPartition -from cryptography import x509 from kafka import KafkaAdminClient from kafka.errors import KafkaError -from OpenSSL import crypto from testcontainers.kafka import KafkaContainer from hexkit.custom_types import Ascii, JsonObject, PytestScope @@ -403,136 +398,12 @@ async def expect_events( ) -def generate_ssl_certificates(): # noqa: PLR0915 - """Generate ssl keys""" - ca_key = crypto.PKey() - ca_key.generate_key(crypto.TYPE_RSA, 2048) - - # generate a self signed certificate - ca_cert = crypto.X509() - ca_cert.get_subject().CN = "GHGA Test Certificate Authority" - ca_cert.set_serial_number(x509.random_serial_number()) - ca_cert.gmtime_adj_notBefore(0) - ca_cert.gmtime_adj_notAfter(365 * 24 * 60 * 60) - ca_cert.set_issuer(ca_cert.get_subject()) - ca_cert.set_pubkey(ca_key) - ca_cert.sign(ca_key, "sha256") - - server_key = crypto.PKey() - server_key.generate_key(crypto.TYPE_RSA, 2048) - - # generate a self signed certificate - server_cert = crypto.X509() - server_cert.get_subject().CN = "Broker 1" - server_cert.set_serial_number(x509.random_serial_number()) - server_cert.gmtime_adj_notBefore(0) - server_cert.gmtime_adj_notAfter(365 * 24 * 60 * 60) - server_cert.set_issuer(ca_cert.get_subject()) - server_cert.set_pubkey(server_key) - server_cert.sign(ca_key, "sha256") - - # dumping the ca key and cert to ASN1 - dumped_ca_cert = crypto.dump_certificate(crypto.FILETYPE_ASN1, ca_cert) - dumped_ca_key = crypto.dump_privatekey(crypto.FILETYPE_ASN1, ca_key) - # dumping the server key and cert to ASN1 - dumped_server_cert = crypto.dump_certificate(crypto.FILETYPE_ASN1, server_cert) - dumped_server_key = crypto.dump_privatekey(crypto.FILETYPE_ASN1, server_key) - - # creating a private key entry - ca_pke = jks.PrivateKeyEntry.new( - "self signed cert", [dumped_ca_cert], dumped_ca_key, "rsa_raw" - ) - - # creating a private key entry - server_pke = jks.PrivateKeyEntry.new( - "self signed cert", [dumped_server_cert], dumped_server_key, "rsa_raw" - ) - # if we want the private key entry to have a unique password, we can encrypt it beforehand - # if it is not ecrypted when saved, it will be encrypted with the same password as the keystore - # pke.encrypt("") - - # os.mkdir("ssl") - ssl_dir = Path("/workspace/.ssl") - Path(ssl_dir).mkdir(exist_ok=True) - ca_cert_path = os.path.abspath(f"{ssl_dir}/ca-cert.crt") - ca_key_path = os.path.abspath(f"{ssl_dir}/ca-private.key") - ca_jks_path = os.path.abspath(f"{ssl_dir}/ca.truststore.jks") - server_cert_path = os.path.abspath(f"{ssl_dir}/server-cert.crt") - server_key_path = os.path.abspath(f"{ssl_dir}/server-private.key") - server_jks_path = os.path.abspath(f"{ssl_dir}/server.keystore.jks") - cred_path = os.path.abspath(f"{ssl_dir}/password.txt") - keystore_password = "password" # noqa: S105 - - with open(ca_cert_path, "w") as f: - f.write(crypto.dump_certificate(crypto.FILETYPE_PEM, ca_cert).decode("utf-8")) - - with open(ca_key_path, "w") as f: - f.write(crypto.dump_privatekey(crypto.FILETYPE_PEM, ca_key).decode("utf-8")) - with open(server_cert_path, "w") as f: - f.write( - crypto.dump_certificate(crypto.FILETYPE_PEM, server_cert).decode("utf-8") - ) - - with open(server_key_path, "w") as f: - 
f.write(crypto.dump_privatekey(crypto.FILETYPE_PEM, server_key).decode("utf-8")) - - with open(cred_path, "w") as f: - f.write(keystore_password) - - # creating a jks keystore with the private key, and saving it - ca_keystore = jks.KeyStore.new("jks", [ca_pke]) - ca_keystore.save(ca_jks_path, keystore_password) - - # creating a jks keystore with the private key, and saving it - server_keystore = jks.KeyStore.new("jks", [server_pke]) - server_keystore.save(server_jks_path, keystore_password) - - return { - "ssl_dir": ssl_dir, - "ssl_certfile": ca_cert_path, - "ssl_keyfile": ca_key_path, - "ssl_cafile": ca_cert_path, - "ssl_password": cred_path, - "keystore_location": ca_jks_path, - } - - -class KafkaSSLContainer(KafkaContainer): - """KafkaSSLContainer""" - - def __init__(self, *args, **kwargs): - super().__init__(*args, **kwargs) - self.ssl_config = generate_ssl_certificates() - - self.with_volume_mapping( - "/Users/w620-admin/workspace/ghga/hexkit/.ssl", "/etc/kafka/secrets" - ) - - self.with_env("KAFKA_LISTENERS", "SSL://0.0.0.0:9093,BROKER://0.0.0.0:9092") - self.with_env( - "KAFKA_LISTENER_SECURITY_PROTOCOL_MAP", "SSL:SSL,BROKER:PLAINTEXT" - ) - self.with_env("KAFKA_SECURITY_PROTOCOL", "SSL") - - self.with_env("KAFKA_SSL_KEYSTORE_FILENAME", "server.keystore.jks") - self.with_env("KAFKA_SSL_KEYSTORE_CREDENTIALS", "password.txt") - self.with_env("KAFKA_SSL_KEY_CREDENTIALS", "password.txt") - - self.with_env("KAFKA_SSL_TRUSTSTORE_FILENAME", "ca.truststore.jks") - self.with_env("KAFKA_SSL_TRUSTSTORE_CREDENTIALS", "password.txt") - - self.with_env("KAFKA_SSL_ENDPOINT_IDENTIFICATION_ALGORITHM", " ") - self.with_env("KAFKA_SSL_CLIENT_AUTH", "requested") - - self.with_env("KAFKA_INTER_BROKER_LISTENER_NAME", "SSL") - - async def kafka_fixture_function() -> AsyncGenerator[KafkaFixture, None]: """Pytest fixture for tests depending on the Kafka-base providers. 
**Do not call directly** Instead, use get_kafka_fixture() """ - with KafkaSSLContainer(image="confluentinc/cp-kafka:5.4.9-1-deb8") as kafka: + with KafkaContainer(image="confluentinc/cp-kafka:5.4.9-1-deb8") as kafka: kafka_servers = [kafka.get_bootstrap_server()] config = KafkaConfig( # type: ignore service_name="test_publisher", diff --git a/tests/integration/test_akafka.py b/tests/integration/test_akafka.py index fd1dfaa8..aa1049c8 100644 --- a/tests/integration/test_akafka.py +++ b/tests/integration/test_akafka.py @@ -83,13 +83,7 @@ async def test_kafka_event_subscriber(kafka_fixture: KafkaFixture): # noqa: F81 service_name="event_subscriber", service_instance_id="1", kafka_servers=kafka_fixture.kafka_servers, - security_protocol="SSL", - ssl_cafile="/workspace/.ssl/ca-cert.crt" - # # ssl_certfile="/workspace/.ssl/kafka-server-cert.crt", - # # ssl_keyfile="/workspace/.ssl/kafka-server-private.key", - # ssl_password="password", ) - async with KafkaEventSubscriber.construct( config=config, translator=translator, From 263de338757a289f3d1c12aaf178595a9c6d2242 Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Tue, 7 Nov 2023 18:37:55 +0000 Subject: [PATCH 11/29] Add tests for Kafka SSL access --- .devcontainer/devcontainer.json | 3 +- .devcontainer/docker-compose.yml | 30 ++++--- .devcontainer/kafka_secrets/.gitignore | 7 ++ .devcontainer/kafka_secrets/create_secrets.sh | 72 +++++++++++++++++ .github/workflows/tests.yaml | 16 ++++ .static_files_ignore | 2 +- src/hexkit/providers/akafka/provider.py | 28 ++----- tests/integration/test_akafka.py | 79 ++++++++++++++++++- 8 files changed, 202 insertions(+), 35 deletions(-) create mode 100644 .devcontainer/kafka_secrets/.gitignore create mode 100755 .devcontainer/kafka_secrets/create_secrets.sh diff --git a/.devcontainer/devcontainer.json b/.devcontainer/devcontainer.json index 89ff58f8..e5d28cad 100644 --- a/.devcontainer/devcontainer.json +++ b/.devcontainer/devcontainer.json @@ -59,6 +59,7 @@ }, // Use 'forwardPorts' to make a list of ports inside the container available locally. // "forwardPorts": [5000, 5432], + "initializeCommand": ".devcontainer/kafka_secrets/create_secrets.sh", // Use 'postCreateCommand' to run commands after the container is created. "postCreateCommand": "dev_install", // Comment out connect as root instead. More info: https://aka.ms/vscode-remote/containers/non-root. 
@@ -72,4 +73,4 @@ // details can be found here: https://github.com/devcontainers/features/tree/main/src/docker-outside-of-docker "ghcr.io/devcontainers/features/docker-outside-of-docker:1": {} } -} +} \ No newline at end of file diff --git a/.devcontainer/docker-compose.yml b/.devcontainer/docker-compose.yml index 44a67e76..7f066b04 100644 --- a/.devcontainer/docker-compose.yml +++ b/.devcontainer/docker-compose.yml @@ -23,30 +23,39 @@ services: user: vscode zookeeper: - image: confluentinc/cp-zookeeper:7.3.1 + image: confluentinc/cp-zookeeper:7.5.1 # used ports: 2181 environment: + ZOOKEEPER_SERVER_ID: 1 ZOOKEEPER_CLIENT_PORT: 2181 ZOOKEEPER_TICK_TIME: 2000 kafka: - image: confluentinc/cp-server:7.3.1 + image: confluentinc/cp-server:7.5.1 restart: always depends_on: - zookeeper - # used ports: 9092 + # used ports: 9092, 19092 environment: KAFKA_BROKER_ID: 1 KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181 - KAFKA_ADVERTISED_LISTENERS: PLAINTEXT://kafka:9092,PLAINTEXT_HOST://localhost:29092 - KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: PLAINTEXT:PLAINTEXT,PLAINTEXT_HOST:PLAINTEXT + KAFKA_ADVERTISED_LISTENERS: PLAINTEXT://kafka:9092,SSL://kafka:19092 KAFKA_INTER_BROKER_LISTENER_NAME: PLAINTEXT + KAFKA_SSL_KEYSTORE_FILENAME: kafka.keystore.jks + KAFKA_SSL_KEYSTORE_CREDENTIALS: pwd.txt + KAFKA_SSL_KEY_CREDENTIALS: pwd.txt + KAFKA_SSL_TRUSTSTORE_FILENAME: kafka.truststore.jks + KAFKA_SSL_TRUSTSTORE_CREDENTIALS: pwd.txt + KAFKA_SSL_ENDPOINT_IDENTIFICATION_ALGORITHM: " " + KAFKA_SSL_CLIENT_AUTH: required KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1 KAFKA_GROUP_INITIAL_REBALANCE_DELAY_MS: 0 KAFKA_CONFLUENT_LICENSE_TOPIC_REPLICATION_FACTOR: 1 KAFKA_CONFLUENT_BALANCER_TOPIC_REPLICATION_FACTOR: 1 KAFKA_TRANSACTION_STATE_LOG_MIN_ISR: 1 KAFKA_TRANSACTION_STATE_LOG_REPLICATION_FACTOR: 1 + volumes: + - ./kafka_secrets:/etc/kafka/secrets kafka-ui: image: provectuslabs/kafka-ui @@ -55,9 +64,8 @@ services: - kafka - zookeeper environment: - - KAFKA_CLUSTERS_0_NAME=local - - KAFKA_CLUSTERS_0_BOOTSTRAPSERVERS=kafka:9092 - - KAFKA_CLUSTERS_0_ZOOKEEPER=zookeeper:2181 - # used ports: 8080 - # The port is automatically forwarded to localhost. - # Please see the "PORTS" panel for details. + KAFKA_CLUSTERS_0_NAME: local + KAFKA_CLUSTERS_0_BOOTSTRAPSERVERS: kafka:9092 + KAFKA_CLUSTERS_0_ZOOKEEPER: zookeeper:2181 + ports: + - 8080:8080 diff --git a/.devcontainer/kafka_secrets/.gitignore b/.devcontainer/kafka_secrets/.gitignore new file mode 100644 index 00000000..78b1e601 --- /dev/null +++ b/.devcontainer/kafka_secrets/.gitignore @@ -0,0 +1,7 @@ +*.crt +*.crs +*.jks +*.key +*.p12 +*.srl +pwd.* diff --git a/.devcontainer/kafka_secrets/create_secrets.sh b/.devcontainer/kafka_secrets/create_secrets.sh new file mode 100755 index 00000000..884bfe60 --- /dev/null +++ b/.devcontainer/kafka_secrets/create_secrets.sh @@ -0,0 +1,72 @@ +#!/bin/bash + +# Create Kafka secrets used in integration tests + +set -e +set -u + +cd $(dirname "${BASH_SOURCE[0]}") +[ -f create_secrets.sh ] || exit 1 +if [ -f ca.crt -o -f client.crt ] +then + echo "Secrets already exist. You can remove them to create new ones." + exit 0 +fi + +# install openssl and keytool +if ! command -v openssl &> /dev/null || ! command -v keytool &> /dev/null +then + if ! apt install -y openssl openjdk-11-jre-headless + then + echo Please install openssl and keytool. 
+ exit 1 + fi +fi + +# remove old secrets +rm -f *.crt *.csr *.key *.srl *.jks *.p12 + +pw=testpw +echo $pw > pwd.txt + +# generate CA key +openssl req -new -x509 -keyout ca.key -out ca.crt -days 9999 \ + -subj '/CN=ca.test.ghga.dev/OU=TEST/O=GHGA' \ + -passin pass:$pw -passout pass:$pw + +for component in kafka client +do + echo "Create keystore for Kafka $component..." + # create keystore + keytool -genkey -noprompt -alias $component \ + -dname "CN=$component, OU=TEST, O=GHGA" \ + -keystore $component.keystore.jks \ + -keyalg RSA -storepass $pw -keypass $pw + + # create CSR, sign the key and import back into keystore + keytool -keystore $component.keystore.jks -alias $component -certreq \ + -file $component.csr -storepass $pw -keypass $pw + + openssl x509 -req -CA ca.crt -CAkey ca.key \ + -in $component.csr -out $component.crt \ + -days 9999 -CAcreateserial -passin pass:$pw + + keytool -keystore $component.keystore.jks -alias CARoot \ + -import -file ca.crt -storepass $pw -keypass $pw -noprompt + + keytool -keystore $component.keystore.jks -alias $component \ + -import -file $component.crt -storepass $pw -keypass $pw -noprompt + + # create truststore and import the CA cert + keytool -keystore $component.truststore.jks -alias CARoot \ + -import -file ca.crt -storepass $pw -keypass $pw -noprompt +done + +# Create certfile and encrypted keyfile for the client +keytool -importkeystore -srckeystore client.keystore.jks -srcalias client \ + -destkeystore client.keystore.p12 -deststoretype PKCS12 \ + -srcstorepass $pw -deststorepass $pw -noprompt +openssl pkcs12 -in client.keystore.p12 -nocerts -out client.key \ + -passin pass:$pw -passout pass:$pw + +rm -f kafka.crt kafka.key ca.key *.csr *.p12 *.srl diff --git a/.github/workflows/tests.yaml b/.github/workflows/tests.yaml index 05f474ca..ea60353f 100644 --- a/.github/workflows/tests.yaml +++ b/.github/workflows/tests.yaml @@ -10,9 +10,25 @@ jobs: steps: - uses: actions/checkout@v3 + - name: Create env files + run: | + .devcontainer/kafka_secrets/create_secrets.sh + + - name: Set up containers + run: | + docker-compose -f .devcontainer/docker-compose.yml up -d + - id: common uses: ghga-de/gh-action-common@v3 + - name: Create env files + run: | + .devcontainer/init-scripts/dev/set_env.sh + + - name: Set up containers + run: | + docker-compose -f .devcontainer/docker-compose.yml up -d + - id: pytest run: | export ${{ steps.common.outputs.CONFIG_YAML_ENV_VAR_NAME }}="${{ steps.common.outputs.CONFIG_YAML }}" diff --git a/.static_files_ignore b/.static_files_ignore index 2728d3f2..2098675d 100644 --- a/.static_files_ignore +++ b/.static_files_ignore @@ -5,7 +5,7 @@ .github/workflows/check_openapi_spec.yaml .github/workflows/check_readme.yaml .github/workflows/cd.yaml -.github/workflows/dev_cd.yaml +.github/workflows/tests.yaml scripts/script_utils/fastapi_app_location.py diff --git a/src/hexkit/providers/akafka/provider.py b/src/hexkit/providers/akafka/provider.py index 32a65e24..de177d03 100644 --- a/src/hexkit/providers/akafka/provider.py +++ b/src/hexkit/providers/akafka/provider.py @@ -79,33 +79,21 @@ class KafkaConfig(BaseSettings): ) ssl_cafile: str = Field( "", - description="""Certificate Authority file path containing certificates - used to sign broker certificates. If CA not specified (by either - cafile, capath, cadata) default system CA will be used if found by - OpenSSL. For more information see - :meth:`~ssl.SSLContext.load_verify_locations`. 
- Default: :data:`None`""", + description="Certificate Authority file path containing certificates" + + " used to sign broker certificates. If a CA not specified, the default" + + " system CA will be used if found by OpenSSL.", ) ssl_certfile: str = Field( "", - description="""optional filename of file in PEM format containing - the client certificate, as well as any CA certificates needed to - establish the certificate's authenticity. For more information see - :meth:`~ssl.SSLContext.load_cert_chain`. - Default: :data:`None`.""", + description="Optional filename of client certificate, as well as any" + + " CA certificates needed to establish the certificate's authenticity.", ) ssl_keyfile: str = Field( - "", - description=""""optional filename containing the client private key. - For more information see :meth:`~ssl.SSLContext.load_cert_chain`. - Default: :data:`None`.""", + "", description="Optional filename containing the client private key." ) ssl_password: str = Field( "", - description="""optional password to be used when loading the - certificate chain. For more information see - :meth:`~ssl.SSLContext.load_cert_chain`. - Default: :data:`None`.""", + description="Optional password to be used for the client private key.", ) @@ -122,7 +110,7 @@ def generate_client_id(*, service_name: str, instance_id: str) -> str: def generate_ssl_context(config: KafkaConfig) -> Optional[ssl.SSLContext]: - """Generate ssl_context for connecting to Kafka broker via an encrypted SSL connection""" + """Generate SSL context for an encrypted SSL connection to Kafka broker.""" return ( create_ssl_context( cafile=config.ssl_cafile, diff --git a/tests/integration/test_akafka.py b/tests/integration/test_akafka.py index aa1049c8..112e5097 100644 --- a/tests/integration/test_akafka.py +++ b/tests/integration/test_akafka.py @@ -16,9 +16,12 @@ """Testing Apache Kafka based providers.""" +from pathlib import Path from unittest.mock import AsyncMock import pytest +from kafka import KafkaAdminClient +from kafka.errors import KafkaError from hexkit.custom_types import JsonObject from hexkit.providers.akafka import ( @@ -62,13 +65,13 @@ async def test_kafka_event_publisher(kafka_fixture: KafkaFixture): # noqa: F811 @pytest.mark.asyncio async def test_kafka_event_subscriber(kafka_fixture: KafkaFixture): # noqa: F811 - """Test the KafkaEventSubscriber with mocked KafkaEventSubscriber.""" + """Test the KafkaEventSubscriber with mocked EventSubscriber.""" payload = {"test_content": "Hello World"} type_ = "test_type" key = "test_key" topic = "test_topic" - # create protocol-compatiple translator mock: + # create protocol-compatible translator mock: translator = AsyncMock() translator.topics_of_interest = [topic] translator.types_of_interest = [type_] @@ -95,3 +98,75 @@ async def test_kafka_event_subscriber(kafka_fixture: KafkaFixture): # noqa: F81 translator.consume.assert_awaited_once_with( payload=payload, type_=type_, topic=topic ) + + +def find_kafka_secrets_dir() -> Path: + """Get the directory with Kafka secrets.""" + current_dir = Path(__file__) + while current_dir != current_dir.parent: + current_dir = current_dir.parent + secrets_dir = current_dir / ".devcontainer" / "kafka_secrets" + if secrets_dir.is_dir(): + for filename in "ca.crt", "client.crt", "client.key", "pwd.txt": + assert (secrets_dir / filename).exists(), ( + f"No {filename} in Kafka secrets directory." + " Please re-run the create_secrets.sh script." + ) + return secrets_dir + assert False, "Kafka secrets directory not found." 
+ + +@pytest.mark.asyncio +async def test_kafka_ssl(): + """Test connecting to Kafka via SSL. + + This test uses the broker configured with the needed secrets via docker-compose + instead of a test container. + """ + payload: JsonObject = {"test_content": "Be aware... Connect with care"} + type_ = "test_type" + key = "test_key" + topic = "test_topic" + + admin_client = KafkaAdminClient(bootstrap_servers=["kafka:9092"]) + try: + admin_client.delete_topics([topic]) + except KafkaError: + pass + + secrets_dir = find_kafka_secrets_dir() + password = open(secrets_dir / "pwd.txt").read().strip() + assert password + + config = KafkaConfig( + service_name="test_ssl", + service_instance_id="1", + kafka_servers=["kafka:19092"], # SSL port + security_protocol="SSL", + ssl_cafile=str(secrets_dir / "ca.crt"), + ssl_certfile=str(secrets_dir / "client.crt"), + ssl_keyfile=str(secrets_dir / "client.key"), + ssl_password=password, + ) + + async with KafkaEventPublisher.construct(config=config) as event_publisher: + await event_publisher.publish( + payload=payload, + type_=type_, + key=key, + topic=topic, + ) + + translator = AsyncMock() + translator.topics_of_interest = [topic] + translator.types_of_interest = [type_] + + async with KafkaEventSubscriber.construct( + config=config, + translator=translator, + ) as event_subscriber: + await event_subscriber.run(forever=False) + + translator.consume.assert_awaited_once_with( + payload=payload, type_=type_, topic=topic + ) From 5122baa6c5032958c670b2f060472d04b64546e3 Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Tue, 7 Nov 2023 18:42:25 +0000 Subject: [PATCH 12/29] Fix init script location --- .github/workflows/tests.yaml | 4 ++-- .static_files_ignore | 2 ++ 2 files changed, 4 insertions(+), 2 deletions(-) diff --git a/.github/workflows/tests.yaml b/.github/workflows/tests.yaml index ea60353f..6dd85af5 100644 --- a/.github/workflows/tests.yaml +++ b/.github/workflows/tests.yaml @@ -21,9 +21,9 @@ jobs: - id: common uses: ghga-de/gh-action-common@v3 - - name: Create env files + - name: Create Kafka secrets run: | - .devcontainer/init-scripts/dev/set_env.sh + .devcontainer/kafka_secrets/create_secrets.sh - name: Set up containers run: | diff --git a/.static_files_ignore b/.static_files_ignore index 2098675d..e5e55f1a 100644 --- a/.static_files_ignore +++ b/.static_files_ignore @@ -1,6 +1,8 @@ # Optional list of files which are actually static in the template # but are allowed to have different content in the current repository +.devcontainer/devcontainer.json + .github/workflows/check_config_docs.yaml .github/workflows/check_openapi_spec.yaml .github/workflows/check_readme.yaml From 52ffff9ea82858e697714fe1af8f17b41bad4e20 Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Tue, 7 Nov 2023 18:46:29 +0000 Subject: [PATCH 13/29] Remove duplicate steps --- .github/workflows/tests.yaml | 10 +--------- 1 file changed, 1 insertion(+), 9 deletions(-) diff --git a/.github/workflows/tests.yaml b/.github/workflows/tests.yaml index 6dd85af5..c89e8c47 100644 --- a/.github/workflows/tests.yaml +++ b/.github/workflows/tests.yaml @@ -10,7 +10,7 @@ jobs: steps: - uses: actions/checkout@v3 - - name: Create env files + - name: Create Kafka secrets run: | .devcontainer/kafka_secrets/create_secrets.sh @@ -21,14 +21,6 @@ jobs: - id: common uses: ghga-de/gh-action-common@v3 - - name: Create Kafka secrets - run: | - .devcontainer/kafka_secrets/create_secrets.sh - - - name: Set up containers - run: | - docker-compose -f .devcontainer/docker-compose.yml up -d - - id: 
pytest run: | export ${{ steps.common.outputs.CONFIG_YAML_ENV_VAR_NAME }}="${{ steps.common.outputs.CONFIG_YAML }}" From 385a7f9fa2f004db45b748b479b8fea5073db634 Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Tue, 7 Nov 2023 19:01:31 +0000 Subject: [PATCH 14/29] Add ports and step names in test workflow --- .devcontainer/docker-compose.yml | 3 +++ .github/workflows/tests.yaml | 12 +++++++++--- 2 files changed, 12 insertions(+), 3 deletions(-) diff --git a/.devcontainer/docker-compose.yml b/.devcontainer/docker-compose.yml index 7f066b04..aa45bee7 100644 --- a/.devcontainer/docker-compose.yml +++ b/.devcontainer/docker-compose.yml @@ -56,6 +56,9 @@ services: KAFKA_TRANSACTION_STATE_LOG_REPLICATION_FACTOR: 1 volumes: - ./kafka_secrets:/etc/kafka/secrets + ports: + - 9092:9092 + - 19092:19092 kafka-ui: image: provectuslabs/kafka-ui diff --git a/.github/workflows/tests.yaml b/.github/workflows/tests.yaml index c89e8c47..ab6bfd8d 100644 --- a/.github/workflows/tests.yaml +++ b/.github/workflows/tests.yaml @@ -8,20 +8,26 @@ jobs: name: Tests steps: - - uses: actions/checkout@v3 + - id: checkout + name: Check out repository + uses: actions/checkout@v3 - - name: Create Kafka secrets + - id: secrets + name: Create Kafka secrets run: | .devcontainer/kafka_secrets/create_secrets.sh - - name: Set up containers + - id: containers + name: Set up containers run: | docker-compose -f .devcontainer/docker-compose.yml up -d - id: common + name: Run all common steps uses: ghga-de/gh-action-common@v3 - id: pytest + name: Run all tests run: | export ${{ steps.common.outputs.CONFIG_YAML_ENV_VAR_NAME }}="${{ steps.common.outputs.CONFIG_YAML }}" From 4a73bed40d51ff605c5513648613fd30a63a61c7 Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Tue, 7 Nov 2023 19:21:13 +0000 Subject: [PATCH 15/29] Add delete topic enable setting --- .devcontainer/docker-compose.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.devcontainer/docker-compose.yml b/.devcontainer/docker-compose.yml index aa45bee7..3a369f41 100644 --- a/.devcontainer/docker-compose.yml +++ b/.devcontainer/docker-compose.yml @@ -41,6 +41,7 @@ services: KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181 KAFKA_ADVERTISED_LISTENERS: PLAINTEXT://kafka:9092,SSL://kafka:19092 KAFKA_INTER_BROKER_LISTENER_NAME: PLAINTEXT + KAFKA_DELETE_TOPIC_ENABLE: "true" KAFKA_SSL_KEYSTORE_FILENAME: kafka.keystore.jks KAFKA_SSL_KEYSTORE_CREDENTIALS: pwd.txt KAFKA_SSL_KEY_CREDENTIALS: pwd.txt From 148aa7a6b2b9dcca37958e57cc0bc083df992998 Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Wed, 8 Nov 2023 09:11:38 +0000 Subject: [PATCH 16/29] Make tests work from outside the docker container --- .devcontainer/docker-compose.yml | 7 ++++--- .devcontainer/kafka_secrets/create_secrets.sh | 2 +- .github/workflows/tests.yaml | 4 ++-- src/hexkit/providers/akafka/testutils.py | 2 +- tests/integration/test_akafka.py | 4 ++-- 5 files changed, 10 insertions(+), 9 deletions(-) diff --git a/.devcontainer/docker-compose.yml b/.devcontainer/docker-compose.yml index 3a369f41..fc97ba8f 100644 --- a/.devcontainer/docker-compose.yml +++ b/.devcontainer/docker-compose.yml @@ -12,6 +12,9 @@ services: init: true privileged: true + # Since we want to be able to run the tests outside the devcontainer, + # we must test with the same network when running inside. 
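+    # With host networking, the ports published by the kafka service are
+    # reachable at localhost:9092 (PLAINTEXT) and localhost:19092 (SSL) both
+    # from the host and from within this container.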
+ network_mode: "host" volumes: - ..:/workspace:cached @@ -24,7 +27,6 @@ services: zookeeper: image: confluentinc/cp-zookeeper:7.5.1 - # used ports: 2181 environment: ZOOKEEPER_SERVER_ID: 1 ZOOKEEPER_CLIENT_PORT: 2181 @@ -35,11 +37,10 @@ services: restart: always depends_on: - zookeeper - # used ports: 9092, 19092 environment: KAFKA_BROKER_ID: 1 KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181 - KAFKA_ADVERTISED_LISTENERS: PLAINTEXT://kafka:9092,SSL://kafka:19092 + KAFKA_ADVERTISED_LISTENERS: PLAINTEXT://localhost:9092,SSL://localhost:19092 KAFKA_INTER_BROKER_LISTENER_NAME: PLAINTEXT KAFKA_DELETE_TOPIC_ENABLE: "true" KAFKA_SSL_KEYSTORE_FILENAME: kafka.keystore.jks diff --git a/.devcontainer/kafka_secrets/create_secrets.sh b/.devcontainer/kafka_secrets/create_secrets.sh index 884bfe60..2a93d1e6 100755 --- a/.devcontainer/kafka_secrets/create_secrets.sh +++ b/.devcontainer/kafka_secrets/create_secrets.sh @@ -39,7 +39,7 @@ do echo "Create keystore for Kafka $component..." # create keystore keytool -genkey -noprompt -alias $component \ - -dname "CN=$component, OU=TEST, O=GHGA" \ + -dname "CN=localhost, OU=TEST, O=GHGA" \ -keystore $component.keystore.jks \ -keyalg RSA -storepass $pw -keypass $pw diff --git a/.github/workflows/tests.yaml b/.github/workflows/tests.yaml index ab6bfd8d..475bbf9a 100644 --- a/.github/workflows/tests.yaml +++ b/.github/workflows/tests.yaml @@ -18,9 +18,9 @@ jobs: .devcontainer/kafka_secrets/create_secrets.sh - id: containers - name: Set up containers + name: Set up Kafka containers run: | - docker-compose -f .devcontainer/docker-compose.yml up -d + docker-compose -f .devcontainer/docker-compose.yml up kafka zookeeper -d - id: common name: Run all common steps diff --git a/src/hexkit/providers/akafka/testutils.py b/src/hexkit/providers/akafka/testutils.py index 340f462e..7e94877b 100644 --- a/src/hexkit/providers/akafka/testutils.py +++ b/src/hexkit/providers/akafka/testutils.py @@ -61,7 +61,7 @@ class ExpectedEvent(EventBase): @dataclass(frozen=True) class RecordedEvent(EventBase): - """Used by the EventyRecorder class to describe events recorded in a specific topic.""" + """Used by the EventRecorder class to describe events recorded in a specific topic.""" key: Ascii diff --git a/tests/integration/test_akafka.py b/tests/integration/test_akafka.py index 112e5097..3a16573b 100644 --- a/tests/integration/test_akafka.py +++ b/tests/integration/test_akafka.py @@ -128,7 +128,7 @@ async def test_kafka_ssl(): key = "test_key" topic = "test_topic" - admin_client = KafkaAdminClient(bootstrap_servers=["kafka:9092"]) + admin_client = KafkaAdminClient(bootstrap_servers=["localhost:9092"]) try: admin_client.delete_topics([topic]) except KafkaError: @@ -141,7 +141,7 @@ async def test_kafka_ssl(): config = KafkaConfig( service_name="test_ssl", service_instance_id="1", - kafka_servers=["kafka:19092"], # SSL port + kafka_servers=["localhost:19092"], # SSL port security_protocol="SSL", ssl_cafile=str(secrets_dir / "ca.crt"), ssl_certfile=str(secrets_dir / "client.crt"), From 379b3e57556b706a85f47cdc471750ce3ea595e5 Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Wed, 8 Nov 2023 09:28:51 +0000 Subject: [PATCH 17/29] Fix docker-compose options --- .github/workflows/tests.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/tests.yaml b/.github/workflows/tests.yaml index 475bbf9a..9414e640 100644 --- a/.github/workflows/tests.yaml +++ b/.github/workflows/tests.yaml @@ -20,7 +20,7 @@ jobs: - id: containers name: Set up Kafka containers run: | - 
docker-compose -f .devcontainer/docker-compose.yml up kafka zookeeper -d + docker-compose -f .devcontainer/docker-compose.yml up -d kafka zookeeper - id: common name: Run all common steps From b05a98d0ec69bc66657c5977ac775caa8624c406 Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Wed, 8 Nov 2023 09:29:27 +0000 Subject: [PATCH 18/29] Add a prefix to kafka config settings --- src/hexkit/providers/akafka/provider.py | 24 ++++++++++++------------ tests/integration/test_akafka.py | 10 +++++----- tests/unit/test_akafka.py | 2 +- 3 files changed, 18 insertions(+), 18 deletions(-) diff --git a/src/hexkit/providers/akafka/provider.py b/src/hexkit/providers/akafka/provider.py index de177d03..dc08b853 100644 --- a/src/hexkit/providers/akafka/provider.py +++ b/src/hexkit/providers/akafka/provider.py @@ -72,26 +72,26 @@ class KafkaConfig(BaseSettings): examples=[["localhost:9092"]], description="A list of connection strings to connect to Kafka bootstrap servers.", ) - security_protocol: Literal["PLAINTEXT", "SSL"] = Field( + kafka_security_protocol: Literal["PLAINTEXT", "SSL"] = Field( "PLAINTEXT", description="Protocol used to communicate with brokers. " + "Valid values are: PLAINTEXT, SSL.", ) - ssl_cafile: str = Field( + kafka_ssl_cafile: str = Field( "", description="Certificate Authority file path containing certificates" + " used to sign broker certificates. If a CA not specified, the default" + " system CA will be used if found by OpenSSL.", ) - ssl_certfile: str = Field( + kafka_ssl_certfile: str = Field( "", description="Optional filename of client certificate, as well as any" + " CA certificates needed to establish the certificate's authenticity.", ) - ssl_keyfile: str = Field( + kafka_ssl_keyfile: str = Field( "", description="Optional filename containing the client private key." 
) - ssl_password: str = Field( + kafka_ssl_password: str = Field( "", description="Optional password to be used for the client private key.", ) @@ -113,12 +113,12 @@ def generate_ssl_context(config: KafkaConfig) -> Optional[ssl.SSLContext]: """Generate SSL context for an encrypted SSL connection to Kafka broker.""" return ( create_ssl_context( - cafile=config.ssl_cafile, - certfile=config.ssl_certfile, - keyfile=config.ssl_keyfile, - password=config.ssl_password, + cafile=config.kafka_ssl_cafile, + certfile=config.kafka_ssl_certfile, + keyfile=config.kafka_ssl_keyfile, + password=config.kafka_ssl_password, ) - if config.security_protocol == "SSL" + if config.kafka_security_protocol == "SSL" else None ) @@ -190,7 +190,7 @@ async def construct( producer = kafka_producer_cls( bootstrap_servers=",".join(config.kafka_servers), - security_protocol=config.security_protocol, + security_protocol=config.kafka_security_protocol, ssl_context=generate_ssl_context(config), client_id=client_id, key_serializer=lambda key: key.encode("ascii"), @@ -342,7 +342,7 @@ async def construct( consumer = kafka_consumer_cls( *topics, bootstrap_servers=",".join(config.kafka_servers), - security_protocol=config.security_protocol, + security_protocol=config.kafka_security_protocol, ssl_context=generate_ssl_context(config), client_id=client_id, group_id=config.service_name, diff --git a/tests/integration/test_akafka.py b/tests/integration/test_akafka.py index 3a16573b..668f9e26 100644 --- a/tests/integration/test_akafka.py +++ b/tests/integration/test_akafka.py @@ -142,11 +142,11 @@ async def test_kafka_ssl(): service_name="test_ssl", service_instance_id="1", kafka_servers=["localhost:19092"], # SSL port - security_protocol="SSL", - ssl_cafile=str(secrets_dir / "ca.crt"), - ssl_certfile=str(secrets_dir / "client.crt"), - ssl_keyfile=str(secrets_dir / "client.key"), - ssl_password=password, + kafka_security_protocol="SSL", + kafka_ssl_cafile=str(secrets_dir / "ca.crt"), + kafka_ssl_certfile=str(secrets_dir / "client.crt"), + kafka_ssl_keyfile=str(secrets_dir / "client.key"), + kafka_ssl_password=password, ) async with KafkaEventPublisher.construct(config=config) as event_publisher: diff --git a/tests/unit/test_akafka.py b/tests/unit/test_akafka.py index 98ce958f..638a6555 100644 --- a/tests/unit/test_akafka.py +++ b/tests/unit/test_akafka.py @@ -148,7 +148,7 @@ async def test_kafka_event_subscriber( consumer_cls = Mock() consumer_cls.return_value = consumer - # create protocol-compatiple translator mock: + # create protocol-compatible translator mock: translator = AsyncMock() if processing_failure and exception: translator.consume.side_effect = exception() From a783b8a27139848119f289370d6491e3ab73f4e3 Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Wed, 8 Nov 2023 11:22:55 +0000 Subject: [PATCH 19/29] Rename certificates to make more sense --- .devcontainer/docker-compose.yml | 4 ++-- .devcontainer/kafka_secrets/create_secrets.sh | 7 +++---- 2 files changed, 5 insertions(+), 6 deletions(-) diff --git a/.devcontainer/docker-compose.yml b/.devcontainer/docker-compose.yml index fc97ba8f..3ed35b62 100644 --- a/.devcontainer/docker-compose.yml +++ b/.devcontainer/docker-compose.yml @@ -43,10 +43,10 @@ services: KAFKA_ADVERTISED_LISTENERS: PLAINTEXT://localhost:9092,SSL://localhost:19092 KAFKA_INTER_BROKER_LISTENER_NAME: PLAINTEXT KAFKA_DELETE_TOPIC_ENABLE: "true" - KAFKA_SSL_KEYSTORE_FILENAME: kafka.keystore.jks + KAFKA_SSL_KEYSTORE_FILENAME: broker.keystore.jks KAFKA_SSL_KEYSTORE_CREDENTIALS: pwd.txt 
KAFKA_SSL_KEY_CREDENTIALS: pwd.txt - KAFKA_SSL_TRUSTSTORE_FILENAME: kafka.truststore.jks + KAFKA_SSL_TRUSTSTORE_FILENAME: broker.truststore.jks KAFKA_SSL_TRUSTSTORE_CREDENTIALS: pwd.txt KAFKA_SSL_ENDPOINT_IDENTIFICATION_ALGORITHM: " " KAFKA_SSL_CLIENT_AUTH: required diff --git a/.devcontainer/kafka_secrets/create_secrets.sh b/.devcontainer/kafka_secrets/create_secrets.sh index 2a93d1e6..c870c7db 100755 --- a/.devcontainer/kafka_secrets/create_secrets.sh +++ b/.devcontainer/kafka_secrets/create_secrets.sh @@ -29,15 +29,14 @@ rm -f *.crt *.csr *.key *.srl *.jks *.p12 pw=testpw echo $pw > pwd.txt -# generate CA key +echo "Generate a CA key..." openssl req -new -x509 -keyout ca.key -out ca.crt -days 9999 \ -subj '/CN=ca.test.ghga.dev/OU=TEST/O=GHGA' \ -passin pass:$pw -passout pass:$pw -for component in kafka client +for component in broker client do echo "Create keystore for Kafka $component..." - # create keystore keytool -genkey -noprompt -alias $component \ -dname "CN=localhost, OU=TEST, O=GHGA" \ -keystore $component.keystore.jks \ @@ -69,4 +68,4 @@ keytool -importkeystore -srckeystore client.keystore.jks -srcalias client \ openssl pkcs12 -in client.keystore.p12 -nocerts -out client.key \ -passin pass:$pw -passout pass:$pw -rm -f kafka.crt kafka.key ca.key *.csr *.p12 *.srl +rm -f broker.crt broker.key ca.key *.csr *.p12 *.srl From d8a049ecfd8d41d04ec438527a716d343aff7ce7 Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Thu, 9 Nov 2023 16:53:15 +0000 Subject: [PATCH 20/29] Use test container for testing Kafka with SSL --- .devcontainer/devcontainer.json | 3 +- .devcontainer/docker-compose.yml | 17 +- .devcontainer/kafka_secrets/.gitignore | 7 - .devcontainer/kafka_secrets/create_secrets.sh | 71 ------- .github/workflows/tests.yaml | 16 +- .static_files_ignore | 3 - requirements-dev.in | 1 + src/hexkit/providers/akafka/containers.py | 181 ++++++++++++++++++ tests/fixtures/kafka_secrets.py | 166 ++++++++++++++++ tests/integration/test_akafka.py | 113 +++++------ 10 files changed, 404 insertions(+), 174 deletions(-) delete mode 100644 .devcontainer/kafka_secrets/.gitignore delete mode 100755 .devcontainer/kafka_secrets/create_secrets.sh create mode 100644 src/hexkit/providers/akafka/containers.py create mode 100644 tests/fixtures/kafka_secrets.py diff --git a/.devcontainer/devcontainer.json b/.devcontainer/devcontainer.json index e5d28cad..89ff58f8 100644 --- a/.devcontainer/devcontainer.json +++ b/.devcontainer/devcontainer.json @@ -59,7 +59,6 @@ }, // Use 'forwardPorts' to make a list of ports inside the container available locally. // "forwardPorts": [5000, 5432], - "initializeCommand": ".devcontainer/kafka_secrets/create_secrets.sh", // Use 'postCreateCommand' to run commands after the container is created. "postCreateCommand": "dev_install", // Comment out connect as root instead. More info: https://aka.ms/vscode-remote/containers/non-root. @@ -73,4 +72,4 @@ // details can be found here: https://github.com/devcontainers/features/tree/main/src/docker-outside-of-docker "ghcr.io/devcontainers/features/docker-outside-of-docker:1": {} } -} \ No newline at end of file +} diff --git a/.devcontainer/docker-compose.yml b/.devcontainer/docker-compose.yml index 3ed35b62..054d0939 100644 --- a/.devcontainer/docker-compose.yml +++ b/.devcontainer/docker-compose.yml @@ -12,9 +12,6 @@ services: init: true privileged: true - # Since we want to be able to run the tests outside the devcontainer, - # we must test with the same network when running inside. 
- network_mode: "host" volumes: - ..:/workspace:cached @@ -40,27 +37,15 @@ services: environment: KAFKA_BROKER_ID: 1 KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181 - KAFKA_ADVERTISED_LISTENERS: PLAINTEXT://localhost:9092,SSL://localhost:19092 + KAFKA_ADVERTISED_LISTENERS: PLAINTEXT://localhost:9092 KAFKA_INTER_BROKER_LISTENER_NAME: PLAINTEXT KAFKA_DELETE_TOPIC_ENABLE: "true" - KAFKA_SSL_KEYSTORE_FILENAME: broker.keystore.jks - KAFKA_SSL_KEYSTORE_CREDENTIALS: pwd.txt - KAFKA_SSL_KEY_CREDENTIALS: pwd.txt - KAFKA_SSL_TRUSTSTORE_FILENAME: broker.truststore.jks - KAFKA_SSL_TRUSTSTORE_CREDENTIALS: pwd.txt - KAFKA_SSL_ENDPOINT_IDENTIFICATION_ALGORITHM: " " - KAFKA_SSL_CLIENT_AUTH: required KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1 KAFKA_GROUP_INITIAL_REBALANCE_DELAY_MS: 0 KAFKA_CONFLUENT_LICENSE_TOPIC_REPLICATION_FACTOR: 1 KAFKA_CONFLUENT_BALANCER_TOPIC_REPLICATION_FACTOR: 1 KAFKA_TRANSACTION_STATE_LOG_MIN_ISR: 1 KAFKA_TRANSACTION_STATE_LOG_REPLICATION_FACTOR: 1 - volumes: - - ./kafka_secrets:/etc/kafka/secrets - ports: - - 9092:9092 - - 19092:19092 kafka-ui: image: provectuslabs/kafka-ui diff --git a/.devcontainer/kafka_secrets/.gitignore b/.devcontainer/kafka_secrets/.gitignore deleted file mode 100644 index 78b1e601..00000000 --- a/.devcontainer/kafka_secrets/.gitignore +++ /dev/null @@ -1,7 +0,0 @@ -*.crt -*.crs -*.jks -*.key -*.p12 -*.srl -pwd.* diff --git a/.devcontainer/kafka_secrets/create_secrets.sh b/.devcontainer/kafka_secrets/create_secrets.sh deleted file mode 100755 index c870c7db..00000000 --- a/.devcontainer/kafka_secrets/create_secrets.sh +++ /dev/null @@ -1,71 +0,0 @@ -#!/bin/bash - -# Create Kafka secrets used in integration tests - -set -e -set -u - -cd $(dirname "${BASH_SOURCE[0]}") -[ -f create_secrets.sh ] || exit 1 -if [ -f ca.crt -o -f client.crt ] -then - echo "Secrets already exist. You can remove them to create new ones." - exit 0 -fi - -# install openssl and keytool -if ! command -v openssl &> /dev/null || ! command -v keytool &> /dev/null -then - if ! apt install -y openssl openjdk-11-jre-headless - then - echo Please install openssl and keytool. - exit 1 - fi -fi - -# remove old secrets -rm -f *.crt *.csr *.key *.srl *.jks *.p12 - -pw=testpw -echo $pw > pwd.txt - -echo "Generate a CA key..." -openssl req -new -x509 -keyout ca.key -out ca.crt -days 9999 \ - -subj '/CN=ca.test.ghga.dev/OU=TEST/O=GHGA' \ - -passin pass:$pw -passout pass:$pw - -for component in broker client -do - echo "Create keystore for Kafka $component..." 
- keytool -genkey -noprompt -alias $component \ - -dname "CN=localhost, OU=TEST, O=GHGA" \ - -keystore $component.keystore.jks \ - -keyalg RSA -storepass $pw -keypass $pw - - # create CSR, sign the key and import back into keystore - keytool -keystore $component.keystore.jks -alias $component -certreq \ - -file $component.csr -storepass $pw -keypass $pw - - openssl x509 -req -CA ca.crt -CAkey ca.key \ - -in $component.csr -out $component.crt \ - -days 9999 -CAcreateserial -passin pass:$pw - - keytool -keystore $component.keystore.jks -alias CARoot \ - -import -file ca.crt -storepass $pw -keypass $pw -noprompt - - keytool -keystore $component.keystore.jks -alias $component \ - -import -file $component.crt -storepass $pw -keypass $pw -noprompt - - # create truststore and import the CA cert - keytool -keystore $component.truststore.jks -alias CARoot \ - -import -file ca.crt -storepass $pw -keypass $pw -noprompt -done - -# Create certfile and encrypted keyfile for the client -keytool -importkeystore -srckeystore client.keystore.jks -srcalias client \ - -destkeystore client.keystore.p12 -deststoretype PKCS12 \ - -srcstorepass $pw -deststorepass $pw -noprompt -openssl pkcs12 -in client.keystore.p12 -nocerts -out client.key \ - -passin pass:$pw -passout pass:$pw - -rm -f broker.crt broker.key ca.key *.csr *.p12 *.srl diff --git a/.github/workflows/tests.yaml b/.github/workflows/tests.yaml index 9414e640..05f474ca 100644 --- a/.github/workflows/tests.yaml +++ b/.github/workflows/tests.yaml @@ -8,26 +8,12 @@ jobs: name: Tests steps: - - id: checkout - name: Check out repository - uses: actions/checkout@v3 - - - id: secrets - name: Create Kafka secrets - run: | - .devcontainer/kafka_secrets/create_secrets.sh - - - id: containers - name: Set up Kafka containers - run: | - docker-compose -f .devcontainer/docker-compose.yml up -d kafka zookeeper + - uses: actions/checkout@v3 - id: common - name: Run all common steps uses: ghga-de/gh-action-common@v3 - id: pytest - name: Run all tests run: | export ${{ steps.common.outputs.CONFIG_YAML_ENV_VAR_NAME }}="${{ steps.common.outputs.CONFIG_YAML }}" diff --git a/.static_files_ignore b/.static_files_ignore index e5e55f1a..084bd570 100644 --- a/.static_files_ignore +++ b/.static_files_ignore @@ -1,13 +1,10 @@ # Optional list of files which are actually static in the template # but are allowed to have different content in the current repository -.devcontainer/devcontainer.json - .github/workflows/check_config_docs.yaml .github/workflows/check_openapi_spec.yaml .github/workflows/check_readme.yaml .github/workflows/cd.yaml -.github/workflows/tests.yaml scripts/script_utils/fastapi_app_location.py diff --git a/requirements-dev.in b/requirements-dev.in index 1cb47f0c..efe3e50f 100644 --- a/requirements-dev.in +++ b/requirements-dev.in @@ -4,3 +4,4 @@ -r requirements-dev-common.in # additional requirements can be listed her +cryptography >= 41 diff --git a/src/hexkit/providers/akafka/containers.py b/src/hexkit/providers/akafka/containers.py new file mode 100644 index 00000000..73ff5c78 --- /dev/null +++ b/src/hexkit/providers/akafka/containers.py @@ -0,0 +1,181 @@ +# Copyright 2021 - 2023 Universität Tübingen, DKFZ, EMBL, and Universität zu Köln +# for the German Human Genome-Phenome Archive (GHGA) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +"""Improved Kafka test containers.""" + +import tarfile +import time +from io import BytesIO +from ssl import SSLError +from textwrap import dedent +from typing import Literal, Optional + +from kafka import KafkaConsumer +from kafka.errors import KafkaError, NoBrokersAvailable, UnrecognizedBrokerVersion +from testcontainers.core.container import DockerContainer +from testcontainers.core.waiting_utils import wait_container_is_ready + +__all__ = ["KafkaSSLContainer"] + +DEFAULT_IMAGE = "confluentinc/cp-kafka:7.5.1" + +DEFAULT_PORT = 9093 # default port for the Kafka container +BROKER_PORT = 9092 # auxiliary port for inter broker listener + + +class KafkaSSLContainer(DockerContainer): + """Kafka container that supports SSL (or actually TLS).""" + + TC_START_SCRIPT = "/tc-start.sh" + SECRETS_PATH = "/etc/kafka/secrets" + + def __init__( # noqa: C901, PLR0912, PLR0913 + self, + image: str = DEFAULT_IMAGE, + port: int = DEFAULT_PORT, + cert: Optional[str] = None, + key: Optional[str] = None, + trusted: Optional[str] = None, + password: Optional[str] = None, + client_auth: Optional[Literal["requested", "required", "none"]] = None, + **kwargs, + ) -> None: + """Initialize the Kafka SSL container with the given parameters. + + "cert" must contain the certificate of the broker and if needed also + intermediate certificates. "key" must contain the private key of the + broker. If it password protected, "password" must be specified as well. + "trusted" must contain the trusted certificates. In "client_auth" you can + specify whether authentication is requested, required or not needed at all. 
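+
+        A minimal usage sketch (the PEM strings and their variable names are
+        illustrative, e.g. produced by a test helper):
+
+            with KafkaSSLContainer(
+                cert=broker_cert_pem,  # broker certificate (chain) in PEM format
+                key=broker_key_pem,  # broker private key in PEM format
+                password=broker_key_password,
+                trusted=ca_cert_pem,  # CA certificate(s) the broker should trust
+                client_auth="required",
+            ) as kafka:
+                bootstrap_server = kafka.get_bootstrap_server()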
+ """ + super().__init__(image, **kwargs) + env = self.with_env + self.port = port + ssl = bool(cert or trusted or client_auth) + protocol = "SSL" if ssl else "PLAINTEXT" + self.protocol = protocol + self.with_exposed_ports(port) + self.broker_port = DEFAULT_PORT if port == BROKER_PORT else BROKER_PORT + listeners = f"{protocol}://0.0.0.0:{port},BROKER://0.0.0.0:{self.broker_port}" + protocol_map = f"BROKER:PLAINTEXT,{protocol}:{protocol}" + env("KAFKA_LISTENERS", listeners) + env("KAFKA_INTER_BROKER_LISTENER_NAME", "BROKER") + env("KAFKA_LISTENER_SECURITY_PROTOCOL_MAP", protocol_map) + env("KAFKA_BROKER_ID", "1") + env("KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR", "1") + env("KAFKA_OFFSETS_TOPIC_NUM_PARTITIONS", "1") + env("KAFKA_LOG_FLUSH_INTERVAL_MESSAGES", "10000000") + env("KAFKA_GROUP_INITIAL_REBALANCE_DELAY_MS", "0") + if ssl: + if cert: + cert = cert.strip().replace("\n", "\\n") + if key: + key = key.strip().replace("\n", "\\n") + if password: + password = password.strip().replace("\n", "\\n") + if trusted: + trusted = trusted.strip().replace("\n", "\\n") + if cert: + if not cert.startswith("-----BEGIN") or "CERTIFICATE" not in cert: + raise ValueError("Certificate chain must be in PEM format") + env("KAFKA_SSL_KEYSTORE_CERTIFICATE_CHAIN", cert) + if key: + if not key.startswith("-----BEGIN") or "PRIVATE KEY" not in key: + raise ValueError("Private key must be in PEM format") + env("KAFKA_SSL_KEYSTORE_KEY", key) + if cert or key: + env("KAFKA_SSL_KEYSTORE_TYPE", "PEM") + if key and password: + env("KAFKA_SSL_KEY_PASSWORD", password) + if trusted: + if not trusted.startswith("-----BEGIN") or "CERTIFICATE" not in trusted: + raise ValueError("Trusted certificates must be in PEM format") + env("KAFKA_SSL_TRUSTSTORE_CERTIFICATES", trusted) + env("KAFKA_SSL_TRUSTSTORE_TYPE", "PEM") + if client_auth: + env("KAFKA_SSL_CLIENT_AUTH", client_auth) + env("KAFKA_SSL_ENDPOINT_IDENTIFICATION_ALGORITHM", " ") + + def get_bootstrap_server(self) -> str: + """Get the Kafka bootstrap server.""" + host = self.get_container_host_ip() + port = self.get_exposed_port(self.port) + return f"{host}:{port}" + + def start(self) -> "KafkaSSLContainer": + """Start the Docker container.""" + script = self.TC_START_SCRIPT + command = f'sh -c "while [ ! -f {script} ]; do sleep 0.1; done; sh {script}"' + self.with_command(command) + super().start() + self.tc_start() + self._connect() + return self + + @wait_container_is_ready( + UnrecognizedBrokerVersion, NoBrokersAvailable, KafkaError, ValueError + ) # type: ignore + def _connect(self) -> None: + bootstrap_server = self.get_bootstrap_server() + try: + consumer = KafkaConsumer( + group_id="test", + bootstrap_servers=[bootstrap_server], + security_protocol=self.protocol, + ) + except SSLError: + pass # count this as connected + else: + if not consumer.bootstrap_connected(): + raise KafkaError("Unable to connect with Kafka container!") + + def tc_start(self) -> None: + """Start the test container.""" + protocol = self.protocol + host = self.get_container_host_ip() + port = self.get_exposed_port(self.port) + listeners = f"{protocol}://{host}:{port},BROKER://127.0.0.1:{self.broker_port}" + # In the following script, first start the ZooKeeper and then launch the + # Kafka broker. The configuration in the Docker image checks for the + # existence of key and trust store files which we do not need, since we + # pass the certificates and keys directly to the broker. Therefore, we + # deactivate these checks in the "configure" script before running it. 
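+        # The script below is written into the container at TC_START_SCRIPT via
+        # create_file(); the container command prepared in start() waits for
+        # that file to appear and then executes it.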
+ script = f""" + #!/bin/bash + c=/etc/confluent/docker + . $c/bash-config + export KAFKA_ADVERTISED_LISTENERS={listeners} + export KAFKA_ZOOKEEPER_CONNECT=localhost:2181 + p=zookeeper.properties + echo "clientPort=2181" > $p + echo "dataDir=/var/lib/zookeeper/data" >> $p + echo "dataLogDir=/var/lib/zookeeper/log" >> $p + zookeeper-server-start $p & + sed -i -E '/^if .*LISTENERS.*SSL:/,/^fi/d' $c/configure + $c/configure && $c/launch + """ + self.create_file(dedent(script).strip().encode("utf-8"), self.TC_START_SCRIPT) + + def create_file(self, content: bytes, path: str) -> None: + """Create a file inside the container.""" + with BytesIO() as archive: + with tarfile.TarFile(fileobj=archive, mode="w") as tar: + tarinfo = tarfile.TarInfo(name=path) + tarinfo.size = len(content) + tarinfo.mtime = time.time() # type: ignore + tar.addfile(tarinfo, BytesIO(content)) + archive.seek(0) + self.get_wrapped_container().put_archive("/", archive) diff --git a/tests/fixtures/kafka_secrets.py b/tests/fixtures/kafka_secrets.py new file mode 100644 index 00000000..a9bebefb --- /dev/null +++ b/tests/fixtures/kafka_secrets.py @@ -0,0 +1,166 @@ +# Copyright 2021 - 2023 Universität Tübingen, DKFZ, EMBL, and Universität zu Köln +# for the German Human Genome-Phenome Archive (GHGA) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +"""Generate secrets for authenticated and encrypted communication with Kafka.""" + +import datetime +import secrets +import string +from typing import Optional + +from cryptography import x509 +from cryptography.hazmat.primitives import hashes, serialization +from cryptography.hazmat.primitives.asymmetric import rsa +from cryptography.x509.oid import NameOID + +__all__ = ["KafkaSecrets"] + + +class KafkaSecrets: + """Container for all secrets needed to establish a TLS connection with Kafka.""" + + ca_cert: str + + broker_cert: str + broker_key: str + broker_pwd: str + + client_cert: str + client_key: str + client_pwd: str + + def __init__( + self, + hostname: str = "localhost", + broker_pwd_size: int = 0, + client_pwd_size: int = 16, + days: int = 1, + ) -> None: + """Generate random secrets in PEM format. + + Unfortunately, the Kafka broker does not support the password protection + algorithm provided by the cryptography library. Therefore, and because this + is a feature that we do not need to test here, we do not generate a password + for the broker key by default. However, is works with the Kafka client. 
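+
+        A sketch of typical use in a test (using pathlib.Path; the target file
+        paths are illustrative):
+
+            secrets = KafkaSecrets(hostname="localhost")
+            Path("ca.crt").write_text(secrets.ca_cert)
+            Path("client.crt").write_text(secrets.client_cert)
+            Path("client.key").write_text(secrets.client_key)
+            # the password for the client key is held in secrets.client_pwd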
+ """ + ca_cert, ca_key = generate_self_signed_cert(cn="ca.test.dev", days=days) + self.ca_cert = cert_to_pem(ca_cert) + + cert, key = generate_signed_cert( + cn=hostname, ca=ca_cert, ca_key=ca_key, days=days + ) + + self.broker_cert = cert_to_pem(cert) + password = generate_password(broker_pwd_size) + self.broker_key = key_to_pem(key, password) + self.broker_pwd = password + + cert, key = generate_signed_cert( + cn=hostname, ca=ca_cert, ca_key=ca_key, days=days + ) + + self.client_cert = cert_to_pem(cert) + password = generate_password(client_pwd_size) + self.client_key = key_to_pem(key, password) + self.client_pwd = password + + +def cert_to_pem(cert: x509.Certificate) -> str: + """Serialize the given certificate in PEM format.""" + return cert.public_bytes(serialization.Encoding.PEM).decode("ascii") + + +def key_to_pem(key: rsa.RSAPrivateKey, password: Optional[str]) -> str: + """Serialize the given key in PEM format.""" + return key.private_bytes( + encoding=serialization.Encoding.PEM, + format=serialization.PrivateFormat.PKCS8, + encryption_algorithm=get_encryption_algorithm(password), + ).decode("ascii") + + +def generate_password(size: int = 16) -> str: + """Generate a random password.""" + chars = string.ascii_letters + string.digits + choice = secrets.choice + return "".join(choice(chars) for _i in range(size)) + + +def get_encryption_algorithm( + password: Optional[str], +) -> serialization.KeySerializationEncryption: + """Get an encryption algorithm for the given password.""" + return ( + serialization.BestAvailableEncryption(password.encode("utf-8")) + if password + else serialization.NoEncryption() + ) + + +def generate_key() -> rsa.RSAPrivateKey: + """Generate a private key using RSA.""" + return rsa.generate_private_key(public_exponent=65537, key_size=2048) + + +def generate_self_signed_cert( + cn: str, days: int = 1 +) -> tuple[x509.Certificate, rsa.RSAPrivateKey]: + """Generate a self-signed certificate with its private key.""" + key = generate_key() + subject = issuer = x509.Name([x509.NameAttribute(NameOID.COMMON_NAME, cn)]) + cert = ( + x509.CertificateBuilder() + .subject_name(subject) + .issuer_name(issuer) + .public_key(key.public_key()) + .serial_number(x509.random_serial_number()) + .not_valid_before(datetime.datetime.utcnow()) + .not_valid_after(datetime.datetime.utcnow() + datetime.timedelta(days=days)) + .add_extension( + x509.BasicConstraints(ca=True, path_length=None), + critical=True, + ) + .sign(key, hashes.SHA256()) + ) + return cert, key + + +def generate_signed_cert( + cn: str, ca: x509.Certificate, ca_key: rsa.RSAPrivateKey, days: int = 1 +) -> tuple[x509.Certificate, rsa.RSAPrivateKey]: + """Generate a signed certificate with its private key.""" + key = generate_key() + subject = x509.Name([x509.NameAttribute(NameOID.COMMON_NAME, cn)]) + csr = ( + x509.CertificateSigningRequestBuilder() + .subject_name(subject) + .sign(key, hashes.SHA256()) + ) + cert = ( + x509.CertificateBuilder() + .subject_name(csr.subject) + .issuer_name(ca.subject) + .public_key(key.public_key()) + .serial_number(x509.random_serial_number()) + .not_valid_before(datetime.datetime.utcnow()) + .not_valid_after(datetime.datetime.utcnow() + datetime.timedelta(days=days)) + .add_extension( + x509.BasicConstraints(ca=True, path_length=None), + critical=True, + ) + .sign(ca_key, hashes.SHA256()) + ) + return cert, key diff --git a/tests/integration/test_akafka.py b/tests/integration/test_akafka.py index 668f9e26..21ab0147 100644 --- a/tests/integration/test_akafka.py +++ 
b/tests/integration/test_akafka.py @@ -16,12 +16,12 @@ """Testing Apache Kafka based providers.""" +from os import environ from pathlib import Path +from socket import getfqdn from unittest.mock import AsyncMock import pytest -from kafka import KafkaAdminClient -from kafka.errors import KafkaError from hexkit.custom_types import JsonObject from hexkit.providers.akafka import ( @@ -29,12 +29,15 @@ KafkaEventPublisher, KafkaEventSubscriber, ) +from hexkit.providers.akafka.containers import KafkaSSLContainer from hexkit.providers.akafka.testutils import ( # noqa: F401 ExpectedEvent, KafkaFixture, kafka_fixture, ) +from ..fixtures.kafka_secrets import KafkaSecrets + @pytest.mark.asyncio async def test_kafka_event_publisher(kafka_fixture: KafkaFixture): # noqa: F811 @@ -100,73 +103,63 @@ async def test_kafka_event_subscriber(kafka_fixture: KafkaFixture): # noqa: F81 ) -def find_kafka_secrets_dir() -> Path: - """Get the directory with Kafka secrets.""" - current_dir = Path(__file__) - while current_dir != current_dir.parent: - current_dir = current_dir.parent - secrets_dir = current_dir / ".devcontainer" / "kafka_secrets" - if secrets_dir.is_dir(): - for filename in "ca.crt", "client.crt", "client.key", "pwd.txt": - assert (secrets_dir / filename).exists(), ( - f"No {filename} in Kafka secrets directory." - " Please re-run the create_secrets.sh script." - ) - return secrets_dir - assert False, "Kafka secrets directory not found." +@pytest.mark.asyncio +async def test_kafka_ssl(tmp_path: Path): + """Test connecting to Kafka via SSL (TLS).""" + hostname = environ.get("TC_HOST") or getfqdn() + + secrets = KafkaSecrets(hostname=hostname) + path = tmp_path / ".ssl" + path.mkdir() -@pytest.mark.asyncio -async def test_kafka_ssl(): - """Test connecting to Kafka via SSL. + (path / "ca.crt").open("w").write(secrets.ca_cert) + (path / "client.crt").open("w").write(secrets.client_cert) + (path / "client.key").open("w").write(secrets.client_key) - This test uses the broker configured with the needed secrets via docker-compose - instead of a test container. - """ payload: JsonObject = {"test_content": "Be aware... 
Connect with care"} type_ = "test_type" key = "test_key" topic = "test_topic" - admin_client = KafkaAdminClient(bootstrap_servers=["localhost:9092"]) - try: - admin_client.delete_topics([topic]) - except KafkaError: - pass - - secrets_dir = find_kafka_secrets_dir() - password = open(secrets_dir / "pwd.txt").read().strip() - assert password - - config = KafkaConfig( - service_name="test_ssl", - service_instance_id="1", - kafka_servers=["localhost:19092"], # SSL port - kafka_security_protocol="SSL", - kafka_ssl_cafile=str(secrets_dir / "ca.crt"), - kafka_ssl_certfile=str(secrets_dir / "client.crt"), - kafka_ssl_keyfile=str(secrets_dir / "client.key"), - kafka_ssl_password=password, - ) - - async with KafkaEventPublisher.construct(config=config) as event_publisher: - await event_publisher.publish( - payload=payload, - type_=type_, - key=key, - topic=topic, + with KafkaSSLContainer( + cert=secrets.broker_cert, + key=secrets.broker_key, + password=secrets.broker_pwd, + trusted=secrets.ca_cert, + client_auth="required", + ) as kafka: + kafka_servers = [kafka.get_bootstrap_server()] + + config = KafkaConfig( + service_name="test_ssl", + service_instance_id="1", + kafka_servers=kafka_servers, + kafka_security_protocol="SSL", + kafka_ssl_cafile=str(path / "ca.crt"), + kafka_ssl_certfile=str(path / "client.crt"), + kafka_ssl_keyfile=str(path / "client.key"), + kafka_ssl_password=secrets.client_pwd, ) - translator = AsyncMock() - translator.topics_of_interest = [topic] - translator.types_of_interest = [type_] + async with KafkaEventPublisher.construct(config=config) as event_publisher: + await event_publisher.publish( + payload=payload, + type_=type_, + key=key, + topic=topic, + ) - async with KafkaEventSubscriber.construct( - config=config, - translator=translator, - ) as event_subscriber: - await event_subscriber.run(forever=False) + translator = AsyncMock() + translator.topics_of_interest = [topic] + translator.types_of_interest = [type_] - translator.consume.assert_awaited_once_with( - payload=payload, type_=type_, topic=topic - ) + async with KafkaEventSubscriber.construct( + config=config, + translator=translator, + ) as event_subscriber: + await event_subscriber.run(forever=False) + + translator.consume.assert_awaited_once_with( + payload=payload, type_=type_, topic=topic + ) From 37172662bbea7cdd0d487dfe98f091681aed36ee Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Thu, 9 Nov 2023 16:58:01 +0000 Subject: [PATCH 21/29] Update from template --- scripts/license_checker.py | 3 +++ src/hexkit/providers/akafka/containers.py | 2 +- 2 files changed, 4 insertions(+), 1 deletion(-) diff --git a/scripts/license_checker.py b/scripts/license_checker.py index 0ddc7861..5d8be069 100755 --- a/scripts/license_checker.py +++ b/scripts/license_checker.py @@ -88,6 +88,9 @@ "xml", "yaml", "yml", + "tsv", + "fastq", + "gz", ] # exclude any files with names that match any of the following regex: diff --git a/src/hexkit/providers/akafka/containers.py b/src/hexkit/providers/akafka/containers.py index 73ff5c78..2e8bdab0 100644 --- a/src/hexkit/providers/akafka/containers.py +++ b/src/hexkit/providers/akafka/containers.py @@ -127,7 +127,7 @@ def start(self) -> "KafkaSSLContainer": @wait_container_is_ready( UnrecognizedBrokerVersion, NoBrokersAvailable, KafkaError, ValueError - ) # type: ignore + ) # pyright: ignore def _connect(self) -> None: bootstrap_server = self.get_bootstrap_server() try: From 35d4eba3ca7503a896cdef8736e2e0c40ede0b8a Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Thu, 9 Nov 
2023 16:59:43 +0000 Subject: [PATCH 22/29] Update lock files --- requirements-dev.txt | 85 +++++++++++++++++++++++++++++++++++++++++++- requirements.txt | 2 +- 2 files changed, 85 insertions(+), 2 deletions(-) diff --git a/requirements-dev.txt b/requirements-dev.txt index e7c617f1..7edea988 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -2,7 +2,7 @@ # This file is autogenerated by pip-compile with Python 3.9 # by the following command: # -# pip-compile --all-extras --generate-hashes --output-file=/workspace/requirements-dev.txt /tmp/tmpye1zf3r4/pyproject.toml /workspace/requirements-dev.in +# pip-compile --all-extras --generate-hashes --output-file=/workspace/requirements-dev.txt /tmp/tmpij57a9ui/pyproject.toml /workspace/requirements-dev.in # aiokafka==0.8.1 \ --hash=sha256:1e24839088fd6d3ff481cc09a48ea487b997328df11630bc0a1b88255edbcfe9 \ @@ -95,6 +95,60 @@ certifi==2023.7.22 \ # httpcore # httpx # requests +cffi==1.16.0 \ + --hash=sha256:0c9ef6ff37e974b73c25eecc13952c55bceed9112be2d9d938ded8e856138bcc \ + --hash=sha256:131fd094d1065b19540c3d72594260f118b231090295d8c34e19a7bbcf2e860a \ + --hash=sha256:1b8ebc27c014c59692bb2664c7d13ce7a6e9a629be20e54e7271fa696ff2b417 \ + --hash=sha256:2c56b361916f390cd758a57f2e16233eb4f64bcbeee88a4881ea90fca14dc6ab \ + --hash=sha256:2d92b25dbf6cae33f65005baf472d2c245c050b1ce709cc4588cdcdd5495b520 \ + --hash=sha256:31d13b0f99e0836b7ff893d37af07366ebc90b678b6664c955b54561fc36ef36 \ + --hash=sha256:32c68ef735dbe5857c810328cb2481e24722a59a2003018885514d4c09af9743 \ + --hash=sha256:3686dffb02459559c74dd3d81748269ffb0eb027c39a6fc99502de37d501faa8 \ + --hash=sha256:582215a0e9adbe0e379761260553ba11c58943e4bbe9c36430c4ca6ac74b15ed \ + --hash=sha256:5b50bf3f55561dac5438f8e70bfcdfd74543fd60df5fa5f62d94e5867deca684 \ + --hash=sha256:5bf44d66cdf9e893637896c7faa22298baebcd18d1ddb6d2626a6e39793a1d56 \ + --hash=sha256:6602bc8dc6f3a9e02b6c22c4fc1e47aa50f8f8e6d3f78a5e16ac33ef5fefa324 \ + --hash=sha256:673739cb539f8cdaa07d92d02efa93c9ccf87e345b9a0b556e3ecc666718468d \ + --hash=sha256:68678abf380b42ce21a5f2abde8efee05c114c2fdb2e9eef2efdb0257fba1235 \ + --hash=sha256:68e7c44931cc171c54ccb702482e9fc723192e88d25a0e133edd7aff8fcd1f6e \ + --hash=sha256:6b3d6606d369fc1da4fd8c357d026317fbb9c9b75d36dc16e90e84c26854b088 \ + --hash=sha256:748dcd1e3d3d7cd5443ef03ce8685043294ad6bd7c02a38d1bd367cfd968e000 \ + --hash=sha256:7651c50c8c5ef7bdb41108b7b8c5a83013bfaa8a935590c5d74627c047a583c7 \ + --hash=sha256:7b78010e7b97fef4bee1e896df8a4bbb6712b7f05b7ef630f9d1da00f6444d2e \ + --hash=sha256:7e61e3e4fa664a8588aa25c883eab612a188c725755afff6289454d6362b9673 \ + --hash=sha256:80876338e19c951fdfed6198e70bc88f1c9758b94578d5a7c4c91a87af3cf31c \ + --hash=sha256:8895613bcc094d4a1b2dbe179d88d7fb4a15cee43c052e8885783fac397d91fe \ + --hash=sha256:88e2b3c14bdb32e440be531ade29d3c50a1a59cd4e51b1dd8b0865c54ea5d2e2 \ + --hash=sha256:8f8e709127c6c77446a8c0a8c8bf3c8ee706a06cd44b1e827c3e6a2ee6b8c098 \ + --hash=sha256:9cb4a35b3642fc5c005a6755a5d17c6c8b6bcb6981baf81cea8bfbc8903e8ba8 \ + --hash=sha256:9f90389693731ff1f659e55c7d1640e2ec43ff725cc61b04b2f9c6d8d017df6a \ + --hash=sha256:a09582f178759ee8128d9270cd1344154fd473bb77d94ce0aeb2a93ebf0feaf0 \ + --hash=sha256:a6a14b17d7e17fa0d207ac08642c8820f84f25ce17a442fd15e27ea18d67c59b \ + --hash=sha256:a72e8961a86d19bdb45851d8f1f08b041ea37d2bd8d4fd19903bc3083d80c896 \ + --hash=sha256:abd808f9c129ba2beda4cfc53bde801e5bcf9d6e0f22f095e45327c038bfe68e \ + --hash=sha256:ac0f5edd2360eea2f1daa9e26a41db02dd4b0451b48f7c318e217ee092a213e9 \ + 
--hash=sha256:b29ebffcf550f9da55bec9e02ad430c992a87e5f512cd63388abb76f1036d8d2 \ + --hash=sha256:b2ca4e77f9f47c55c194982e10f058db063937845bb2b7a86c84a6cfe0aefa8b \ + --hash=sha256:b7be2d771cdba2942e13215c4e340bfd76398e9227ad10402a8767ab1865d2e6 \ + --hash=sha256:b84834d0cf97e7d27dd5b7f3aca7b6e9263c56308ab9dc8aae9784abb774d404 \ + --hash=sha256:b86851a328eedc692acf81fb05444bdf1891747c25af7529e39ddafaf68a4f3f \ + --hash=sha256:bcb3ef43e58665bbda2fb198698fcae6776483e0c4a631aa5647806c25e02cc0 \ + --hash=sha256:c0f31130ebc2d37cdd8e44605fb5fa7ad59049298b3f745c74fa74c62fbfcfc4 \ + --hash=sha256:c6a164aa47843fb1b01e941d385aab7215563bb8816d80ff3a363a9f8448a8dc \ + --hash=sha256:d8a9d3ebe49f084ad71f9269834ceccbf398253c9fac910c4fd7053ff1386936 \ + --hash=sha256:db8e577c19c0fda0beb7e0d4e09e0ba74b1e4c092e0e40bfa12fe05b6f6d75ba \ + --hash=sha256:dc9b18bf40cc75f66f40a7379f6a9513244fe33c0e8aa72e2d56b0196a7ef872 \ + --hash=sha256:e09f3ff613345df5e8c3667da1d918f9149bd623cd9070c983c013792a9a62eb \ + --hash=sha256:e4108df7fe9b707191e55f33efbcb2d81928e10cea45527879a4749cbe472614 \ + --hash=sha256:e6024675e67af929088fda399b2094574609396b1decb609c55fa58b028a32a1 \ + --hash=sha256:e70f54f1796669ef691ca07d046cd81a29cb4deb1e5f942003f401c0c4a2695d \ + --hash=sha256:e715596e683d2ce000574bae5d07bd522c781a822866c20495e52520564f0969 \ + --hash=sha256:e760191dd42581e023a68b758769e2da259b5d52e3103c6060ddc02c9edb8d7b \ + --hash=sha256:ed86a35631f7bfbb28e108dd96773b9d5a6ce4811cf6ea468bb6a359b256b1e4 \ + --hash=sha256:ee07e47c12890ef248766a6e55bd38ebfb2bb8edd4142d56db91b21ea68b7627 \ + --hash=sha256:fa3a0128b152627161ce47201262d3140edb5a5c3da88d73a1b790a959126956 \ + --hash=sha256:fcc8eb6d5902bb1cf6dc4f187ee3ea80a1eba0a89aba40a5cb20a5087d961357 + # via cryptography cfgv==3.4.0 \ --hash=sha256:b7265b1f29fd3316bfcd2b330d63d024f2bfd8bcb8b0272f8e19a504856c48f9 \ --hash=sha256:e52591d4c5f5dead8e0f673fb16db7949d2cfb3f7da4582893288f0ded8fe560 @@ -255,6 +309,31 @@ coverage[toml]==7.3.2 \ # via # coverage # pytest-cov +cryptography==41.0.5 \ + --hash=sha256:0c327cac00f082013c7c9fb6c46b7cc9fa3c288ca702c74773968173bda421bf \ + --hash=sha256:0d2a6a598847c46e3e321a7aef8af1436f11c27f1254933746304ff014664d84 \ + --hash=sha256:227ec057cd32a41c6651701abc0328135e472ed450f47c2766f23267b792a88e \ + --hash=sha256:22892cc830d8b2c89ea60148227631bb96a7da0c1b722f2aac8824b1b7c0b6b8 \ + --hash=sha256:392cb88b597247177172e02da6b7a63deeff1937fa6fec3bbf902ebd75d97ec7 \ + --hash=sha256:3be3ca726e1572517d2bef99a818378bbcf7d7799d5372a46c79c29eb8d166c1 \ + --hash=sha256:573eb7128cbca75f9157dcde974781209463ce56b5804983e11a1c462f0f4e88 \ + --hash=sha256:580afc7b7216deeb87a098ef0674d6ee34ab55993140838b14c9b83312b37b86 \ + --hash=sha256:5a70187954ba7292c7876734183e810b728b4f3965fbe571421cb2434d279179 \ + --hash=sha256:73801ac9736741f220e20435f84ecec75ed70eda90f781a148f1bad546963d81 \ + --hash=sha256:7d208c21e47940369accfc9e85f0de7693d9a5d843c2509b3846b2db170dfd20 \ + --hash=sha256:8254962e6ba1f4d2090c44daf50a547cd5f0bf446dc658a8e5f8156cae0d8548 \ + --hash=sha256:88417bff20162f635f24f849ab182b092697922088b477a7abd6664ddd82291d \ + --hash=sha256:a48e74dad1fb349f3dc1d449ed88e0017d792997a7ad2ec9587ed17405667e6d \ + --hash=sha256:b948e09fe5fb18517d99994184854ebd50b57248736fd4c720ad540560174ec5 \ + --hash=sha256:c707f7afd813478e2019ae32a7c49cd932dd60ab2d2a93e796f68236b7e1fbf1 \ + --hash=sha256:d38e6031e113b7421db1de0c1b1f7739564a88f1684c6b89234fbf6c11b75147 \ + --hash=sha256:d3977f0e276f6f5bf245c403156673db103283266601405376f075c849a0b936 \ + 
--hash=sha256:da6a0ff8f1016ccc7477e6339e1d50ce5f59b88905585f77193ebd5068f1e797 \ + --hash=sha256:e270c04f4d9b5671ebcc792b3ba5d4488bf7c42c3c241a3748e2599776f29696 \ + --hash=sha256:e886098619d3815e0ad5790c973afeee2c0e6e04b4da90b88e6bd06e2a0b1b72 \ + --hash=sha256:ec3b055ff8f1dce8e6ef28f626e0972981475173d7973d63f271b29c8a2897da \ + --hash=sha256:fba1e91467c65fe64a82c689dc6cf58151158993b13eb7a7f3f4b7f395636723 + # via -r /workspace/requirements-dev.in dependency-injector==4.41.0 \ --hash=sha256:02620454ee8101f77a317f3229935ce687480883d72a40858ff4b0c87c935cce \ --hash=sha256:059fbb48333148143e8667a5323d162628dfe27c386bd0ed3deeecfc390338bf \ @@ -490,6 +569,10 @@ pre-commit==3.4.0 \ --hash=sha256:6bbd5129a64cad4c0dfaeeb12cd8f7ea7e15b77028d985341478c8af3c759522 \ --hash=sha256:96d529a951f8b677f730a7212442027e8ba53f9b04d217c4c67dc56c393ad945 # via -r /workspace/requirements-dev-common.in +pycparser==2.21 \ + --hash=sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9 \ + --hash=sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206 + # via cffi pydantic==2.4.2 \ --hash=sha256:94f336138093a5d7f426aac732dcfe7ab4eb4da243c88f891d65deb4a2556ee7 \ --hash=sha256:bc3ddf669d234f4220e6e1c4d96b061abe0998185a8d7855c0126782b7abc8c1 diff --git a/requirements.txt b/requirements.txt index bbf5063f..a7684137 100644 --- a/requirements.txt +++ b/requirements.txt @@ -2,7 +2,7 @@ # This file is autogenerated by pip-compile with Python 3.9 # by the following command: # -# pip-compile --all-extras --constraint=/workspace/requirements-dev.txt --generate-hashes --output-file=/workspace/requirements.txt /tmp/tmpye1zf3r4/pyproject.toml +# pip-compile --all-extras --constraint=/workspace/requirements-dev.txt --generate-hashes --output-file=/workspace/requirements.txt /tmp/tmpij57a9ui/pyproject.toml # aiokafka==0.8.1 \ --hash=sha256:1e24839088fd6d3ff481cc09a48ea487b997328df11630bc0a1b88255edbcfe9 \ From 12bf063e88196d5b8ce90cc1e73be445b6d44f91 Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Thu, 9 Nov 2023 17:27:56 +0000 Subject: [PATCH 23/29] Fix tests when not running in docker --- tests/integration/test_akafka.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/tests/integration/test_akafka.py b/tests/integration/test_akafka.py index 21ab0147..2b6eb167 100644 --- a/tests/integration/test_akafka.py +++ b/tests/integration/test_akafka.py @@ -18,7 +18,6 @@ from os import environ from pathlib import Path -from socket import getfqdn from unittest.mock import AsyncMock import pytest @@ -106,7 +105,7 @@ async def test_kafka_event_subscriber(kafka_fixture: KafkaFixture): # noqa: F81 @pytest.mark.asyncio async def test_kafka_ssl(tmp_path: Path): """Test connecting to Kafka via SSL (TLS).""" - hostname = environ.get("TC_HOST") or getfqdn() + hostname = environ.get("TC_HOST") or "localhost" secrets = KafkaSecrets(hostname=hostname) From 80ced22b36092cf41266053a3c2379b4837492e4 Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Thu, 9 Nov 2023 17:28:57 +0000 Subject: [PATCH 24/29] Use latest docker image for kafka everywhere --- examples/stream_calc/sc_tests/integration/test_event_api.py | 3 ++- src/hexkit/providers/akafka/testutils.py | 5 +++-- 2 files changed, 5 insertions(+), 3 deletions(-) diff --git a/examples/stream_calc/sc_tests/integration/test_event_api.py b/examples/stream_calc/sc_tests/integration/test_event_api.py index 34606ce1..478a09c5 100644 --- a/examples/stream_calc/sc_tests/integration/test_event_api.py +++ 
b/examples/stream_calc/sc_tests/integration/test_event_api.py @@ -30,6 +30,7 @@ from testcontainers.kafka import KafkaContainer from hexkit.custom_types import JsonObject +from hexkit.providers.akafka.containers import DEFAULT_IMAGE as KAFKA_IMAGE from stream_calc.config import Config from stream_calc.main import main @@ -200,7 +201,7 @@ async def test_receive_calc_publish(cases: list[Case] = deepcopy(CASES)): the results. """ - with KafkaContainer() as kafka: + with KafkaContainer(image=KAFKA_IMAGE) as kafka: kafka_server = kafka.get_bootstrap_server() submit_test_problems(cases, kafka_server=kafka_server) diff --git a/src/hexkit/providers/akafka/testutils.py b/src/hexkit/providers/akafka/testutils.py index 7e94877b..51604a81 100644 --- a/src/hexkit/providers/akafka/testutils.py +++ b/src/hexkit/providers/akafka/testutils.py @@ -32,6 +32,7 @@ from testcontainers.kafka import KafkaContainer from hexkit.custom_types import Ascii, JsonObject, PytestScope +from hexkit.providers.akafka.containers import DEFAULT_IMAGE as KAFKA_IMAGE from hexkit.providers.akafka.provider import ( ConsumerEvent, KafkaConfig, @@ -324,7 +325,7 @@ async def __aenter__(self) -> "EventRecorder": return self async def __aexit__(self, error_type, error_val, error_tb): - """Stop recording and check the recorded events agains the expectation when + """Stop recording and check the recorded events against the expectation when exiting the context block. """ await self.stop_recording() @@ -403,7 +404,7 @@ async def kafka_fixture_function() -> AsyncGenerator[KafkaFixture, None]: **Do not call directly** Instead, use get_kafka_fixture() """ - with KafkaContainer(image="confluentinc/cp-kafka:5.4.9-1-deb8") as kafka: + with KafkaContainer(image=KAFKA_IMAGE) as kafka: kafka_servers = [kafka.get_bootstrap_server()] config = KafkaConfig( # type: ignore service_name="test_publisher", From d9ff85f8b4864082213405195c80264b9953dbee Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Thu, 9 Nov 2023 17:31:17 +0000 Subject: [PATCH 25/29] Bump version --- pyproject.toml | 2 +- requirements-dev.txt | 2 +- requirements.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 4f6c68fe..5bb0d051 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta" [project] name = "hexkit" -version = "0.11.1" +version = "0.11.2" description = "A Toolkit for Building Microservices using the Hexagonal Architecture" readme = "README.md" authors = [ diff --git a/requirements-dev.txt b/requirements-dev.txt index 7edea988..c74abda0 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -2,7 +2,7 @@ # This file is autogenerated by pip-compile with Python 3.9 # by the following command: # -# pip-compile --all-extras --generate-hashes --output-file=/workspace/requirements-dev.txt /tmp/tmpij57a9ui/pyproject.toml /workspace/requirements-dev.in +# pip-compile --all-extras --generate-hashes --output-file=/workspace/requirements-dev.txt /tmp/tmpydm_s59g/pyproject.toml /workspace/requirements-dev.in # aiokafka==0.8.1 \ --hash=sha256:1e24839088fd6d3ff481cc09a48ea487b997328df11630bc0a1b88255edbcfe9 \ diff --git a/requirements.txt b/requirements.txt index a7684137..6a3f34b8 100644 --- a/requirements.txt +++ b/requirements.txt @@ -2,7 +2,7 @@ # This file is autogenerated by pip-compile with Python 3.9 # by the following command: # -# pip-compile --all-extras --constraint=/workspace/requirements-dev.txt --generate-hashes --output-file=/workspace/requirements.txt 
/tmp/tmpij57a9ui/pyproject.toml +# pip-compile --all-extras --constraint=/workspace/requirements-dev.txt --generate-hashes --output-file=/workspace/requirements.txt /tmp/tmpydm_s59g/pyproject.toml # aiokafka==0.8.1 \ --hash=sha256:1e24839088fd6d3ff481cc09a48ea487b997328df11630bc0a1b88255edbcfe9 \ From 576e4ef31381b4e6f75931719d8cc092745ca7f4 Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Fri, 10 Nov 2023 09:30:36 +0000 Subject: [PATCH 26/29] Point to GitHub issue that we work around --- src/hexkit/providers/akafka/containers.py | 6 +----- 1 file changed, 1 insertion(+), 5 deletions(-) diff --git a/src/hexkit/providers/akafka/containers.py b/src/hexkit/providers/akafka/containers.py index 2e8bdab0..dd338dd1 100644 --- a/src/hexkit/providers/akafka/containers.py +++ b/src/hexkit/providers/akafka/containers.py @@ -148,11 +148,6 @@ def tc_start(self) -> None: host = self.get_container_host_ip() port = self.get_exposed_port(self.port) listeners = f"{protocol}://{host}:{port},BROKER://127.0.0.1:{self.broker_port}" - # In the following script, first start the ZooKeeper and then launch the - # Kafka broker. The configuration in the Docker image checks for the - # existence of key and trust store files which we do not need, since we - # pass the certificates and keys directly to the broker. Therefore, we - # deactivate these checks in the "configure" script before running it. script = f""" #!/bin/bash c=/etc/confluent/docker @@ -164,6 +159,7 @@ def tc_start(self) -> None: echo "dataDir=/var/lib/zookeeper/data" >> $p echo "dataLogDir=/var/lib/zookeeper/log" >> $p zookeeper-server-start $p & + # workaround for https://github.com/confluentinc/kafka-images/issues/244 sed -i -E '/^if .*LISTENERS.*SSL:/,/^fi/d' $c/configure $c/configure && $c/launch """ From 1e7036e37f2cf8d800ef6dc81642e35520927bb6 Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Fri, 10 Nov 2023 14:18:30 +0000 Subject: [PATCH 27/29] Rename module with test container --- examples/stream_calc/sc_tests/integration/test_event_api.py | 2 +- src/hexkit/providers/akafka/{containers.py => testcontainer.py} | 0 src/hexkit/providers/akafka/testutils.py | 2 +- tests/integration/test_akafka.py | 2 +- 4 files changed, 3 insertions(+), 3 deletions(-) rename src/hexkit/providers/akafka/{containers.py => testcontainer.py} (100%) diff --git a/examples/stream_calc/sc_tests/integration/test_event_api.py b/examples/stream_calc/sc_tests/integration/test_event_api.py index 478a09c5..7ad72ec8 100644 --- a/examples/stream_calc/sc_tests/integration/test_event_api.py +++ b/examples/stream_calc/sc_tests/integration/test_event_api.py @@ -30,7 +30,7 @@ from testcontainers.kafka import KafkaContainer from hexkit.custom_types import JsonObject -from hexkit.providers.akafka.containers import DEFAULT_IMAGE as KAFKA_IMAGE +from hexkit.providers.akafka.testcontainer import DEFAULT_IMAGE as KAFKA_IMAGE from stream_calc.config import Config from stream_calc.main import main diff --git a/src/hexkit/providers/akafka/containers.py b/src/hexkit/providers/akafka/testcontainer.py similarity index 100% rename from src/hexkit/providers/akafka/containers.py rename to src/hexkit/providers/akafka/testcontainer.py diff --git a/src/hexkit/providers/akafka/testutils.py b/src/hexkit/providers/akafka/testutils.py index 51604a81..eabc8550 100644 --- a/src/hexkit/providers/akafka/testutils.py +++ b/src/hexkit/providers/akafka/testutils.py @@ -32,13 +32,13 @@ from testcontainers.kafka import KafkaContainer from hexkit.custom_types import Ascii, JsonObject, PytestScope -from 
hexkit.providers.akafka.containers import DEFAULT_IMAGE as KAFKA_IMAGE from hexkit.providers.akafka.provider import ( ConsumerEvent, KafkaConfig, KafkaEventPublisher, get_event_type, ) +from hexkit.providers.akafka.testcontainer import DEFAULT_IMAGE as KAFKA_IMAGE @dataclass(frozen=True) diff --git a/tests/integration/test_akafka.py b/tests/integration/test_akafka.py index 2b6eb167..772ff35c 100644 --- a/tests/integration/test_akafka.py +++ b/tests/integration/test_akafka.py @@ -28,7 +28,7 @@ KafkaEventPublisher, KafkaEventSubscriber, ) -from hexkit.providers.akafka.containers import KafkaSSLContainer +from hexkit.providers.akafka.testcontainer import KafkaSSLContainer from hexkit.providers.akafka.testutils import ( # noqa: F401 ExpectedEvent, KafkaFixture, From af44944c1bdbe48301487167a7c740805fd4a9a2 Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Fri, 10 Nov 2023 15:27:58 +0100 Subject: [PATCH 28/29] Update pyproject.toml Co-authored-by: Kersten Breuer --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index 5bb0d051..e572b292 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta" [project] name = "hexkit" -version = "0.11.2" +version = "1.0.0" description = "A Toolkit for Building Microservices using the Hexagonal Architecture" readme = "README.md" authors = [ From b3e09d520c21d9085fb9d335076fd8ea66adc6c4 Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Fri, 10 Nov 2023 14:30:15 +0000 Subject: [PATCH 29/29] Recreate lock files --- requirements-dev.txt | 2 +- requirements.txt | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/requirements-dev.txt b/requirements-dev.txt index c74abda0..6c84919c 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -2,7 +2,7 @@ # This file is autogenerated by pip-compile with Python 3.9 # by the following command: # -# pip-compile --all-extras --generate-hashes --output-file=/workspace/requirements-dev.txt /tmp/tmpydm_s59g/pyproject.toml /workspace/requirements-dev.in +# pip-compile --all-extras --generate-hashes --output-file=/workspace/requirements-dev.txt /tmp/tmp6008thxb/pyproject.toml /workspace/requirements-dev.in # aiokafka==0.8.1 \ --hash=sha256:1e24839088fd6d3ff481cc09a48ea487b997328df11630bc0a1b88255edbcfe9 \ diff --git a/requirements.txt b/requirements.txt index 6a3f34b8..f913204e 100644 --- a/requirements.txt +++ b/requirements.txt @@ -2,7 +2,7 @@ # This file is autogenerated by pip-compile with Python 3.9 # by the following command: # -# pip-compile --all-extras --constraint=/workspace/requirements-dev.txt --generate-hashes --output-file=/workspace/requirements.txt /tmp/tmpydm_s59g/pyproject.toml +# pip-compile --all-extras --constraint=/workspace/requirements-dev.txt --generate-hashes --output-file=/workspace/requirements.txt /tmp/tmp6008thxb/pyproject.toml # aiokafka==0.8.1 \ --hash=sha256:1e24839088fd6d3ff481cc09a48ea487b997328df11630bc0a1b88255edbcfe9 \
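
For orientation, a minimal sketch of how the renamed module is consumed in test code after the patches above: the pinned image name now comes from hexkit.providers.akafka.testcontainer rather than the old containers module, and it is passed to testcontainers' KafkaContainer exactly as in the updated fixtures. This is an illustration only, not part of the patch series; it assumes the KafkaConfig fields not shown here can stay at their defaults (hence the "# type: ignore", as in the original fixture).

from testcontainers.kafka import KafkaContainer

from hexkit.providers.akafka.provider import KafkaConfig
from hexkit.providers.akafka.testcontainer import DEFAULT_IMAGE as KAFKA_IMAGE

# Spin up the pinned Kafka test container and point a KafkaConfig at it,
# mirroring what kafka_fixture_function() in testutils.py does.
with KafkaContainer(image=KAFKA_IMAGE) as kafka:
    config = KafkaConfig(  # type: ignore
        service_name="test_publisher",
        kafka_servers=[kafka.get_bootstrap_server()],
    )

Keeping the image name in one module (DEFAULT_IMAGE) means the example app tests, the pytest fixtures, and the SSL integration tests all pick up a version bump of the Kafka image from a single place.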