From 3725a700ee154142e080cf0523b79c92d77561e2 Mon Sep 17 00:00:00 2001
From: Mourits de Beer <31511766+ff137@users.noreply.github.com>
Date: Tue, 14 May 2024 19:26:32 +0200
Subject: [PATCH] :art: Apply formatting (#448)

* :art: apply isort import organisation

Signed-off-by: ff137

* :art: apply black formatting

Signed-off-by: ff137

---------

Signed-off-by: ff137
---
 .../basicmessage_storage/v1_0/__init__.py | 11 +-
 .../basicmessage_storage/v1_0/config.py | 2 +-
 .../basicmessage_storage/v1_0/models.py | 5 +-
 .../basicmessage_storage/v1_0/routes.py | 12 +-
 .../v1_0/tests/test_init.py | 5 +-
 .../tests/test_basicmessage_storage.py | 5 +-
 .../connection_update/v1_0/routes.py | 6 +-
 .../v1_0/tests/test_routes.py | 10 +-
 .../integration/tests/__init__.py | 14 +-
 .../tests/test_connection_update.py | 13 +-
 .../v1_0/constants.py | 12 +-
 .../v1_0/handlers/handler.py | 6 +-
 .../v1_0/manager.py | 1 +
 .../v1_0/message_types.py | 1 +
 .../v1_0/messages/set_device_info.py | 1 +
 .../v1_0/models.py | 1 +
 .../v1_0/routes.py | 4 +-
 .../integration/tests/test_example.py | 5 +-
 kafka_events/demo/setup/main.py | 4 +-
 kafka_events/integration/tests/conftest.py | 5 +-
 kafka_events/integration/tests/test_events.py | 1 +
 kafka_events/kafka_events/definition.py | 2 +-
 .../v1_0/deliverer/deliverer/__init__.py | 4 +
 .../v1_0/deliverer/deliverer/__main__.py | 1 +
 .../v1_0/http_kafka_relay/relay/__init__.py | 1 +
 .../v1_0/http_kafka_relay/setup.py | 2 +-
 .../kafka_events/v1_0/kafka_queue/__init__.py | 1 -
 .../kafka_events/v1_0/kafka_queue/config.py | 14 +-
 .../v1_0/kafka_queue/events/__init__.py | 10 +-
 .../kafka_events/v1_0/kafka_queue/inbound.py | 16 +-
 .../kafka_events/v1_0/kafka_queue/outbound.py | 13 +-
 .../v1_0/kafka_queue/tests/test_config.py | 2 +-
 kafka_events/setup.py | 1 -
 .../multitenant_provider/v1_0/config.py | 18 +-
 .../multitenant_provider/v1_0/manager.py | 11 +-
 .../multitenant_provider/v1_0/models.py | 1 +
 .../multitenant_provider/v1_0/provider.py | 1 +
 .../multitenant_provider/v1_0/routes.py | 7 +-
 .../v1_0/tests/test_config.py | 8 +-
 .../v1_0/tests/test_init.py | 19 +-
 .../v1_0/tests/test_manager.py | 1 +
 .../v1_0/tests/test_routes.py | 4 +-
 .../integration/afj_runner/runner/__main__.py | 5 +-
 oid4vci/integration/oid4vci_client/client.py | 5 +-
 oid4vci/integration/oid4vci_client/crypto.py | 4 +-
 .../tests/test_pre_auth_code_flow.py | 1 +
 oid4vci/oid4vci/config.py | 1 +
 oid4vci/oid4vci/jwk_resolver.py | 3 +-
 oid4vci/oid4vci/models/exchange.py | 3 +-
 oid4vci/oid4vci/models/supported_cred.py | 1 +
 oid4vci/oid4vci/oid4vci_server.py | 2 +-
 oid4vci/oid4vci/public_routes.py | 8 +-
 oid4vci/oid4vci/routes.py | 3 +-
 oid4vci/oid4vci/tests/conftest.py | 1 -
 oid4vci/oid4vci/tests/models/test_exchange.py | 3 +-
 .../tests/models/test_supported_cred.py | 2 +-
 oid4vci/oid4vci/tests/routes/conftest.py | 22 +-
 oid4vci/oid4vci/tests/routes/test_admin.py | 7 +-
 .../tests/routes/test_public_routes.py | 2 +-
 .../integration/tests/test_example.py | 5 +-
 redis_events/integration/tests/conftest.py | 3 +-
 redis_events/integration/tests/test_events.py | 33 +-
 .../integration/tests/test_redis_basic.py | 1 +
 .../redis_events/v1_0/redis_queue/config.py | 17 +-
 .../redis_events/v1_0/redis_queue/inbound.py | 34 +-
 .../redis_events/v1_0/redis_queue/outbound.py | 11 +-
 .../v1_0/redis_queue/tests/test_events.py | 50 +--
 .../v1_0/redis_queue/tests/test_inbound.py | 8 +-
 .../v1_0/redis_queue/tests/test_outbound.py | 34 +-
 .../redis_events/v1_0/redis_queue/utils.py | 13 +-
 .../v1_0/services/deliverer/__init__.py | 7 +-
 .../v1_0/services/deliverer/deliver.py | 19 +-
 .../v1_0/services/deliverer/tests/__init__.py | 3 +-
 .../services/deliverer/tests/test_deliver.py | 12 +-
 .../redis_events/v1_0/services/relay/relay.py | 27 +-
 .../v1_0/services/relay/tests/__init__.py | 3 +-
 .../v1_0/services/relay/tests/test_relay.py | 88 ++---
 .../v1_0/status_endpoint/status_endpoints.py | 7 +-
 repo_manager.py | 358 ++++++++++--------
 rpc/integration/tests/test_rpc.py | 5 +-
 rpc/rpc/v1_0/handlers.py | 3 +-
 rpc/rpc/v1_0/messages.py | 13 +-
 rpc/rpc/v1_0/models.py | 4 +-
 rpc/rpc/v1_0/routes.py | 19 +-
 rpc/rpc/v1_0/tests/test_messages.py | 1 -
 rpc/rpc/v1_0/tests/test_models.py | 3 +-
 rpc/rpc/v1_0/tests/test_routes.py | 4 +-
 rpc/rpc/v1_0/tests/test_schemas.py | 3 +-
 88 files changed, 572 insertions(+), 550 deletions(-)

diff --git a/basicmessage_storage/basicmessage_storage/v1_0/__init__.py b/basicmessage_storage/basicmessage_storage/v1_0/__init__.py
index 79037e3dc..0700a1ce2 100644
--- a/basicmessage_storage/basicmessage_storage/v1_0/__init__.py
+++ b/basicmessage_storage/basicmessage_storage/v1_0/__init__.py
@@ -2,14 +2,15 @@
 import re
 
 from aries_cloudagent.config.injection_context import InjectionContext
-from aries_cloudagent.core.event_bus import EventBus, Event
+from aries_cloudagent.core.event_bus import Event, EventBus
 from aries_cloudagent.core.profile import Profile
 from aries_cloudagent.core.protocol_registry import ProtocolRegistry
 from aries_cloudagent.multitenant.admin.routes import (
     ACAPY_LIFECYCLE_CONFIG_FLAG_ARGS_MAP,
 )
-from .models import BasicMessageRecord
+
 from .config import get_config
+from .models import BasicMessageRecord
 
 LOGGER = logging.getLogger(__name__)
 
@@ -29,9 +30,9 @@ async def setup(context: InjectionContext):
     # acapy should create a separate map for plugin settings
     # add subwallet config, acapy will accept any child under basicmessage-storage
     # but will get filtered with `.config.get_config`
-    ACAPY_LIFECYCLE_CONFIG_FLAG_ARGS_MAP[
-        "basicmessage-storage"
-    ] = "basicmessage_storage"
+    ACAPY_LIFECYCLE_CONFIG_FLAG_ARGS_MAP["basicmessage-storage"] = (
+        "basicmessage_storage"
+    )
 
     event_bus.subscribe(BASIC_MESSAGE_EVENT_PATTERN, basic_message_event_handler)
     LOGGER.info("< basicmessage_storage plugin setup.")
diff --git a/basicmessage_storage/basicmessage_storage/v1_0/config.py b/basicmessage_storage/basicmessage_storage/v1_0/config.py
index 1cc4589cf..f624c2098 100644
--- a/basicmessage_storage/basicmessage_storage/v1_0/config.py
+++ b/basicmessage_storage/basicmessage_storage/v1_0/config.py
@@ -1,11 +1,11 @@
 """Configuration classes for multitenant_provider."""
+
 import logging
 from typing import Any, Mapping
 
 from mergedeep import merge
 from pydantic import BaseModel
 
-
 LOGGER = logging.getLogger(__name__)
diff --git a/basicmessage_storage/basicmessage_storage/v1_0/models.py b/basicmessage_storage/basicmessage_storage/v1_0/models.py
index dc32ae9bd..a5705f7b8 100644
--- a/basicmessage_storage/basicmessage_storage/v1_0/models.py
+++ b/basicmessage_storage/basicmessage_storage/v1_0/models.py
@@ -1,12 +1,13 @@
 """Basic Messages Storage Model classes and schemas."""
+
 from aries_cloudagent.core.profile import ProfileSession
 from aries_cloudagent.messaging.models.base_record import BaseRecord, BaseRecordSchema
 from aries_cloudagent.messaging.valid import (
     INDY_ISO8601_DATETIME_EXAMPLE,
     INDY_ISO8601_DATETIME_VALIDATE,
 )
-from marshmallow import fields
 from aries_cloudagent.storage.base import BaseStorage
+from marshmallow import fields
 
 
 class BasicMessageRecord(BaseRecord):
@@ -63,7 +64,6 @@ def
record_tags(self) -> dict: """Get tags for record.""" return {"connection_id": self.connection_id, "message_id": self.message_id} - async def delete_record(self, session: ProfileSession): """Perform connection record deletion actions. @@ -81,7 +81,6 @@ async def delete_record(self, session: ProfileSession): {"message_id": self.message_id}, ) - @classmethod async def retrieve_by_message_id( cls, session: ProfileSession, message_id: str diff --git a/basicmessage_storage/basicmessage_storage/v1_0/routes.py b/basicmessage_storage/basicmessage_storage/v1_0/routes.py index 2b58f9971..a7029de01 100644 --- a/basicmessage_storage/basicmessage_storage/v1_0/routes.py +++ b/basicmessage_storage/basicmessage_storage/v1_0/routes.py @@ -1,4 +1,5 @@ """Basic Messages Storage API Routes.""" + import functools import logging import uuid @@ -6,30 +7,29 @@ from aiohttp import web from aiohttp_apispec import ( docs, - response_schema, - querystring_schema, match_info_schema, + querystring_schema, request_schema, + response_schema, ) from aries_cloudagent.admin.request_context import AdminRequestContext from aries_cloudagent.messaging.models.base import BaseModelError from aries_cloudagent.messaging.models.openapi import OpenAPISchema -from aries_cloudagent.messaging.util import time_now, str_to_epoch +from aries_cloudagent.messaging.util import str_to_epoch, time_now from aries_cloudagent.messaging.valid import UUID4_EXAMPLE from aries_cloudagent.multitenant.error import WalletKeyMissingError from aries_cloudagent.protocols.basicmessage.v1_0.message_types import SPEC_URI from aries_cloudagent.protocols.basicmessage.v1_0.routes import ( BasicConnIdMatchInfoSchema, - SendMessageSchema, BasicMessageModuleResponseSchema, + SendMessageSchema, connections_send_message, ) from aries_cloudagent.storage.error import StorageError, StorageNotFoundError from marshmallow import fields, validate -from .models import BasicMessageRecord, BasicMessageRecordSchema from .config import get_config - +from .models import BasicMessageRecord, BasicMessageRecordSchema LOGGER = logging.getLogger(__name__) diff --git a/basicmessage_storage/basicmessage_storage/v1_0/tests/test_init.py b/basicmessage_storage/basicmessage_storage/v1_0/tests/test_init.py index 36503532c..5aa31aa34 100644 --- a/basicmessage_storage/basicmessage_storage/v1_0/tests/test_init.py +++ b/basicmessage_storage/basicmessage_storage/v1_0/tests/test_init.py @@ -1,11 +1,12 @@ import asynctest -import basicmessage_storage.v1_0 as test_module - from aries_cloudagent.core.event_bus import Event from aries_cloudagent.core.in_memory import InMemoryProfile from asynctest import TestCase as AsyncTestCase from asynctest import mock as async_mock from pydantic import BaseModel + +import basicmessage_storage.v1_0 as test_module + from .. import basic_message_event_handler, setup from ..models import BasicMessageRecord diff --git a/basicmessage_storage/integration/tests/test_basicmessage_storage.py b/basicmessage_storage/integration/tests/test_basicmessage_storage.py index aecdd497e..be34e5682 100644 --- a/basicmessage_storage/integration/tests/test_basicmessage_storage.py +++ b/basicmessage_storage/integration/tests/test_basicmessage_storage.py @@ -2,10 +2,11 @@ # pylint: disable=redefined-outer-name -import pytest import time -from . import Agent, BOB, ALICE +import pytest + +from . 
import ALICE, BOB, Agent @pytest.fixture(scope="session") diff --git a/connection_update/connection_update/v1_0/routes.py b/connection_update/connection_update/v1_0/routes.py index 9e4aa8744..30ab97c6c 100644 --- a/connection_update/connection_update/v1_0/routes.py +++ b/connection_update/connection_update/v1_0/routes.py @@ -1,9 +1,10 @@ """v1.0 connection update protocol routes.""" + import functools import logging from aiohttp import web -from aiohttp_apispec import docs, match_info_schema, response_schema, request_schema +from aiohttp_apispec import docs, match_info_schema, request_schema, response_schema from aries_cloudagent.admin.request_context import AdminRequestContext from aries_cloudagent.connections.models.conn_record import ConnRecord, ConnRecordSchema from aries_cloudagent.messaging.models.base import BaseModelError @@ -11,7 +12,7 @@ from aries_cloudagent.protocols.connections.v1_0.routes import ( ConnectionsConnIdMatchInfoSchema, ) -from aries_cloudagent.storage.error import StorageNotFoundError, StorageError +from aries_cloudagent.storage.error import StorageError, StorageNotFoundError from marshmallow import fields LOGGER = logging.getLogger(__name__) @@ -19,6 +20,7 @@ def error_handler(func): """Handle connection update errors.""" + @functools.wraps(func) async def wrapper(request): try: diff --git a/connection_update/connection_update/v1_0/tests/test_routes.py b/connection_update/connection_update/v1_0/tests/test_routes.py index 052c8f77b..aed23f59c 100644 --- a/connection_update/connection_update/v1_0/tests/test_routes.py +++ b/connection_update/connection_update/v1_0/tests/test_routes.py @@ -26,19 +26,17 @@ async def setUp(self) -> None: @asynctest.patch.object(ConnRecord, "retrieve_by_id") async def test_connections_update_saves_with_alias_from_body(self, mock_retrieve): self.request.json = async_mock.CoroutineMock() - self.request.json.return_value = { - 'alias': 'test-alias' - - } + self.request.json.return_value = {"alias": "test-alias"} self.request.match_info = {"conn_id": self.test_conn_id} mock_retrieve.return_value = async_mock.CoroutineMock( - save=async_mock.CoroutineMock(), alias="", serialize=lambda: {}) + save=async_mock.CoroutineMock(), alias="", serialize=lambda: {} + ) await test_module.connections_update(self.request) mock_retrieve.return_value.save.assert_called - assert mock_retrieve.return_value.alias == 'test-alias' + assert mock_retrieve.return_value.alias == "test-alias" async def test_register(self): mock_app = async_mock.MagicMock() diff --git a/connection_update/integration/tests/__init__.py b/connection_update/integration/tests/__init__.py index 19a422499..32db5e3f4 100644 --- a/connection_update/integration/tests/__init__.py +++ b/connection_update/integration/tests/__init__.py @@ -15,10 +15,12 @@ def get(agent: str, path: str, **kwargs): """Get.""" return requests.get(f"{agent}{path}", **kwargs) + def post(agent: str, path: str, **kwargs): """Post.""" return requests.post(f"{agent}{path}", **kwargs) + def put(agent: str, path: str, **kwargs): """Put.""" return requests.put(f"{agent}{path}", **kwargs) @@ -79,7 +81,7 @@ def accept_invite(self, connection_id: str): self.url, f"/connections/{connection_id}/accept-invitation", ) - + @unwrap_json_response @fail_if_not_ok("Failed to send basic message") def connections_update(self, connection_id, alias): @@ -89,7 +91,7 @@ def connections_update(self, connection_id, alias): f"/connections/{connection_id}", json={"alias": alias}, ) - + @unwrap_json_response @fail_if_not_ok("Failed to send basic 
message") def get_connection(self, connection_id): @@ -120,10 +122,8 @@ def post( wrapped_post = unwrap_json_response(wrapped_post) return wrapped_post(self.url, path, **kwargs) - - def put( - self, path: str, return_json: bool = True, fail_with: str = None, **kwargs - ): + + def put(self, path: str, return_json: bool = True, fail_with: str = None, **kwargs): """Do get to agent endpoint.""" wrapped_put = put if fail_with: @@ -131,4 +131,4 @@ def put( if return_json: wrapped_put = unwrap_json_response(wrapped_put) - return wrapped_put(self.url, path, **kwargs) \ No newline at end of file + return wrapped_put(self.url, path, **kwargs) diff --git a/connection_update/integration/tests/test_connection_update.py b/connection_update/integration/tests/test_connection_update.py index 53a3a9165..880825536 100644 --- a/connection_update/integration/tests/test_connection_update.py +++ b/connection_update/integration/tests/test_connection_update.py @@ -2,10 +2,11 @@ # pylint: disable=redefined-outer-name -import pytest import time -from . import Agent, BOB, ALICE +import pytest + +from . import ALICE, BOB, Agent @pytest.fixture(scope="session") @@ -27,14 +28,14 @@ def established_connection(bob, alice): resp = alice.receive_invite(invite, auto_accept="true") yield resp["connection_id"] + def test_send_message(bob, alice, established_connection): # make sure connection is active... time.sleep(2) - test_alias = 'test-alias' + test_alias = "test-alias" update_response = alice.connections_update(established_connection, alias=test_alias) get_response = alice.get_connection(established_connection) - assert update_response['alias'] == test_alias - assert get_response['alias'] == test_alias - + assert update_response["alias"] == test_alias + assert get_response["alias"] == test_alias diff --git a/firebase_push_notifications/firebase_push_notifications/v1_0/constants.py b/firebase_push_notifications/firebase_push_notifications/v1_0/constants.py index 0def089df..cc9e69306 100644 --- a/firebase_push_notifications/firebase_push_notifications/v1_0/constants.py +++ b/firebase_push_notifications/firebase_push_notifications/v1_0/constants.py @@ -1,13 +1,13 @@ """This module contains the constants used in the project.""" # Acapy -FORWARDING_EVENT = 'acapy::forward::received' +FORWARDING_EVENT = "acapy::forward::received" # Firebase -SCOPES = ['https://www.googleapis.com/auth/firebase.messaging'] -BASE_URL = 'https://fcm.googleapis.com' -ENDPOINT_PREFIX = 'v1/projects/' -ENDPOINT_SUFFIX = '/messages:send' +SCOPES = ["https://www.googleapis.com/auth/firebase.messaging"] +BASE_URL = "https://fcm.googleapis.com" +ENDPOINT_PREFIX = "v1/projects/" +ENDPOINT_SUFFIX = "/messages:send" # Configs -MAX_SEND_RATE_MINUTES = 0 \ No newline at end of file +MAX_SEND_RATE_MINUTES = 0 diff --git a/firebase_push_notifications/firebase_push_notifications/v1_0/handlers/handler.py b/firebase_push_notifications/firebase_push_notifications/v1_0/handlers/handler.py index 0d06575aa..679022391 100644 --- a/firebase_push_notifications/firebase_push_notifications/v1_0/handlers/handler.py +++ b/firebase_push_notifications/firebase_push_notifications/v1_0/handlers/handler.py @@ -1,4 +1,5 @@ """Handler for push notifications.""" + import logging from aries_cloudagent.messaging.base_handler import ( @@ -7,8 +8,8 @@ RequestContext, ) -from ..messages.set_device_info import SetDeviceInfo from ..manager import save_device_token +from ..messages.set_device_info import SetDeviceInfo LOGGER = logging.getLogger(__name__) @@ -20,7 +21,8 @@ async def 
handle(self, context: RequestContext, responder: BaseResponder): """Handle set-device-info message.""" connection_id = context.connection_record.connection_id LOGGER.info( - f'set-device-info protocol handler called for connection: {connection_id}') + f"set-device-info protocol handler called for connection: {connection_id}" + ) assert isinstance(context.message, SetDeviceInfo) device_token = context.message.device_token diff --git a/firebase_push_notifications/firebase_push_notifications/v1_0/manager.py b/firebase_push_notifications/firebase_push_notifications/v1_0/manager.py index cce410283..e8d82751c 100644 --- a/firebase_push_notifications/firebase_push_notifications/v1_0/manager.py +++ b/firebase_push_notifications/firebase_push_notifications/v1_0/manager.py @@ -1,4 +1,5 @@ """This module contains the logic for sending push notifications to the user's device using the Firebase Cloud Messaging service.""" # noqa: E501 + import json import logging import os diff --git a/firebase_push_notifications/firebase_push_notifications/v1_0/message_types.py b/firebase_push_notifications/firebase_push_notifications/v1_0/message_types.py index e7df49cf2..bf35ddcc2 100644 --- a/firebase_push_notifications/firebase_push_notifications/v1_0/message_types.py +++ b/firebase_push_notifications/firebase_push_notifications/v1_0/message_types.py @@ -1,4 +1,5 @@ """Message type identifiers for push-notifications-fcm v1.0.""" + from aries_cloudagent.protocols.didcomm_prefix import DIDCommPrefix ARIES_PROTOCOL = "push-notifications-fcm/1.0" diff --git a/firebase_push_notifications/firebase_push_notifications/v1_0/messages/set_device_info.py b/firebase_push_notifications/firebase_push_notifications/v1_0/messages/set_device_info.py index 1105062f1..fd5d65d69 100644 --- a/firebase_push_notifications/firebase_push_notifications/v1_0/messages/set_device_info.py +++ b/firebase_push_notifications/firebase_push_notifications/v1_0/messages/set_device_info.py @@ -1,4 +1,5 @@ """Message type for setting device info.""" + from aries_cloudagent.messaging.agent_message import AgentMessage, AgentMessageSchema from marshmallow import EXCLUDE, fields diff --git a/firebase_push_notifications/firebase_push_notifications/v1_0/models.py b/firebase_push_notifications/firebase_push_notifications/v1_0/models.py index b405b06c8..8fcfcfd24 100644 --- a/firebase_push_notifications/firebase_push_notifications/v1_0/models.py +++ b/firebase_push_notifications/firebase_push_notifications/v1_0/models.py @@ -1,4 +1,5 @@ """Model for Firebase Push Notifications.""" + from aries_cloudagent.core.profile import ProfileSession from aries_cloudagent.messaging.models.base_record import BaseRecord, BaseRecordSchema from aries_cloudagent.messaging.valid import INDY_ISO8601_DATETIME_VALIDATE diff --git a/firebase_push_notifications/firebase_push_notifications/v1_0/routes.py b/firebase_push_notifications/firebase_push_notifications/v1_0/routes.py index 0bc950cdf..b1a3268a9 100644 --- a/firebase_push_notifications/firebase_push_notifications/v1_0/routes.py +++ b/firebase_push_notifications/firebase_push_notifications/v1_0/routes.py @@ -1,10 +1,10 @@ """API for Firebase push notifications.""" + import logging import re from aiohttp import web -from aiohttp_apispec import (docs, match_info_schema, request_schema, - response_schema) +from aiohttp_apispec import docs, match_info_schema, request_schema, response_schema from aries_cloudagent.admin.request_context import AdminRequestContext from aries_cloudagent.connections.models.conn_record import ConnRecord 
from aries_cloudagent.core.event_bus import Event, EventBus diff --git a/firebase_push_notifications/integration/tests/test_example.py b/firebase_push_notifications/integration/tests/test_example.py index b24186106..64a59b3e0 100644 --- a/firebase_push_notifications/integration/tests/test_example.py +++ b/firebase_push_notifications/integration/tests/test_example.py @@ -1,7 +1,8 @@ -import pytest import time -from . import Agent, BOB, ALICE +import pytest + +from . import ALICE, BOB, Agent @pytest.fixture(scope="session") diff --git a/kafka_events/demo/setup/main.py b/kafka_events/demo/setup/main.py index e2574321a..3a9d02307 100644 --- a/kafka_events/demo/setup/main.py +++ b/kafka_events/demo/setup/main.py @@ -1,13 +1,14 @@ """Automate setup of Agency + Mediator.""" + import asyncio from os import getenv from typing import Any, Callable, TypeVar from acapy_client import Client from acapy_client.api.connection import ( + create_invitation, get_connection, receive_invitation, - create_invitation, ) from acapy_client.api.mediation import ( get_mediation_requests_mediation_id, @@ -20,6 +21,7 @@ from acapy_client.models.mediation_create_request import MediationCreateRequest from acapy_client.models.mediation_record import MediationRecord from acapy_client.models.receive_invitation_request import ReceiveInvitationRequest + """ 1. Create invitation from mediator. 2. Receive invitation in Agency. diff --git a/kafka_events/integration/tests/conftest.py b/kafka_events/integration/tests/conftest.py index 4d433251a..a1def39a7 100644 --- a/kafka_events/integration/tests/conftest.py +++ b/kafka_events/integration/tests/conftest.py @@ -5,10 +5,7 @@ from os import getenv from typing import Iterator, Optional -from aiokafka.consumer.consumer import AIOKafkaConsumer -from aiokafka.producer.producer import AIOKafkaProducer import pytest_asyncio - from acapy_client import Client from acapy_client.api.connection import create_static, delete_connection, set_metadata from acapy_client.models import ( @@ -17,6 +14,8 @@ ConnectionStaticResult, ) from acapy_client.models.conn_record import ConnRecord +from aiokafka.consumer.consumer import AIOKafkaConsumer +from aiokafka.producer.producer import AIOKafkaProducer from echo_agent import EchoClient diff --git a/kafka_events/integration/tests/test_events.py b/kafka_events/integration/tests/test_events.py index 8d7e43e23..09bfe6041 100644 --- a/kafka_events/integration/tests/test_events.py +++ b/kafka_events/integration/tests/test_events.py @@ -1,4 +1,5 @@ """Basic Message Tests""" + import asyncio import json diff --git a/kafka_events/kafka_events/definition.py b/kafka_events/kafka_events/definition.py index 5a1d329e6..e5ece9273 100644 --- a/kafka_events/kafka_events/definition.py +++ b/kafka_events/kafka_events/definition.py @@ -7,4 +7,4 @@ "current_minor_version": 0, "path": "v1_0", } -] \ No newline at end of file +] diff --git a/kafka_events/kafka_events/v1_0/deliverer/deliverer/__init__.py b/kafka_events/kafka_events/v1_0/deliverer/deliverer/__init__.py index 3db8996d2..fa4cb8208 100644 --- a/kafka_events/kafka_events/v1_0/deliverer/deliverer/__init__.py +++ b/kafka_events/kafka_events/v1_0/deliverer/deliverer/__init__.py @@ -7,8 +7,10 @@ class KafkaQueuePayload(BaseModel): """Base class for payloads that are sent to the Kafka queue.""" + class Config: """Configuration class for KafkaQueuePayload.""" + json_encoders = {bytes: lambda v: base64.urlsafe_b64encode(v).decode()} @classmethod @@ -24,11 +26,13 @@ def to_bytes(self) -> bytes: class Service(BaseModel): 
"""Model for a service that can be called.""" + url: str class OutboundPayload(KafkaQueuePayload): """Model for a payload that is sent to the Kafka queue.""" + service: Service payload: bytes retries: int = 0 diff --git a/kafka_events/kafka_events/v1_0/deliverer/deliverer/__main__.py b/kafka_events/kafka_events/v1_0/deliverer/deliverer/__main__.py index 6755c5d42..55e029dd6 100644 --- a/kafka_events/kafka_events/v1_0/deliverer/deliverer/__main__.py +++ b/kafka_events/kafka_events/v1_0/deliverer/deliverer/__main__.py @@ -1,4 +1,5 @@ """Kafka consumer of outbound messages from ACA-Py.""" + import asyncio import signal import sys diff --git a/kafka_events/kafka_events/v1_0/http_kafka_relay/relay/__init__.py b/kafka_events/kafka_events/v1_0/http_kafka_relay/relay/__init__.py index 80ab87693..eb94a3d1f 100644 --- a/kafka_events/kafka_events/v1_0/http_kafka_relay/relay/__init__.py +++ b/kafka_events/kafka_events/v1_0/http_kafka_relay/relay/__init__.py @@ -1,4 +1,5 @@ """HTTP to Kafka Relay.""" + import base64 import json import logging diff --git a/kafka_events/kafka_events/v1_0/http_kafka_relay/setup.py b/kafka_events/kafka_events/v1_0/http_kafka_relay/setup.py index 1c78b119f..b9aadee9c 100644 --- a/kafka_events/kafka_events/v1_0/http_kafka_relay/setup.py +++ b/kafka_events/kafka_events/v1_0/http_kafka_relay/setup.py @@ -1,7 +1,7 @@ """Setup file for the http_kafka_relay package.""" + #!/usr/bin/env python from setuptools import setup - if __name__ == "__main__": setup() diff --git a/kafka_events/kafka_events/v1_0/kafka_queue/__init__.py b/kafka_events/kafka_events/v1_0/kafka_queue/__init__.py index 0b8cab06c..37335aa50 100644 --- a/kafka_events/kafka_events/v1_0/kafka_queue/__init__.py +++ b/kafka_events/kafka_events/v1_0/kafka_queue/__init__.py @@ -2,5 +2,4 @@ from .config import get_config - __all__ = ["get_config"] diff --git a/kafka_events/kafka_events/v1_0/kafka_queue/config.py b/kafka_events/kafka_events/v1_0/kafka_queue/config.py index 3b9c0ee97..5e9b128e8 100644 --- a/kafka_events/kafka_events/v1_0/kafka_queue/config.py +++ b/kafka_events/kafka_events/v1_0/kafka_queue/config.py @@ -4,11 +4,10 @@ from abc import ABC, abstractmethod from typing import List, Mapping, Optional, Union -from pydantic import BaseModel, Extra - from aries_cloudagent.config.base import BaseSettings from aries_cloudagent.config.plugin_settings import PluginSettings from aries_cloudagent.config.settings import Settings +from pydantic import BaseModel, Extra LOGGER = logging.getLogger(__name__) @@ -22,6 +21,7 @@ def _alias_generator(key: str) -> str: class SecurityProtocol(ABC): """Base class for security protocol.""" + SSL_PROTOCOLS = ("SSL", "SASL_SSL") @property @@ -38,10 +38,12 @@ def security_protocol(self) -> str: class ProducerConfig(BaseModel, SecurityProtocol): """Producer configuration.""" + bootstrap_servers: Union[str, List[str]] class Config: """Configuration for producer.""" + extra = Extra.allow alias_generator = _alias_generator allow_population_by_field_name = True @@ -59,11 +61,13 @@ def security_protocol(self) -> str: class EventsConfig(BaseModel): """Events configuration.""" + producer: ProducerConfig topic_maps: Mapping[str, str] class Config: """Configuration for events.""" + alias_generator = _alias_generator allow_population_by_field_name = True @@ -83,10 +87,12 @@ def default(cls): class ConsumerConfig(BaseModel, SecurityProtocol): """Consumer configuration.""" + group_id: str class Config: """Configuration for consumer.""" + extra = Extra.allow alias_generator = _alias_generator 
allow_population_by_field_name = True @@ -104,11 +110,13 @@ def security_protocol(self) -> bool: class InboundConfig(BaseModel): """Inbound configuration.""" + consumer: ConsumerConfig topics: List[str] class Config: """Configuration for inbound.""" + alias_generator = _alias_generator allow_population_by_field_name = True @@ -120,6 +128,7 @@ def default(cls): class OutboundConfig(BaseModel): """Outbound configuration.""" + producer: ProducerConfig topic: str @@ -131,6 +140,7 @@ def default(cls): class KafkaConfig(BaseModel): """Kafka configuration.""" + events: Optional[EventsConfig] inbound: Optional[InboundConfig] outbound: Optional[OutboundConfig] diff --git a/kafka_events/kafka_events/v1_0/kafka_queue/events/__init__.py b/kafka_events/kafka_events/v1_0/kafka_queue/events/__init__.py index 68d5d92c1..e8d329d54 100644 --- a/kafka_events/kafka_events/v1_0/kafka_queue/events/__init__.py +++ b/kafka_events/kafka_events/v1_0/kafka_queue/events/__init__.py @@ -8,12 +8,12 @@ from typing import Optional, cast from aiokafka import AIOKafkaProducer - from aries_cloudagent.config.injection_context import InjectionContext from aries_cloudagent.core.event_bus import Event, EventBus, EventWithMetadata from aries_cloudagent.core.profile import Profile from aries_cloudagent.core.util import SHUTDOWN_EVENT_PATTERN, STARTUP_EVENT_PATTERN -from ..config import get_config, EventsConfig + +from ..config import EventsConfig, get_config LOGGER = logging.getLogger(__name__) @@ -46,9 +46,9 @@ async def on_startup(profile: Profile, event: Event): producer = AIOKafkaProducer( **config.producer.dict(), - ssl_context=ssl.create_default_context() - if config.producer.ssl_required - else None, + ssl_context=( + ssl.create_default_context() if config.producer.ssl_required else None + ), ) profile.context.injector.bind_instance(AIOKafkaProducer, producer) await producer.start() diff --git a/kafka_events/kafka_events/v1_0/kafka_queue/inbound.py b/kafka_events/kafka_events/v1_0/kafka_queue/inbound.py index 86b338d03..8ea4b04a3 100644 --- a/kafka_events/kafka_events/v1_0/kafka_queue/inbound.py +++ b/kafka_events/kafka_events/v1_0/kafka_queue/inbound.py @@ -1,5 +1,5 @@ - """Kafka inbound transport classes.""" + import base64 import json import logging @@ -9,11 +9,11 @@ from aiokafka import AIOKafkaConsumer from aiokafka.structs import ConsumerRecord - from aries_cloudagent.messaging.error import MessageParseError -from aries_cloudagent.transport.error import WireFormatParseError, RecipientKeysError +from aries_cloudagent.transport.error import RecipientKeysError, WireFormatParseError from aries_cloudagent.transport.inbound.base import BaseInboundTransport -from .config import get_config, InboundConfig + +from .config import InboundConfig, get_config LOGGER = logging.getLogger(__name__) @@ -35,9 +35,11 @@ def __init__(self, host: str, port: int, create_session, **kwargs) -> None: *self.config.topics, bootstrap_servers=self.host, **self.config.consumer.dict(), - ssl_context=ssl.create_default_context() - if self.config.consumer.ssl_required - else None, + ssl_context=( + ssl.create_default_context() + if self.config.consumer.ssl_required + else None + ), ) async def start(self): diff --git a/kafka_events/kafka_events/v1_0/kafka_queue/outbound.py b/kafka_events/kafka_events/v1_0/kafka_queue/outbound.py index ba63d02b3..0589831bc 100644 --- a/kafka_events/kafka_events/v1_0/kafka_queue/outbound.py +++ b/kafka_events/kafka_events/v1_0/kafka_queue/outbound.py @@ -1,4 +1,5 @@ """Basic in memory queue.""" + import base64 import 
json import logging @@ -6,7 +7,6 @@ from typing import List, Optional, Union from aiokafka.producer.producer import AIOKafkaProducer - from aries_cloudagent.core.profile import Profile from aries_cloudagent.transport.outbound.base import ( BaseOutboundTransport, @@ -14,7 +14,8 @@ OutboundTransportError, ) from aries_cloudagent.transport.outbound.manager import QueuedOutboundMessage -from .config import get_config, OutboundConfig + +from .config import OutboundConfig, get_config LOGGER = logging.getLogger(__name__) @@ -74,9 +75,11 @@ async def start(self): self.producer = AIOKafkaProducer( **self.config.producer.dict(), - ssl_context=ssl.create_default_context() - if self.config.producer.ssl_required - else None, + ssl_context=( + ssl.create_default_context() + if self.config.producer.ssl_required + else None + ), ) await self.producer.start() diff --git a/kafka_events/kafka_events/v1_0/kafka_queue/tests/test_config.py b/kafka_events/kafka_events/v1_0/kafka_queue/tests/test_config.py index 660e9bf6b..6b20108a6 100644 --- a/kafka_events/kafka_events/v1_0/kafka_queue/tests/test_config.py +++ b/kafka_events/kafka_events/v1_0/kafka_queue/tests/test_config.py @@ -1,6 +1,6 @@ from aries_cloudagent.config.settings import Settings from kafka_queue import config -from kafka_queue.config import InboundConfig, OutboundConfig, EventsConfig +from kafka_queue.config import EventsConfig, InboundConfig, OutboundConfig DUMMY_INBOUND_CONFIG = {"consumer": {"group-id": "some-group-id"}, "topics": []} DUMMY_OUTBOUND_CONFIG = None diff --git a/kafka_events/setup.py b/kafka_events/setup.py index 24029b509..c917c02ef 100644 --- a/kafka_events/setup.py +++ b/kafka_events/setup.py @@ -3,6 +3,5 @@ #!/usr/bin/env python from setuptools import setup - if __name__ == "__main__": setup() diff --git a/multitenant_provider/multitenant_provider/v1_0/config.py b/multitenant_provider/multitenant_provider/v1_0/config.py index 5b22056e5..a0ee1d4e1 100644 --- a/multitenant_provider/multitenant_provider/v1_0/config.py +++ b/multitenant_provider/multitenant_provider/v1_0/config.py @@ -1,4 +1,5 @@ """Configuration classes for multitenant_provider.""" + import logging from datetime import timedelta from typing import Any, Mapping, Optional @@ -6,7 +7,6 @@ from mergedeep import merge from pydantic import BaseModel - LOGGER = logging.getLogger(__name__) @@ -16,11 +16,13 @@ def _alias_generator(key: str) -> str: class ManagerConfig(BaseModel): """Configuration for the multitenant manager.""" + class_name: Optional[str] # real world, this is a UUID always_check_provided_wallet_key: bool = False class Config: """Inner class for configuration.""" + alias_generator = _alias_generator allow_population_by_field_name = True @@ -36,10 +38,12 @@ def default(cls): class ErrorsConfig(BaseModel): """Configuration for error handling.""" + on_unneeded_wallet_key: bool = True class Config: """Inner class for configuration.""" + alias_generator = _alias_generator allow_population_by_field_name = True @@ -51,11 +55,13 @@ def default(cls): class TokenExpiryConfig(BaseModel): """Configuration for token expiry.""" + units: Optional[str] = "weeks" # weeks, days, hours, minutes amount: int = 52 class Config: """Inner class for configuration.""" + alias_generator = _alias_generator allow_population_by_field_name = True @@ -80,6 +86,7 @@ def get_token_expiry_delta(self) -> timedelta: class MultitenantProviderConfig(BaseModel): """Configuration for the multitenant provider.""" + manager: Optional[ManagerConfig] errors: Optional[ErrorsConfig] token_expiry: 
Optional[TokenExpiryConfig] @@ -106,10 +113,8 @@ def process_config_dict(config_dict: dict) -> dict: def get_config(settings: Mapping[str, Any]) -> MultitenantProviderConfig: """Retrieve configuration from settings.""" try: - LOGGER.debug("Constructing config from: %s", - settings.get("plugin_config")) - plugin_config_dict = settings["plugin_config"].get( - "multitenant_provider", {}) + LOGGER.debug("Constructing config from: %s", settings.get("plugin_config")) + plugin_config_dict = settings["plugin_config"].get("multitenant_provider", {}) LOGGER.debug("Retrieved: %s", plugin_config_dict) plugin_config_dict = process_config_dict(plugin_config_dict) LOGGER.debug("Parsed: %s", plugin_config_dict) @@ -123,6 +128,5 @@ def get_config(settings: Mapping[str, Any]) -> MultitenantProviderConfig: config = MultitenantProviderConfig.default() LOGGER.debug("Returning config: %s", config.json(indent=2)) - LOGGER.debug("Returning config(aliases): %s", - config.json(by_alias=True, indent=2)) + LOGGER.debug("Returning config(aliases): %s", config.json(by_alias=True, indent=2)) return config diff --git a/multitenant_provider/multitenant_provider/v1_0/manager.py b/multitenant_provider/multitenant_provider/v1_0/manager.py index 21756b9ec..8be782314 100644 --- a/multitenant_provider/multitenant_provider/v1_0/manager.py +++ b/multitenant_provider/multitenant_provider/v1_0/manager.py @@ -1,4 +1,5 @@ """Multitenant provider manager.""" + import logging from datetime import datetime, timezone @@ -11,9 +12,7 @@ from aries_cloudagent.multitenant.askar_profile_manager import ( AskarProfileMultitenantManager, ) -from aries_cloudagent.multitenant.base import ( - MultitenantManagerError, -) +from aries_cloudagent.multitenant.base import MultitenantManagerError from aries_cloudagent.multitenant.error import WalletKeyMissingError from aries_cloudagent.multitenant.manager import MultitenantManager from aries_cloudagent.storage.error import StorageError @@ -81,8 +80,7 @@ def check_wallet_key(self, wallet_token_record: WalletTokenRecord, wallet_key: s wallet_token_record.wallet_key_salt.encode("utf-8"), ) # check the passed in value/hash against the calculated hash. 
- check_input = bcrypt.checkpw( - wallet_key.encode("utf-8"), wallet_key_token) + check_input = bcrypt.checkpw(wallet_key.encode("utf-8"), wallet_key_token) self.logger.debug( f"bcrypt.checkpw(wallet_key.encode('utf-8'), wallet_key_token) = {check_input}" # noqa E501 ) @@ -153,8 +151,7 @@ async def create_auth_token( iat = datetime.now(tz=timezone.utc) exp = iat + config.token_expiry.get_token_expiry_delta() - jwt_payload = {"wallet_id": wallet_record.wallet_id, - "iat": iat, "exp": exp} + jwt_payload = {"wallet_id": wallet_record.wallet_id, "iat": iat, "exp": exp} jwt_secret = self.get_profile().settings.get("multitenant.jwt_secret") if wallet_record.requires_external_key: diff --git a/multitenant_provider/multitenant_provider/v1_0/models.py b/multitenant_provider/multitenant_provider/v1_0/models.py index 0ac1d33e4..5b2e85f63 100644 --- a/multitenant_provider/multitenant_provider/v1_0/models.py +++ b/multitenant_provider/multitenant_provider/v1_0/models.py @@ -1,4 +1,5 @@ """Multitenant Provider Wallet Token(s) Record Model.""" + from typing import List, Optional from aries_cloudagent.core.profile import ProfileSession diff --git a/multitenant_provider/multitenant_provider/v1_0/provider.py b/multitenant_provider/multitenant_provider/v1_0/provider.py index e5774159c..925a63ad7 100644 --- a/multitenant_provider/multitenant_provider/v1_0/provider.py +++ b/multitenant_provider/multitenant_provider/v1_0/provider.py @@ -1,4 +1,5 @@ """Profile manager for multitenancy.""" + import logging from aries_cloudagent.config.base import InjectionError diff --git a/multitenant_provider/multitenant_provider/v1_0/routes.py b/multitenant_provider/multitenant_provider/v1_0/routes.py index a16206172..a00a19373 100644 --- a/multitenant_provider/multitenant_provider/v1_0/routes.py +++ b/multitenant_provider/multitenant_provider/v1_0/routes.py @@ -1,21 +1,22 @@ """Multitenant provider plugin routes.""" + import logging from aiohttp import web from aiohttp_apispec import docs, request_schema, response_schema from aries_cloudagent.admin.request_context import AdminRequestContext from aries_cloudagent.multitenant.admin.routes import ( - CreateWalletTokenRequestSchema, - CreateWalletTokenResponseSchema, CreateWalletRequestSchema, CreateWalletResponseSchema, + CreateWalletTokenRequestSchema, + CreateWalletTokenResponseSchema, wallet_create, ) from aries_cloudagent.multitenant.base import BaseMultitenantManager from aries_cloudagent.multitenant.error import WalletKeyMissingError from aries_cloudagent.storage.error import StorageNotFoundError from aries_cloudagent.wallet.models.wallet_record import WalletRecord -from marshmallow import validates_schema, ValidationError +from marshmallow import ValidationError, validates_schema from .config import MultitenantProviderConfig from .manager import WalletKeyMismatchError diff --git a/multitenant_provider/multitenant_provider/v1_0/tests/test_config.py b/multitenant_provider/multitenant_provider/v1_0/tests/test_config.py index 112c26b59..18f16ba64 100644 --- a/multitenant_provider/multitenant_provider/v1_0/tests/test_config.py +++ b/multitenant_provider/multitenant_provider/v1_0/tests/test_config.py @@ -38,13 +38,7 @@ async def test_get_config_without_settings_returns_default(self): async def test_get_config_with_settings_returns_valid_config(self): settings = { - 'plugin_config': { - 'multitenant_provider': { - 'errors': { - 'testing...': True - } - } - } + "plugin_config": {"multitenant_provider": {"errors": {"testing...": True}}} } config = get_config(settings) assert 
isinstance(config, MultitenantProviderConfig) diff --git a/multitenant_provider/multitenant_provider/v1_0/tests/test_init.py b/multitenant_provider/multitenant_provider/v1_0/tests/test_init.py index 6b73cf637..b4fe3c806 100644 --- a/multitenant_provider/multitenant_provider/v1_0/tests/test_init.py +++ b/multitenant_provider/multitenant_provider/v1_0/tests/test_init.py @@ -1,11 +1,11 @@ from aries_cloudagent.core.event_bus import Event, EventBus from aries_cloudagent.core.in_memory import InMemoryProfile -from asynctest import TestCase as AsyncTestCase -from asynctest import mock as async_mock from aries_cloudagent.core.protocol_registry import ProtocolRegistry from aries_cloudagent.multitenant.base import BaseMultitenantManager +from asynctest import TestCase as AsyncTestCase +from asynctest import mock as async_mock -from .. import setup, on_startup +from .. import on_startup, setup class TestInit(AsyncTestCase): @@ -36,10 +36,10 @@ async def test_setup_raises_value_error_when_second_inject_returns_none(self): with self.assertRaises(ValueError): await setup(self.context) - async def test_on_startup_injects_base_multi_tenant_provider_when_setting_true(self): - self.profile.context.settings = { - 'multitenant.enabled': True - } + async def test_on_startup_injects_base_multi_tenant_provider_when_setting_true( + self, + ): + self.profile.context.settings = {"multitenant.enabled": True} self.profile.context.injector.bind_instance = async_mock.Mock() self.profile.context.injector.bind_provider = async_mock.Mock() self.profile.context.inject = async_mock.Mock() @@ -56,11 +56,8 @@ async def test_on_startup_raises_error_when_config_missing(self): await on_startup(self.profile, event) async def test_on_startup_raises_error_when_config_false(self): - self.profile.context.settings = { - 'multitenant.enabled': False - } + self.profile.context.settings = {"multitenant.enabled": False} event = Event(topic="test", payload={}) with self.assertRaises(ValueError): await on_startup(self.profile, event) - diff --git a/multitenant_provider/multitenant_provider/v1_0/tests/test_manager.py b/multitenant_provider/multitenant_provider/v1_0/tests/test_manager.py index 7b6388103..328f584e1 100644 --- a/multitenant_provider/multitenant_provider/v1_0/tests/test_manager.py +++ b/multitenant_provider/multitenant_provider/v1_0/tests/test_manager.py @@ -1,4 +1,5 @@ from typing import Optional + import asynctest import bcrypt import jwt diff --git a/multitenant_provider/multitenant_provider/v1_0/tests/test_routes.py b/multitenant_provider/multitenant_provider/v1_0/tests/test_routes.py index 8169481bd..2128a81bf 100644 --- a/multitenant_provider/multitenant_provider/v1_0/tests/test_routes.py +++ b/multitenant_provider/multitenant_provider/v1_0/tests/test_routes.py @@ -45,7 +45,9 @@ async def setUp(self) -> None: mock_session = async_mock.MagicMock() mock_session.__aenter__ = async_mock.CoroutineMock(return_value=mock_session) self.profile.session = mock_session - self.context = AdminRequestContext.test_context(self.session_inject, profile=self.profile) + self.context = AdminRequestContext.test_context( + self.session_inject, profile=self.profile + ) self.request_dict = { "context": self.context, } diff --git a/oid4vci/integration/afj_runner/runner/__main__.py b/oid4vci/integration/afj_runner/runner/__main__.py index 1c70a0e67..e5aadd410 100644 --- a/oid4vci/integration/afj_runner/runner/__main__.py +++ b/oid4vci/integration/afj_runner/runner/__main__.py @@ -1,10 +1,11 @@ """Quick test script.""" + import asyncio -from 
urllib.parse import urlencode from os import getenv +from urllib.parse import urlencode -from jrpc_client import JsonRpcClient, TCPSocketTransport from controller.controller import Controller +from jrpc_client import JsonRpcClient, TCPSocketTransport AFJ_HOST = getenv("AFJ_HOST", "localhost") AFJ_PORT = int(getenv("AFJ_PORT", "3000")) diff --git a/oid4vci/integration/oid4vci_client/client.py b/oid4vci/integration/oid4vci_client/client.py index fe2425220..dea4f5a58 100644 --- a/oid4vci/integration/oid4vci_client/client.py +++ b/oid4vci/integration/oid4vci_client/client.py @@ -1,14 +1,13 @@ """OpenID Connect 4 Verifiable Credential Issuance Client.""" - -from dataclasses import dataclass import json +from dataclasses import dataclass from typing import Dict, List, Literal, Optional, Union from urllib.parse import parse_qsl, urlparse from aiohttp import ClientSession -from .crypto import AskarKey, AskarCryptoService +from .crypto import AskarCryptoService, AskarKey from .did import generate PRE_AUTH_GRANT = "urn:ietf:params:oauth:grant-type:pre-authorized_code" diff --git a/oid4vci/integration/oid4vci_client/crypto.py b/oid4vci/integration/oid4vci_client/crypto.py index f3499eaba..3ac1e628a 100644 --- a/oid4vci/integration/oid4vci_client/crypto.py +++ b/oid4vci/integration/oid4vci_client/crypto.py @@ -1,10 +1,10 @@ """Crypto module for OID4VCI client.""" -from abc import ABC, abstractmethod import base64 -from dataclasses import dataclass import json import time +from abc import ABC, abstractmethod +from dataclasses import dataclass from typing import Generic, TypeVar, Union from aries_askar import Key, KeyAlg diff --git a/oid4vci/integration/tests/test_pre_auth_code_flow.py b/oid4vci/integration/tests/test_pre_auth_code_flow.py index f12c2fcfd..52416003b 100644 --- a/oid4vci/integration/tests/test_pre_auth_code_flow.py +++ b/oid4vci/integration/tests/test_pre_auth_code_flow.py @@ -1,4 +1,5 @@ """Quick test script.""" + import json from os import getenv from urllib.parse import urlencode diff --git a/oid4vci/oid4vci/config.py b/oid4vci/oid4vci/config.py index 4ae98f661..e2e4c5e54 100644 --- a/oid4vci/oid4vci/config.py +++ b/oid4vci/oid4vci/config.py @@ -2,6 +2,7 @@ from dataclasses import dataclass from os import getenv + from aries_cloudagent.config.base import BaseSettings from aries_cloudagent.config.settings import Settings diff --git a/oid4vci/oid4vci/jwk_resolver.py b/oid4vci/oid4vci/jwk_resolver.py index 1b9a0000c..86e998a1f 100644 --- a/oid4vci/oid4vci/jwk_resolver.py +++ b/oid4vci/oid4vci/jwk_resolver.py @@ -2,9 +2,10 @@ import re from typing import Optional, Pattern, Sequence, Text + from aries_cloudagent.config.injection_context import InjectionContext from aries_cloudagent.core.profile import Profile -from aries_cloudagent.resolver.base import BaseDIDResolver, ResolverType, ResolverError +from aries_cloudagent.resolver.base import BaseDIDResolver, ResolverError, ResolverType from aries_cloudagent.wallet.jwt import b64_to_dict diff --git a/oid4vci/oid4vci/models/exchange.py b/oid4vci/oid4vci/models/exchange.py index e70acc136..193171b32 100644 --- a/oid4vci/oid4vci/models/exchange.py +++ b/oid4vci/oid4vci/models/exchange.py @@ -1,10 +1,11 @@ """Exchange record for OID4VCI.""" from typing import Any, Dict, Optional + from aries_cloudagent.core.profile import ProfileSession from aries_cloudagent.messaging.models.base_record import ( - BaseRecordSchema, BaseExchangeRecord, + BaseRecordSchema, ) from aries_cloudagent.messaging.valid import Uri from marshmallow import fields diff --git 
a/oid4vci/oid4vci/models/supported_cred.py b/oid4vci/oid4vci/models/supported_cred.py index fc1cb55bc..2bf1d30da 100644 --- a/oid4vci/oid4vci/models/supported_cred.py +++ b/oid4vci/oid4vci/models/supported_cred.py @@ -1,6 +1,7 @@ """Supported Credential Record.""" from typing import Dict, List, Optional + from aries_cloudagent.messaging.models.base_record import BaseRecord, BaseRecordSchema from marshmallow import fields diff --git a/oid4vci/oid4vci/oid4vci_server.py b/oid4vci/oid4vci/oid4vci_server.py index 026315552..a8e62c029 100644 --- a/oid4vci/oid4vci/oid4vci_server.py +++ b/oid4vci/oid4vci/oid4vci_server.py @@ -2,9 +2,9 @@ import logging +import aiohttp_cors from aiohttp import web from aiohttp_apispec import setup_aiohttp_apispec, validation_middleware -import aiohttp_cors from aries_cloudagent.admin.base_server import BaseAdminServer from aries_cloudagent.admin.error import AdminSetupError from aries_cloudagent.admin.request_context import AdminRequestContext diff --git a/oid4vci/oid4vci/public_routes.py b/oid4vci/oid4vci/public_routes.py index b70f7d95a..5094f913d 100644 --- a/oid4vci/oid4vci/public_routes.py +++ b/oid4vci/oid4vci/public_routes.py @@ -1,14 +1,16 @@ """Public routes for OID4VCI.""" -from dataclasses import dataclass import datetime import logging +import uuid +from dataclasses import dataclass from secrets import token_urlsafe from typing import Any, Dict, List, Mapping, Optional -import uuid +import jwt from aiohttp import web from aiohttp_apispec import docs, form_schema, request_schema, response_schema +from aries_askar import Key, KeyAlg from aries_cloudagent.admin.request_context import AdminRequestContext from aries_cloudagent.core.profile import Profile from aries_cloudagent.messaging.models.base import BaseModelError @@ -24,8 +26,6 @@ jwt_verify, ) from aries_cloudagent.wallet.util import b58_to_bytes, b64_to_bytes -from aries_askar import Key, KeyAlg -import jwt from marshmallow import fields from pydid import DIDUrl diff --git a/oid4vci/oid4vci/routes.py b/oid4vci/oid4vci/routes.py index a23d18090..9ff5563d7 100644 --- a/oid4vci/oid4vci/routes.py +++ b/oid4vci/oid4vci/routes.py @@ -1,4 +1,5 @@ """Basic Messages Storage API Routes.""" + import logging import secrets from typing import Any, Dict @@ -27,9 +28,9 @@ from marshmallow import fields from marshmallow.validate import OneOf +from .config import Config from .models.exchange import OID4VCIExchangeRecord, OID4VCIExchangeRecordSchema from .models.supported_cred import SupportedCredential, SupportedCredentialSchema -from .config import Config SPEC_URI = ( "https://openid.net/specs/openid-4-verifiable-credential-issuance-1_0-11.html" diff --git a/oid4vci/oid4vci/tests/conftest.py b/oid4vci/oid4vci/tests/conftest.py index 2cb4b0231..6ee06ac5b 100644 --- a/oid4vci/oid4vci/tests/conftest.py +++ b/oid4vci/oid4vci/tests/conftest.py @@ -1,5 +1,4 @@ import pytest - from aries_cloudagent.core.in_memory import InMemoryProfile diff --git a/oid4vci/oid4vci/tests/models/test_exchange.py b/oid4vci/oid4vci/tests/models/test_exchange.py index f8c74337b..45daebcd7 100644 --- a/oid4vci/oid4vci/tests/models/test_exchange.py +++ b/oid4vci/oid4vci/tests/models/test_exchange.py @@ -1,5 +1,6 @@ -from aries_cloudagent.core.profile import Profile import pytest +from aries_cloudagent.core.profile import Profile + from oid4vci.models.exchange import OID4VCIExchangeRecord diff --git a/oid4vci/oid4vci/tests/models/test_supported_cred.py b/oid4vci/oid4vci/tests/models/test_supported_cred.py index b0a354106..d21292b8a 100644 --- 
a/oid4vci/oid4vci/tests/models/test_supported_cred.py +++ b/oid4vci/oid4vci/tests/models/test_supported_cred.py @@ -1,5 +1,5 @@ -from aries_cloudagent.core.profile import Profile import pytest +from aries_cloudagent.core.profile import Profile from oid4vci.models.supported_cred import SupportedCredential diff --git a/oid4vci/oid4vci/tests/routes/conftest.py b/oid4vci/oid4vci/tests/routes/conftest.py index 43f7d6e3d..890e844d1 100644 --- a/oid4vci/oid4vci/tests/routes/conftest.py +++ b/oid4vci/oid4vci/tests/routes/conftest.py @@ -1,8 +1,10 @@ +from unittest.mock import MagicMock + +import pytest from aries_cloudagent.admin.request_context import AdminRequestContext from aries_cloudagent.core.in_memory import InMemoryProfile from aries_cloudagent.resolver.did_resolver import DIDResolver -import pytest -from unittest.mock import MagicMock + from oid4vci.jwk_resolver import JwkResolver @@ -10,15 +12,17 @@ def context(): """Test AdminRequestContext.""" context = AdminRequestContext.test_context() - context.update_settings({ - "plugin_config": { - "oid4vci": { - "endpoint": "http://localhost:8020", - "host": "0.0.0.0", - "port": 8020, + context.update_settings( + { + "plugin_config": { + "oid4vci": { + "endpoint": "http://localhost:8020", + "host": "0.0.0.0", + "port": 8020, + } } } - }) + ) yield context diff --git a/oid4vci/oid4vci/tests/routes/test_admin.py b/oid4vci/oid4vci/tests/routes/test_admin.py index 33d9ac888..b82c3a04a 100644 --- a/oid4vci/oid4vci/tests/routes/test_admin.py +++ b/oid4vci/oid4vci/tests/routes/test_admin.py @@ -1,9 +1,10 @@ -from aiohttp import web from unittest.mock import AsyncMock -from aries_cloudagent.admin.request_context import AdminRequestContext -from oid4vci import routes as test_module + import pytest +from aiohttp import web +from aries_cloudagent.admin.request_context import AdminRequestContext +from oid4vci import routes as test_module from oid4vci.models.supported_cred import SupportedCredential diff --git a/oid4vci/oid4vci/tests/routes/test_public_routes.py b/oid4vci/oid4vci/tests/routes/test_public_routes.py index 756377a2e..f1da15570 100644 --- a/oid4vci/oid4vci/tests/routes/test_public_routes.py +++ b/oid4vci/oid4vci/tests/routes/test_public_routes.py @@ -1,9 +1,9 @@ from unittest.mock import patch +import pytest from aiohttp import web from aries_cloudagent.admin.request_context import AdminRequestContext from aries_cloudagent.core.profile import Profile -import pytest from oid4vci import public_routes as test_module diff --git a/plugin_globals/integration/tests/test_example.py b/plugin_globals/integration/tests/test_example.py index b24186106..64a59b3e0 100644 --- a/plugin_globals/integration/tests/test_example.py +++ b/plugin_globals/integration/tests/test_example.py @@ -1,7 +1,8 @@ -import pytest import time -from . import Agent, BOB, ALICE +import pytest + +from . 
import ALICE, BOB, Agent @pytest.fixture(scope="session") diff --git a/redis_events/integration/tests/conftest.py b/redis_events/integration/tests/conftest.py index 432aa31cb..074c45f5b 100644 --- a/redis_events/integration/tests/conftest.py +++ b/redis_events/integration/tests/conftest.py @@ -6,6 +6,5 @@ @pytest.fixture def redis(): - redis = RedisCluster.from_url( - url="redis://default:test1234@172.28.0.103:6379") + redis = RedisCluster.from_url(url="redis://default:test1234@172.28.0.103:6379") yield redis diff --git a/redis_events/integration/tests/test_events.py b/redis_events/integration/tests/test_events.py index f136d557b..78b79691e 100644 --- a/redis_events/integration/tests/test_events.py +++ b/redis_events/integration/tests/test_events.py @@ -1,10 +1,11 @@ """Basic Message Tests""" + import json import time import pytest -from . import FABER, ALICE, RELAY, Agent +from . import ALICE, FABER, RELAY, Agent @pytest.fixture(scope="session") @@ -62,23 +63,25 @@ async def test_base_redis_keys_are_set(redis): @pytest.mark.asyncio -async def test_outbound_queue_removes_messages_from_queue_and_deliver_sends_them(faber: Agent, established_connection: str, redis): +async def test_outbound_queue_removes_messages_from_queue_and_deliver_sends_them( + faber: Agent, established_connection: str, redis +): faber.send_message(established_connection, "Hello Alice") faber.send_message(established_connection, "Another Alice") - messages = faber.retrieve_basicmessages()['results'] - assert "Hello Alice" in (msg['content'] for msg in messages) - assert "Another Alice" in (msg['content'] for msg in messages) + messages = faber.retrieve_basicmessages()["results"] + assert "Hello Alice" in (msg["content"] for msg in messages) + assert "Another Alice" in (msg["content"] for msg in messages) @pytest.mark.asyncio async def test_deliverer_pulls_messages_from_queue_and_sends_them( - faber: Agent, - established_connection: str, - redis + faber: Agent, established_connection: str, redis ): test_msg = "eyJjb250ZW50IjogInRlc3QtbXNnIn0=" # {"content": "test-msg"} outbound_msg = { - "service": {"url": f"{faber.url}/connections/{established_connection}/send-message"}, + "service": { + "url": f"{faber.url}/connections/{established_connection}/send-message" + }, "payload": test_msg, } await redis.rpush( @@ -87,9 +90,8 @@ async def test_deliverer_pulls_messages_from_queue_and_sends_them( ) time.sleep(5) - messages = faber.retrieve_basicmessages()['results'] - matching_msgs = [ - msg for msg in messages if msg['content'] == "test-msg"] + messages = faber.retrieve_basicmessages()["results"] + matching_msgs = [msg for msg in messages if msg["content"] == "test-msg"] assert matching_msgs.__len__() == 2 # 1 for sent, 1 for received assert await redis.lrange("acapy_outbound", 0, -1) == [] @@ -123,7 +125,7 @@ async def test_deliverer_retry_on_failure( msg = await redis.blpop("acapy_outbound", 10) assert msg # check for manual commit of previous message by handling a new message - faber.send_message(established_connection, 'test-failed-msg') + faber.send_message(established_connection, "test-failed-msg") msg_received = False retry_pop_count = 0 while not msg_received: @@ -135,5 +137,6 @@ async def test_deliverer_retry_on_failure( time.sleep(1) msg_received = True - assert "test-failed-msg" in (msg['content'] - for msg in faber.retrieve_basicmessages()['results']) + assert "test-failed-msg" in ( + msg["content"] for msg in faber.retrieve_basicmessages()["results"] + ) diff --git 
a/redis_events/integration/tests/test_redis_basic.py b/redis_events/integration/tests/test_redis_basic.py index f8391a27d..2e4629144 100644 --- a/redis_events/integration/tests/test_redis_basic.py +++ b/redis_events/integration/tests/test_redis_basic.py @@ -1,4 +1,5 @@ """Testing basic redis stuff.""" + import pytest diff --git a/redis_events/redis_events/v1_0/redis_queue/config.py b/redis_events/redis_events/v1_0/redis_queue/config.py index 5eebb6960..5c1a428da 100755 --- a/redis_events/redis_events/v1_0/redis_queue/config.py +++ b/redis_events/redis_events/v1_0/redis_queue/config.py @@ -2,8 +2,8 @@ import logging from typing import Any, Mapping, Optional -from pydantic import BaseModel, validator +from pydantic import BaseModel, validator LOGGER = logging.getLogger(__name__) @@ -43,6 +43,7 @@ def _alias_generator(key: str) -> str: class NoneDefaultModel(BaseModel): """Pydantic model that allows None as a default value.""" + @validator("*", pre=True) def not_none(cls, v, field): """If the value is None, return the default value.""" @@ -60,10 +61,12 @@ def not_none(cls, v, field): class ConnectionConfig(BaseModel): """Connection configuration model.""" + connection_url: str class Config: """Pydantic config.""" + alias_generator = _alias_generator allow_population_by_field_name = True @@ -75,12 +78,14 @@ def default(cls): class EventConfig(NoneDefaultModel): """Event configuration model.""" + event_topic_maps: Mapping[str, str] = EVENT_TOPIC_MAP event_webhook_topic_maps: Mapping[str, str] = EVENT_WEBHOOK_TOPIC_MAP deliver_webhook: bool = True class Config: """Pydantic config.""" + alias_generator = _alias_generator allow_population_by_field_name = True @@ -96,11 +101,13 @@ def default(cls): class InboundConfig(NoneDefaultModel): """Inbound configuration model.""" + acapy_inbound_topic: str = "acapy_inbound" acapy_direct_resp_topic: str = "acapy_inbound_direct_resp" class Config: """Pydantic config.""" + alias_generator = _alias_generator allow_population_by_field_name = True @@ -115,6 +122,7 @@ def default(cls): class OutboundConfig(NoneDefaultModel): """Outbound configuration model.""" + acapy_outbound_topic: str = "acapy_outbound" mediator_mode: bool = False @@ -129,6 +137,7 @@ def default(cls): class RedisConfig(BaseModel): """Redis configuration model.""" + event: Optional[EventConfig] inbound: Optional[InboundConfig] outbound: Optional[OutboundConfig] @@ -157,8 +166,7 @@ def process_config_dict(config_dict: dict) -> dict: def get_config(settings: Mapping[str, Any]) -> RedisConfig: """Retrieve producer configuration from settings.""" try: - LOGGER.debug("Constructing config from: %s", - settings.get("plugin_config")) + LOGGER.debug("Constructing config from: %s", settings.get("plugin_config")) config_dict = settings["plugin_config"].get("redis_queue", {}) LOGGER.debug("Retrieved: %s", config_dict) config_dict = process_config_dict(config_dict) @@ -168,6 +176,5 @@ def get_config(settings: Mapping[str, Any]) -> RedisConfig: config = RedisConfig.default() LOGGER.debug("Returning config: %s", config.json(indent=2)) - LOGGER.debug("Returning config(aliases): %s", - config.json(by_alias=True, indent=2)) + LOGGER.debug("Returning config(aliases): %s", config.json(by_alias=True, indent=2)) return config diff --git a/redis_events/redis_events/v1_0/redis_queue/inbound.py b/redis_events/redis_events/v1_0/redis_queue/inbound.py index 5bd2dd46e..0051252c4 100755 --- a/redis_events/redis_events/v1_0/redis_queue/inbound.py +++ b/redis_events/redis_events/v1_0/redis_queue/inbound.py @@ -1,9 +1,10 @@ 
"""Inbound transport using Redis.""" + import asyncio import base64 import json -from json import JSONDecodeError import logging +from json import JSONDecodeError from typing import cast from uuid import uuid4 @@ -18,14 +19,10 @@ DIDCOMM_V1_MIME_TYPE, ) from redis.asyncio import RedisCluster -from redis.exceptions import RedisError, RedisClusterException - -from .utils import ( - curr_datetime_to_str, - b64_to_bytes, -) +from redis.exceptions import RedisClusterException, RedisError -from .config import get_config, InboundConfig, ConnectionConfig +from .config import ConnectionConfig, InboundConfig, get_config +from .utils import b64_to_bytes, curr_datetime_to_str LOGGER = logging.getLogger(__name__) @@ -105,8 +102,7 @@ async def start(self): await asyncio.sleep(1) retry_pop_count = retry_pop_count + 1 if retry_pop_count > 5: - raise InboundTransportError( - f"Unexpected exception: {err}") + raise InboundTransportError(f"Unexpected exception: {err}") if not msg: await asyncio.sleep(0.2) continue @@ -122,8 +118,7 @@ async def start(self): plugin_uid, curr_datetime_to_str().encode("utf-8"), ) - uid_recip_key = f"{plugin_uid.decode()}_{recip_key}".encode( - "utf-8") + uid_recip_key = f"{plugin_uid.decode()}_{recip_key}".encode("utf-8") enc_uid_recip_key_count = await self.redis.hget( "uid_recip_key_pending_msg_count", uid_recip_key ) @@ -152,13 +147,13 @@ async def start(self): if session.profile.settings.get( "emit_new_didcomm_mime_type" ): - response_data[ - "content-type" - ] = DIDCOMM_V1_MIME_TYPE + response_data["content-type"] = ( + DIDCOMM_V1_MIME_TYPE + ) else: - response_data[ - "content-type" - ] = DIDCOMM_V0_MIME_TYPE + response_data["content-type"] = ( + DIDCOMM_V0_MIME_TYPE + ) else: response_data["content-type"] = "application/json" response = response.encode("utf-8") @@ -174,8 +169,7 @@ async def start(self): str.encode(json.dumps(message)), ) except RedisError as err: - LOGGER.exception( - f"Unexpected exception: {err}") + LOGGER.exception(f"Unexpected exception: {err}") except (MessageParseError, WireFormatParseError): LOGGER.exception("Failed to process message") continue diff --git a/redis_events/redis_events/v1_0/redis_queue/outbound.py b/redis_events/redis_events/v1_0/redis_queue/outbound.py index eeea82b6d..e70694770 100755 --- a/redis_events/redis_events/v1_0/redis_queue/outbound.py +++ b/redis_events/redis_events/v1_0/redis_queue/outbound.py @@ -1,7 +1,7 @@ """Basic in memory queue.""" + import base64 import json - import logging from aries_cloudagent.core.profile import Profile @@ -14,14 +14,11 @@ DIDCOMM_V0_MIME_TYPE, DIDCOMM_V1_MIME_TYPE, ) - from redis.asyncio import RedisCluster -from redis.exceptions import RedisError, RedisClusterException +from redis.exceptions import RedisClusterException, RedisError -from .config import OutboundConfig, ConnectionConfig, get_config -from .utils import ( - process_payload_recip_key, -) +from .config import ConnectionConfig, OutboundConfig, get_config +from .utils import process_payload_recip_key LOGGER = logging.getLogger(__name__) diff --git a/redis_events/redis_events/v1_0/redis_queue/tests/test_events.py b/redis_events/redis_events/v1_0/redis_queue/tests/test_events.py index 6ed9f7671..7b3e977e8 100755 --- a/redis_events/redis_events/v1_0/redis_queue/tests/test_events.py +++ b/redis_events/redis_events/v1_0/redis_queue/tests/test_events.py @@ -1,24 +1,19 @@ -import redis import json +from copy import deepcopy -from aries_cloudagent.core.in_memory import InMemoryProfile -from aries_cloudagent.core.event_bus import 
EventWithMetadata, Event, MockEventBus +import redis +from aiohttp.test_utils import unused_port from aries_cloudagent.connections.models.connection_target import ConnectionTarget -from aries_cloudagent.transport.outbound.message import OutboundMessage +from aries_cloudagent.core.event_bus import Event, EventWithMetadata, MockEventBus +from aries_cloudagent.core.in_memory import InMemoryProfile from aries_cloudagent.transport.error import TransportError -from aiohttp.test_utils import unused_port -from asynctest import TestCase as AsyncTestCase, mock as async_mock -from copy import deepcopy +from aries_cloudagent.transport.outbound.message import OutboundMessage +from asynctest import TestCase as AsyncTestCase +from asynctest import mock as async_mock from redis.asyncio import RedisCluster -from ..events import ( - setup, - on_startup, - on_shutdown, - handle_event, - process_event_payload, -) from .. import events as test_module +from ..events import handle_event, on_shutdown, on_startup, process_event_payload, setup SETTINGS = { "plugin_config": { @@ -83,8 +78,7 @@ def setUp(self): async def test_setup(self): context = async_mock.MagicMock( - settings=SETTINGS, inject=async_mock.MagicMock( - return_value=MockEventBus()) + settings=SETTINGS, inject=async_mock.MagicMock(return_value=MockEventBus()) ) await setup(context) @@ -97,22 +91,19 @@ async def test_setup_x(self): async def test_on_startup(self): self.profile.settings["plugin_config"] = SETTINGS["plugin_config"] - test_event = Event( - "test_topic", {"rev_reg_id": "mock", "crids": ["mock"]}) + test_event = Event("test_topic", {"rev_reg_id": "mock", "crids": ["mock"]}) with async_mock.patch.object( redis.asyncio.RedisCluster, "from_url", async_mock.MagicMock( - return_value=async_mock.MagicMock( - ping=async_mock.CoroutineMock()) + return_value=async_mock.MagicMock(ping=async_mock.CoroutineMock()) ), ): await on_startup(self.profile, test_event) async def test_on_startup_x(self): self.profile.settings["plugin_config"] = SETTINGS["plugin_config"] - test_event = Event( - "test_topic", {"rev_reg_id": "mock", "crids": ["mock"]}) + test_event = Event("test_topic", {"rev_reg_id": "mock", "crids": ["mock"]}) with async_mock.patch.object( redis.asyncio.RedisCluster, "from_url", @@ -123,8 +114,7 @@ async def test_on_startup_x(self): async def test_on_shutddown(self): self.profile.settings["plugin_config"] = SETTINGS["plugin_config"] - test_event = Event( - "test_topic", {"rev_reg_id": "mock", "crids": ["mock"]}) + test_event = Event("test_topic", {"rev_reg_id": "mock", "crids": ["mock"]}) await on_shutdown(self.profile, test_event) async def test_handle_event(self): @@ -143,8 +133,7 @@ async def test_handle_event(self): }, topic="acapy::basicmessage::received", metadata=async_mock.MagicMock( - pattern=async_mock.MagicMock( - pattern="acapy::basicmessage::received") + pattern=async_mock.MagicMock(pattern="acapy::basicmessage::received") ), ) await handle_event(self.profile, test_event_with_metadata) @@ -169,8 +158,7 @@ async def test_handle_event(self): did="6tb9bVM3SzFRMRxoWJTvp1", endpoint="http://echo:3002", label="test-runner", - recipient_keys=[ - "4DBJ4Zp851gjzNtJmmoE97WqVrWN36yZRaGif4AGrxwQ"], + recipient_keys=["4DBJ4Zp851gjzNtJmmoE97WqVrWN36yZRaGif4AGrxwQ"], routing_keys=[], sender_key="4DBJ4Zp851gjzNtJmmoE97WqVrWN36yZRaGif4AGrxwQ", ) @@ -207,8 +195,7 @@ async def test_handle_event(self): did="6tb9bVM3SzFRMRxoWJTvp1", endpoint="http://echo:3002", label="test-runner", - recipient_keys=[ - "4DBJ4Zp851gjzNtJmmoE97WqVrWN36yZRaGif4AGrxwQ"], 
+ recipient_keys=["4DBJ4Zp851gjzNtJmmoE97WqVrWN36yZRaGif4AGrxwQ"], routing_keys=[], sender_key="4DBJ4Zp851gjzNtJmmoE97WqVrWN36yZRaGif4AGrxwQ", ) @@ -248,8 +235,7 @@ async def test_handle_event_deliver_webhook(self): }, topic="acapy::basicmessage::received", metadata=async_mock.MagicMock( - pattern=async_mock.MagicMock( - pattern="acapy::basicmessage::received") + pattern=async_mock.MagicMock(pattern="acapy::basicmessage::received") ), ) await handle_event(self.profile, test_event_with_metadata) diff --git a/redis_events/redis_events/v1_0/redis_queue/tests/test_inbound.py b/redis_events/redis_events/v1_0/redis_queue/tests/test_inbound.py index 69f36bce1..18a5d2229 100755 --- a/redis_events/redis_events/v1_0/redis_queue/tests/test_inbound.py +++ b/redis_events/redis_events/v1_0/redis_queue/tests/test_inbound.py @@ -1,11 +1,13 @@ import base64 import json -import redis +import redis +from aiohttp.test_utils import unused_port from aries_cloudagent.core.in_memory import InMemoryProfile from aries_cloudagent.messaging.error import MessageParseError -from aiohttp.test_utils import unused_port -from asynctest import TestCase as AsyncTestCase, mock as async_mock, PropertyMock +from asynctest import PropertyMock +from asynctest import TestCase as AsyncTestCase +from asynctest import mock as async_mock from .. import inbound as test_inbound from ..inbound import RedisInboundTransport diff --git a/redis_events/redis_events/v1_0/redis_queue/tests/test_outbound.py b/redis_events/redis_events/v1_0/redis_queue/tests/test_outbound.py index 746ab6c34..b3fa6b416 100755 --- a/redis_events/redis_events/v1_0/redis_queue/tests/test_outbound.py +++ b/redis_events/redis_events/v1_0/redis_queue/tests/test_outbound.py @@ -6,9 +6,11 @@ import redis from aiohttp.test_utils import unused_port from aries_cloudagent.core.in_memory import InMemoryProfile -from aries_cloudagent.transport.outbound.base import (ConnectionTarget, - OutboundMessage, - QueuedOutboundMessage) +from aries_cloudagent.transport.outbound.base import ( + ConnectionTarget, + OutboundMessage, + QueuedOutboundMessage, +) from aries_cloudagent.transport.wire_format import BaseWireFormat from asynctest import PropertyMock from asynctest import TestCase as AsyncTestCase @@ -104,8 +106,7 @@ async def test_init(self): await redis_outbound_inst.stop() async def test_init_no_bind_instance(self): - RedisOutboundQueue.running = PropertyMock( - side_effect=[True, True, True, False]) + RedisOutboundQueue.running = PropertyMock(side_effect=[True, True, True, False]) with async_mock.patch.object( redis.asyncio.RedisCluster, "from_url", @@ -323,8 +324,7 @@ async def test_reassign_recip_key_to_uid(self): def test_recipients_from_packed_message(self): assert ( - ",".join(test_util._recipients_from_packed_message( - TEST_PAYLOAD_BYTES)) + ",".join(test_util._recipients_from_packed_message(TEST_PAYLOAD_BYTES)) == "BDg8S6gkvnwDB75v5royCE1XrWn42Spx885aV7cxaNJL" ) @@ -357,8 +357,7 @@ async def test_handle_message_x(self): self.profile.context.injector.bind_instance( redis.asyncio.RedisCluster, async_mock.MagicMock( - rpush=async_mock.CoroutineMock( - side_effect=redis.exceptions.RedisError), + rpush=async_mock.CoroutineMock(side_effect=redis.exceptions.RedisError), ), ) redis_outbound_inst = RedisOutboundQueue( @@ -374,8 +373,7 @@ async def test_handle_message(self): self.profile.context.injector.bind_instance( redis.asyncio.RedisCluster, async_mock.MagicMock( - rpush=async_mock.CoroutineMock( - side_effect=redis.exceptions.RedisError), + 
rpush=async_mock.CoroutineMock(side_effect=redis.exceptions.RedisError), ), ) with async_mock.patch.object( @@ -389,9 +387,7 @@ async def test_handle_message(self): ) ), ): - redis_outbound_inst = RedisOutboundQueue( - root_profile=self.profile - ) + redis_outbound_inst = RedisOutboundQueue(root_profile=self.profile) q_out_msg = QueuedOutboundMessage( profile=self.profile, message=OutboundMessage(payload="test-message"), @@ -435,9 +431,7 @@ async def test_handle_message_mediator(self): ) ), ): - redis_outbound_inst = RedisOutboundQueue( - root_profile=self.profile - ) + redis_outbound_inst = RedisOutboundQueue(root_profile=self.profile) q_out_msg = QueuedOutboundMessage( profile=self.profile, message=OutboundMessage(payload="test-message"), @@ -621,10 +615,8 @@ def test_get_config(self): test_redis_config = test_config.get_config(SETTINGS) assert isinstance(test_redis_config.event, test_config.EventConfig) assert isinstance(test_redis_config.inbound, test_config.InboundConfig) - assert isinstance(test_redis_config.outbound, - test_config.OutboundConfig) - assert isinstance(test_redis_config.connection, - test_config.ConnectionConfig) + assert isinstance(test_redis_config.outbound, test_config.OutboundConfig) + assert isinstance(test_redis_config.connection, test_config.ConnectionConfig) assert test_redis_config.event.event_topic_maps == { "^acapy::webhook::(.*)$": "acapy-webhook-$wallet_id", diff --git a/redis_events/redis_events/v1_0/redis_queue/utils.py b/redis_events/redis_events/v1_0/redis_queue/utils.py index 618dfbd0e..dc9809c2c 100755 --- a/redis_events/redis_events/v1_0/redis_queue/utils.py +++ b/redis_events/redis_events/v1_0/redis_queue/utils.py @@ -1,12 +1,13 @@ """Utilities for redis_queue module.""" + import asyncio -import datetime import base64 +import datetime import json import logging +from typing import List, Union from redis.asyncio import RedisCluster -from typing import Union, List LOGGER = logging.getLogger(__name__) @@ -47,8 +48,7 @@ def _recipients_from_packed_message(packed_message: bytes) -> List[str]: except Exception as err: raise ValueError("Invalid packed message") from err - recips_json = b64_to_bytes( - wrapper["protected"], urlsafe=True).decode("ascii") + recips_json = b64_to_bytes(wrapper["protected"], urlsafe=True).decode("ascii") try: recips_outer = json.loads(recips_json) except Exception as err: @@ -161,7 +161,7 @@ async def reassign_recip_key_to_uid( async def process_payload_recip_key( redis: RedisCluster, payload: Union[str, bytes], topic: str ): - """Process payload and recip_key and return topic and message.""""" + """Process payload and recip_key and return topic and message.""" recip_key_in = ",".join(_recipients_from_packed_message(payload)) recip_key_in_encoded = recip_key_in.encode() message = str.encode( @@ -180,8 +180,7 @@ async def process_payload_recip_key( if not last_accessed_map_value: stale_uid_check = True elif last_accessed_map_value and ( - get_timedelta_seconds(str_to_datetime( - last_accessed_map_value.decode())) >= 15 + get_timedelta_seconds(str_to_datetime(last_accessed_map_value.decode())) >= 15 ): stale_uid_check = True if stale_uid_check: diff --git a/redis_events/redis_events/v1_0/services/deliverer/__init__.py b/redis_events/redis_events/v1_0/services/deliverer/__init__.py index c79dc8e92..6bb7deb47 100755 --- a/redis_events/redis_events/v1_0/services/deliverer/__init__.py +++ b/redis_events/redis_events/v1_0/services/deliverer/__init__.py @@ -7,6 +7,7 @@ class NoneDefaultModel(BaseModel): """Pydantic model that allows 
None as a default value.""" + @validator("*", pre=True) def not_none(cls, v, field): """If the value is None, return the default value.""" @@ -24,8 +25,10 @@ def not_none(cls, v, field): class RedisQueuePayload(NoneDefaultModel): """Base class for payloads that are sent to the Redis queue.""" + class Config: """Pydantic config.""" + json_encoders = {bytes: lambda v: base64.urlsafe_b64encode(v).decode()} @classmethod @@ -41,11 +44,13 @@ def to_bytes(self) -> bytes: class Service(BaseModel): """Service model.""" + url: str class OutboundPayload(RedisQueuePayload): """Payload to be sent from the Redis queue.""" + service: Service payload: bytes headers: dict = {} @@ -60,7 +65,7 @@ def __init__(self, **data): @validator("payload", pre=True) @classmethod def decode_payload_to_bytes(cls, v): - """Decode payload model to bytes.""""" + """Decode payload model to bytes.""" assert isinstance(v, str) return base64.urlsafe_b64decode(v) diff --git a/redis_events/redis_events/v1_0/services/deliverer/deliver.py b/redis_events/redis_events/v1_0/services/deliverer/deliver.py index 986660dc6..29bfe94f2 100755 --- a/redis_events/redis_events/v1_0/services/deliverer/deliver.py +++ b/redis_events/redis_events/v1_0/services/deliverer/deliver.py @@ -1,16 +1,17 @@ """Redis Outbound Delivery Service.""" -import aiohttp + import asyncio import base64 +import json import logging import signal -import json - from contextlib import suppress -from redis.asyncio import RedisCluster -from redis.exceptions import RedisError, RedisClusterException -from time import time from os import getenv +from time import time + +import aiohttp +from redis.asyncio import RedisCluster +from redis.exceptions import RedisClusterException, RedisError from redis_events.v1_0.status_endpoint.status_endpoints import ( start_status_endpoints_server, @@ -125,8 +126,7 @@ async def process_delivery(self): } ) else: - logging.error( - f"Exceeded max retries for {str(endpoint)}") + logging.error(f"Exceeded max retries for {str(endpoint)}") else: logging.info(f"Message dispatched to {endpoint}") elif endpoint_scheme == "ws": @@ -169,8 +169,7 @@ async def add_retry(self, message: dict): zadd_sent = True except (RedisError, RedisClusterException) as err: await asyncio.sleep(1) - logging.exception( - f"Unexpected redis client exception (zadd): {err}") + logging.exception(f"Unexpected redis client exception (zadd): {err}") async def process_retries(self): """Process retries.""" diff --git a/redis_events/redis_events/v1_0/services/deliverer/tests/__init__.py b/redis_events/redis_events/v1_0/services/deliverer/tests/__init__.py index b7be57fa8..28e979528 100755 --- a/redis_events/redis_events/v1_0/services/deliverer/tests/__init__.py +++ b/redis_events/redis_events/v1_0/services/deliverer/tests/__init__.py @@ -1,2 +1,3 @@ import sys -sys.path.append('.') \ No newline at end of file + +sys.path.append(".") diff --git a/redis_events/redis_events/v1_0/services/deliverer/tests/test_deliver.py b/redis_events/redis_events/v1_0/services/deliverer/tests/test_deliver.py index 48c919ba6..bfdf6bff5 100755 --- a/redis_events/redis_events/v1_0/services/deliverer/tests/test_deliver.py +++ b/redis_events/redis_events/v1_0/services/deliverer/tests/test_deliver.py @@ -1,14 +1,16 @@ import asyncio -import aiohttp import base64 -import redis +import json import os import string -import json - -from asynctest import TestCase as AsyncTestCase, mock as async_mock, PropertyMock from time import time +import aiohttp +import redis +from asynctest import PropertyMock +from 
asynctest import TestCase as AsyncTestCase +from asynctest import mock as async_mock + from .. import deliver as test_module from ..deliver import Deliverer, main diff --git a/redis_events/redis_events/v1_0/services/relay/relay.py b/redis_events/redis_events/v1_0/services/relay/relay.py index b12cbf030..86a48a7ea 100644 --- a/redis_events/redis_events/v1_0/services/relay/relay.py +++ b/redis_events/redis_events/v1_0/services/relay/relay.py @@ -1,4 +1,5 @@ """Inbound relay service agent.""" + import asyncio import base64 import json @@ -12,10 +13,10 @@ from redis.asyncio import RedisCluster from redis.exceptions import RedisClusterException, RedisError -from redis_events.v1_0.redis_queue.utils import (b64_to_bytes, - process_payload_recip_key) -from redis_events.v1_0.status_endpoint.status_endpoints import \ - start_status_endpoints_server +from redis_events.v1_0.redis_queue.utils import b64_to_bytes, process_payload_recip_key +from redis_events.v1_0.status_endpoint.status_endpoints import ( + start_status_endpoints_server, +) logging.basicConfig( format="%(asctime)s | %(levelname)s: %(message)s", @@ -75,8 +76,7 @@ async def process_direct_responses(self): msg_received = True except (RedisError, RedisClusterException) as err: await asyncio.sleep(1) - logging.exception( - f"Unexpected redis client exception: {err}") + logging.exception(f"Unexpected redis client exception: {err}") if not msg: await asyncio.sleep(0.2) continue @@ -124,8 +124,7 @@ async def start(self): app.add_routes([web.get("/", self.message_handler)]) runner = web.AppRunner(app) await runner.setup() - self.site = web.TCPSite( - runner, host=self.site_host, port=self.site_port) + self.site = web.TCPSite(runner, host=self.site_host, port=self.site_port) await self.site.start() async def message_handler(self, request): @@ -151,8 +150,7 @@ async def message_handler(self, request): direct_response_request = False transport_dec = message_dict.get("~transport") if transport_dec: - direct_response_mode = transport_dec.get( - "return_route") + direct_response_mode = transport_dec.get("return_route") if direct_response_mode and direct_response_mode != "none": direct_response_request = True txn_id = str(uuid4()) @@ -266,8 +264,7 @@ async def start(self): app.add_routes([web.post("/", self.message_handler)]) runner = web.AppRunner(app) await runner.setup() - self.site = web.TCPSite( - runner, host=self.site_host, port=self.site_port) + self.site = web.TCPSite(runner, host=self.site_host, port=self.site_port) await self.site.start() async def invite_handler(self, request): @@ -320,8 +317,7 @@ async def message_handler(self, request): response_sent = True except (RedisError, RedisClusterException) as err: await asyncio.sleep(1) - logging.exception( - f"Unexpected redis client exception: {err}") + logging.exception(f"Unexpected redis client exception: {err}") try: response_data = await asyncio.wait_for( self.get_direct_responses( @@ -367,8 +363,7 @@ async def message_handler(self, request): msg_sent = True except (RedisError, RedisClusterException) as err: await asyncio.sleep(1) - logging.exception( - f"Unexpected redis client exception: {err}") + logging.exception(f"Unexpected redis client exception: {err}") return web.Response(status=200) diff --git a/redis_events/redis_events/v1_0/services/relay/tests/__init__.py b/redis_events/redis_events/v1_0/services/relay/tests/__init__.py index b7be57fa8..28e979528 100644 --- a/redis_events/redis_events/v1_0/services/relay/tests/__init__.py +++ 
b/redis_events/redis_events/v1_0/services/relay/tests/__init__.py @@ -1,2 +1,3 @@ import sys -sys.path.append('.') \ No newline at end of file + +sys.path.append(".") diff --git a/redis_events/redis_events/v1_0/services/relay/tests/test_relay.py b/redis_events/redis_events/v1_0/services/relay/tests/test_relay.py index c20f1490d..2c9800ed7 100644 --- a/redis_events/redis_events/v1_0/services/relay/tests/test_relay.py +++ b/redis_events/redis_events/v1_0/services/relay/tests/test_relay.py @@ -1,15 +1,16 @@ -import aiohttp -import os import json -import redis +import os -from asynctest import TestCase as AsyncTestCase, mock as async_mock, PropertyMock +import aiohttp +import redis +from asynctest import PropertyMock +from asynctest import TestCase as AsyncTestCase +from asynctest import mock as async_mock from .. import relay as test_module from ..relay import HttpRelay, WSRelay -test_retry_msg_a = str.encode(json.dumps( - ["invalid", "list", "require", "dict"])) +test_retry_msg_a = str.encode(json.dumps(["invalid", "list", "require", "dict"])) test_retry_msg_b = str.encode( json.dumps( { @@ -148,8 +149,7 @@ async def test_main_x(self): redis.asyncio.RedisCluster, "from_url", async_mock.MagicMock( - ping=async_mock.CoroutineMock( - side_effect=redis.exceptions.RedisError) + ping=async_mock.CoroutineMock(side_effect=redis.exceptions.RedisError) ), ) as mock_redis, async_mock.patch.object( HttpRelay, "start", async_mock.CoroutineMock() @@ -194,8 +194,7 @@ async def test_stop(self): service = HttpRelay( "test", "test", "8080", "direct_resp_topic", "inbound_msg_topic" ) - service.site = async_mock.MagicMock( - stop=async_mock.CoroutineMock()) + service.site = async_mock.MagicMock(stop=async_mock.CoroutineMock()) service.redis = mock_redis await service.stop() @@ -249,8 +248,7 @@ async def test_process_direct_response(self): ] ) mock_redis.ping = async_mock.CoroutineMock() - sentinel = PropertyMock( - side_effect=[True, True, True, True, True, False]) + sentinel = PropertyMock(side_effect=[True, True, True, True, True, False]) HttpRelay.running = sentinel service = HttpRelay( "test", "test", "8080", "direct_resp_topic", "inbound_msg_topic" @@ -317,8 +315,7 @@ async def test_message_handler(self): test_module, "process_payload_recip_key", async_mock.CoroutineMock( - return_value=("acapy_inbound_input_recip_key", - async_mock.MagicMock()) + return_value=("acapy_inbound_input_recip_key", async_mock.MagicMock()) ), ): service = HttpRelay( @@ -345,8 +342,7 @@ async def test_message_handler(self): test_module, "process_payload_recip_key", async_mock.CoroutineMock( - return_value=("acapy_inbound_input_recip_key", - async_mock.MagicMock()) + return_value=("acapy_inbound_input_recip_key", async_mock.MagicMock()) ), ): service = HttpRelay( @@ -372,8 +368,7 @@ async def test_message_handler(self): with async_mock.patch.object( HttpRelay, "get_direct_responses", - async_mock.CoroutineMock( - side_effect=test_module.asyncio.TimeoutError), + async_mock.CoroutineMock(side_effect=test_module.asyncio.TimeoutError), ), async_mock.patch.object( redis.asyncio.RedisCluster, "from_url", @@ -382,8 +377,7 @@ async def test_message_handler(self): test_module, "process_payload_recip_key", async_mock.CoroutineMock( - return_value=("acapy_inbound_input_recip_key", - async_mock.MagicMock()) + return_value=("acapy_inbound_input_recip_key", async_mock.MagicMock()) ), ): service = HttpRelay( @@ -413,8 +407,7 @@ async def test_message_handler_x(self): with async_mock.patch.object( HttpRelay, "get_direct_responses", - 
async_mock.CoroutineMock( - side_effect=test_module.asyncio.TimeoutError), + async_mock.CoroutineMock(side_effect=test_module.asyncio.TimeoutError), ), async_mock.patch.object( redis.asyncio.RedisCluster, "from_url", @@ -423,8 +416,7 @@ async def test_message_handler_x(self): test_module, "process_payload_recip_key", async_mock.CoroutineMock( - return_value=("acapy_inbound_input_recip_key", - async_mock.MagicMock()) + return_value=("acapy_inbound_input_recip_key", async_mock.MagicMock()) ), ): service = HttpRelay( @@ -512,8 +504,7 @@ async def test_is_running(self): "test", "test", "8080", "direct_resp_topic", "inbound_msg_topic" ) mock_redis = async_mock.MagicMock( - ping=async_mock.CoroutineMock( - side_effect=redis.exceptions.RedisError) + ping=async_mock.CoroutineMock(side_effect=redis.exceptions.RedisError) ) service.redis = mock_redis service.running = True @@ -569,8 +560,7 @@ async def test_stop(self): service = WSRelay( "test", "test", "8080", "direct_resp_topic", "inbound_msg_topic" ) - service.site = async_mock.MagicMock( - stop=async_mock.CoroutineMock()) + service.site = async_mock.MagicMock(stop=async_mock.CoroutineMock()) service.redis = mock_redis await service.stop() @@ -619,8 +609,7 @@ async def test_process_direct_response(self): ] ) mock_redis.ping = async_mock.CoroutineMock() - sentinel = PropertyMock( - side_effect=[True, True, True, True, False]) + sentinel = PropertyMock(side_effect=[True, True, True, True, False]) WSRelay.running = sentinel service = WSRelay( "test", "test", "8080", "direct_resp_topic", "inbound_msg_topic" @@ -676,8 +665,7 @@ async def test_message_handler_a(self): mock_msg = async_mock.MagicMock( type=aiohttp.WSMsgType.TEXT.value, data=str.encode( - json.dumps({"test": "....", "~transport": { - "return_route": "..."}}) + json.dumps({"test": "....", "~transport": {"return_route": "..."}}) ), ) @@ -737,8 +725,7 @@ async def test_message_handler_a(self): test_module, "process_payload_recip_key", async_mock.CoroutineMock( - return_value=("acapy_inbound_input_recip_key", - async_mock.MagicMock()) + return_value=("acapy_inbound_input_recip_key", async_mock.MagicMock()) ), ): mock_get_direct_responses.return_value = { @@ -760,8 +747,7 @@ async def test_message_handler_b(self): ) mock_msg = async_mock.MagicMock( type=aiohttp.WSMsgType.TEXT.value, - data=json.dumps( - {"test": "....", "~transport": {"return_route": "..."}}), + data=json.dumps({"test": "....", "~transport": {"return_route": "..."}}), ) with async_mock.patch.object( @@ -820,8 +806,7 @@ async def test_message_handler_b(self): test_module, "process_payload_recip_key", async_mock.CoroutineMock( - return_value=("acapy_inbound_input_recip_key", - async_mock.MagicMock()) + return_value=("acapy_inbound_input_recip_key", async_mock.MagicMock()) ), ): mock_get_direct_responses.return_value = { @@ -902,8 +887,7 @@ async def test_message_handler_c(self): test_module, "process_payload_recip_key", async_mock.CoroutineMock( - return_value=("acapy_inbound_input_recip_key", - async_mock.MagicMock()) + return_value=("acapy_inbound_input_recip_key", async_mock.MagicMock()) ), ): service = WSRelay( @@ -922,8 +906,7 @@ async def test_message_handler_x(self): ) mock_msg = async_mock.MagicMock( type=aiohttp.WSMsgType.TEXT.value, - data=json.dumps( - {"test": "....", "~transport": {"return_route": "..."}}), + data=json.dumps({"test": "....", "~transport": {"return_route": "..."}}), ) with async_mock.patch.object( @@ -982,8 +965,7 @@ async def test_message_handler_x(self): test_module, "process_payload_recip_key", 
async_mock.CoroutineMock( - return_value=("acapy_inbound_input_recip_key", - async_mock.MagicMock()) + return_value=("acapy_inbound_input_recip_key", async_mock.MagicMock()) ), ): mock_get_direct_responses.side_effect = test_module.asyncio.TimeoutError @@ -998,8 +980,7 @@ async def test_message_handler_x(self): mock_msg = async_mock.MagicMock( type=aiohttp.WSMsgType.TEXT.value, - data=json.dumps( - {"test": "....", "~transport": {"return_route": "..."}}), + data=json.dumps({"test": "....", "~transport": {"return_route": "..."}}), ) with async_mock.patch.object( @@ -1058,8 +1039,7 @@ async def test_message_handler_x(self): test_module, "process_payload_recip_key", async_mock.CoroutineMock( - return_value=("acapy_inbound_input_recip_key", - async_mock.MagicMock()) + return_value=("acapy_inbound_input_recip_key", async_mock.MagicMock()) ), ): mock_get_direct_responses.return_value = { @@ -1133,8 +1113,7 @@ async def test_message_handler_x(self): test_module, "process_payload_recip_key", async_mock.CoroutineMock( - return_value=("acapy_inbound_input_recip_key", - async_mock.MagicMock()) + return_value=("acapy_inbound_input_recip_key", async_mock.MagicMock()) ), ): service = WSRelay( @@ -1203,8 +1182,7 @@ async def test_message_handler_x(self): test_module, "process_payload_recip_key", async_mock.CoroutineMock( - return_value=("acapy_inbound_input_recip_key", - async_mock.MagicMock()) + return_value=("acapy_inbound_input_recip_key", async_mock.MagicMock()) ), ): service = WSRelay( @@ -1277,8 +1255,7 @@ async def test_message_handler_x(self): test_module, "process_payload_recip_key", async_mock.CoroutineMock( - return_value=("acapy_inbound_input_recip_key", - async_mock.MagicMock()) + return_value=("acapy_inbound_input_recip_key", async_mock.MagicMock()) ), ): service = WSRelay( @@ -1322,8 +1299,7 @@ async def test_is_running(self): "test", "test", "8080", "direct_resp_topic", "inbound_msg_topic" ) mock_redis = async_mock.MagicMock( - ping=async_mock.CoroutineMock( - side_effect=redis.exceptions.RedisError) + ping=async_mock.CoroutineMock(side_effect=redis.exceptions.RedisError) ) service.redis = mock_redis service.running = True diff --git a/redis_events/redis_events/v1_0/status_endpoint/status_endpoints.py b/redis_events/redis_events/v1_0/status_endpoint/status_endpoints.py index c4316afd8..7caba221a 100644 --- a/redis_events/redis_events/v1_0/status_endpoint/status_endpoints.py +++ b/redis_events/redis_events/v1_0/status_endpoint/status_endpoints.py @@ -1,9 +1,10 @@ """FastAPI status endpoints for readiness and liveness checks.""" + import logging -import uvicorn -import nest_asyncio -from fastapi import Security, Depends, APIRouter, HTTPException +import nest_asyncio +import uvicorn +from fastapi import APIRouter, Depends, HTTPException, Security from fastapi.security.api_key import APIKeyHeader nest_asyncio.apply() diff --git a/repo_manager.py b/repo_manager.py index 23ed88c0d..c3cbcccc3 100644 --- a/repo_manager.py +++ b/repo_manager.py @@ -6,7 +6,7 @@ from enum import Enum from typing import Optional -GLOBAL_PLUGIN_DIR = 'plugin_globals' +GLOBAL_PLUGIN_DIR = "plugin_globals" class PluginInfo: @@ -22,96 +22,100 @@ def __init__( class MangagedPoetrySections(str, Enum): - META = '[tool.poetry]' - DEPS = '[tool.poetry.dependencies]' - DEV_DEPS = '[tool.poetry.dev-dependencies]' - INT_DEPS = '[tool.poetry.group.integration.dependencies]' - RUFF = '[tool.ruff]' - RUFF_LINT = '[tool.ruff.lint]' - RUFF_FILES = '[tool.ruff.per-file-ignores]' - PYTEST = '[tool.pytest.ini_options]' - COVERAGE = 
'[tool.coverage.run]' - COVERAGE_REPORT = '[tool.coverage.report]' - COVERAGE_XML = '[tool.coverage.xml]' - BUILD = '[build-system]' - EXTRAS = '[tool.poetry.extras]' + META = "[tool.poetry]" + DEPS = "[tool.poetry.dependencies]" + DEV_DEPS = "[tool.poetry.dev-dependencies]" + INT_DEPS = "[tool.poetry.group.integration.dependencies]" + RUFF = "[tool.ruff]" + RUFF_LINT = "[tool.ruff.lint]" + RUFF_FILES = "[tool.ruff.per-file-ignores]" + PYTEST = "[tool.pytest.ini_options]" + COVERAGE = "[tool.coverage.run]" + COVERAGE_REPORT = "[tool.coverage.report]" + COVERAGE_XML = "[tool.coverage.xml]" + BUILD = "[build-system]" + EXTRAS = "[tool.poetry.extras]" sections = { - 'META': [], - 'DEPS': [], - 'DEV_DEPS': [], - 'INT_DEPS': [], - 'RUFF': [], - 'RUFF_LINT': [], - 'RUFF_FILES': [], - 'PYTEST': [], - 'COVERAGE': [], - 'COVERAGE_REPORT': [], - 'COVERAGE_XML': [], - 'BUILD': [], - 'EXTRAS': [] + "META": [], + "DEPS": [], + "DEV_DEPS": [], + "INT_DEPS": [], + "RUFF": [], + "RUFF_LINT": [], + "RUFF_FILES": [], + "PYTEST": [], + "COVERAGE": [], + "COVERAGE_REPORT": [], + "COVERAGE_XML": [], + "BUILD": [], + "EXTRAS": [], } class NEW_PLUGIN_FOLDERS(Enum): - DOCKER = 'docker' - INTEGRATION = 'integration' - DEVCONTAINER = '.devcontainer' - VSCODE = '.vscode' + DOCKER = "docker" + INTEGRATION = "integration" + DEVCONTAINER = ".devcontainer" + VSCODE = ".vscode" class NEW_PLUGIN_FILES(Enum): - PYPROJECT = 'pyproject.toml' - README = 'README.md' - DEFINITION = 'definition.py' + PYPROJECT = "pyproject.toml" + README = "README.md" + DEFINITION = "definition.py" class TAGGED_FILES(Enum): - DOCKER_DEFAULT = 'docker/default.yml' - DOCKERFILE = 'docker/Dockerfile' - DOCKER_INTEGRATION = 'docker/integration.yml' - PYPROJECT = 'pyproject.toml' - PYPROJECT_INTEGRATION = 'integration/pyproject.toml' - DEVCONTAINER = '.devcontainer/devcontainer.json' - VSCODE = '.vscode/launch.json' + DOCKER_DEFAULT = "docker/default.yml" + DOCKERFILE = "docker/Dockerfile" + DOCKER_INTEGRATION = "docker/integration.yml" + PYPROJECT = "pyproject.toml" + PYPROJECT_INTEGRATION = "integration/pyproject.toml" + DEVCONTAINER = ".devcontainer/devcontainer.json" + VSCODE = ".vscode/launch.json" def replace_plugin_tag(path: str, info: PluginInfo): - with open(path, 'r') as file: + with open(path, "r") as file: filedata = file.read() filedata = filedata.replace(GLOBAL_PLUGIN_DIR, info.name) - with open(path, 'w') as file: + with open(path, "w") as file: file.write(filedata) def copy_all_common_files_for_new_plugin(info: PluginInfo) -> None: for folder in list(NEW_PLUGIN_FOLDERS): shutil.copytree( - f'./{GLOBAL_PLUGIN_DIR}/{folder.value}', - f'./{info.name}/{folder.value}' + f"./{GLOBAL_PLUGIN_DIR}/{folder.value}", f"./{info.name}/{folder.value}" ) for file in list(NEW_PLUGIN_FILES): - file_location = f'./{info.name}/{file.value}' if not file == NEW_PLUGIN_FILES.DEFINITION else f'./{info.name}/{info.name}/{file.value}' - shutil.copyfile( - f'./{GLOBAL_PLUGIN_DIR}/{file.value}', - file_location + file_location = ( + f"./{info.name}/{file.value}" + if not file == NEW_PLUGIN_FILES.DEFINITION + else f"./{info.name}/{info.name}/{file.value}" ) + shutil.copyfile(f"./{GLOBAL_PLUGIN_DIR}/{file.value}", file_location) for file in list(TAGGED_FILES): - replace_plugin_tag( - f'./{info.name}/{file.value}', info) + replace_plugin_tag(f"./{info.name}/{file.value}", info) def combine_dependenices(plugin_dependencies, global_dependencies) -> None: """Add the plugin dependencies to the global dependencies if they are plugin specific.""" for p_dep in 
plugin_dependencies: - if (p_dep.split('=')[0].strip() not in [g_dep.split('=')[0].strip() for g_dep in global_dependencies]): + if p_dep.split("=")[0].strip() not in [ + g_dep.split("=")[0].strip() for g_dep in global_dependencies + ]: global_dependencies.append(p_dep) def is_end_of_section(line: str, current_section: str) -> bool: str_line = line.strip() - return str_line in [section.value for section in MangagedPoetrySections] and str_line != current_section + return ( + str_line in [section.value for section in MangagedPoetrySections] + and str_line != current_section + ) def get_section(i: int, filedata: list, arr: list, current_section: str) -> int: @@ -121,40 +125,41 @@ def get_section(i: int, filedata: list, arr: list, current_section: str) -> int: arr.append(filedata[j]) j += 1 # Remove the last empty line - if arr[-1] == '': + if arr[-1] == "": arr.pop() return j - i def extract_common_sections(filedata: str, sections: dict) -> None: """Go through the file by line and extract the section into the sections object.""" - filedata = filedata.split('\n') + filedata = filedata.split("\n") for i in range(len(filedata)): line = filedata[i] for section in MangagedPoetrySections: if line.startswith(section.value): - i += get_section(i + 1, filedata, - sections[section.name], section.value) + i += get_section(i + 1, filedata, sections[section.name], section.value) -def get_section_output(i: int, content: list, output: list, section: list, current_section: str) -> int: +def get_section_output( + i: int, content: list, output: list, section: list, current_section: str +) -> int: """ - Get a config section based off of an empty line of length of file. - Args: - i: The current line number - content: The file content - output: The output list - section: The section to process - - Returns: The number of lines in the section + Get a config section based off of an empty line or the length of the file. 
+ Args: + i: The current line number + content: The file content + output: The output list + section: The section to process + + Returns: The number of lines in the section """ j = i output.append(content[j]) - while (j < len(content) - 1 and not is_end_of_section(content[j], current_section)): + while j < len(content) - 1 and not is_end_of_section(content[j], current_section): j += 1 - while (len(section) > 0): - output.append(section.pop(0) + '\n') - output.append('\n') + while len(section) > 0: + output.append(section.pop(0) + "\n") + output.append("\n") return j - i @@ -163,92 +168,108 @@ def get_and_combine_main_poetry_sections(name: str) -> (dict, dict): global_sections = deepcopy(sections) plugin_sections = deepcopy(sections) - with open(f'./{GLOBAL_PLUGIN_DIR}/{TAGGED_FILES.PYPROJECT.value}', 'r') as file: + with open(f"./{GLOBAL_PLUGIN_DIR}/{TAGGED_FILES.PYPROJECT.value}", "r") as file: filedata = file.read() extract_common_sections(filedata, global_sections) - with open(f'./{name}/{TAGGED_FILES.PYPROJECT.value}', 'r') as file: + with open(f"./{name}/{TAGGED_FILES.PYPROJECT.value}", "r") as file: filedata = file.read() extract_common_sections(filedata, plugin_sections) - combine_dependenices(plugin_sections['DEPS'], global_sections['DEPS']) - combine_dependenices(plugin_sections['DEV_DEPS'], - global_sections['DEV_DEPS']) - combine_dependenices(plugin_sections['INT_DEPS'], - global_sections['INT_DEPS']) + combine_dependenices(plugin_sections["DEPS"], global_sections["DEPS"]) + combine_dependenices(plugin_sections["DEV_DEPS"], global_sections["DEV_DEPS"]) + combine_dependenices(plugin_sections["INT_DEPS"], global_sections["INT_DEPS"]) return global_sections, plugin_sections -def process_main_config_sections(name: str, plugin_sections: dict, global_sections: dict) -> None: +def process_main_config_sections( + name: str, plugin_sections: dict, global_sections: dict +) -> None: """Process the main config sections and write them to the plugins pyproject.toml file.""" - with open(f'./{GLOBAL_PLUGIN_DIR}/{TAGGED_FILES.PYPROJECT.value}', 'r') as in_file: + with open(f"./{GLOBAL_PLUGIN_DIR}/{TAGGED_FILES.PYPROJECT.value}", "r") as in_file: content = in_file.readlines() sections = [section.value for section in MangagedPoetrySections] output = [] - with open(f'./{name}/{TAGGED_FILES.PYPROJECT.value}', 'w') as out_file: + with open(f"./{name}/{TAGGED_FILES.PYPROJECT.value}", "w") as out_file: i = 0 while i < len(content): if content[i].startswith(MangagedPoetrySections.META.value): - output.append(MangagedPoetrySections.META.value + '\n') - [output.append(line + '\n') - for line in plugin_sections['META']] - output.append('\n') + output.append(MangagedPoetrySections.META.value + "\n") + [output.append(line + "\n") for line in plugin_sections["META"]] + output.append("\n") i += 1 for section in sections: - if (content[i].startswith(section)): - i += get_section_output(i, content, - output, global_sections[MangagedPoetrySections(content[i].strip()).name], content[i]) + if content[i].startswith(section): + i += get_section_output( + i, + content, + output, + global_sections[ + MangagedPoetrySections(content[i].strip()).name + ], + content[i], + ) else: i += 1 out_file.writelines(output) - replace_plugin_tag( - f'./{name}/{TAGGED_FILES.PYPROJECT.value}', PluginInfo(name)) + replace_plugin_tag(f"./{name}/{TAGGED_FILES.PYPROJECT.value}", PluginInfo(name)) def get_and_combine_integration_poetry_sections(name: str) -> tuple[dict, dict]: """Get the global integration sections and combine them with the 
plugin specific sections.""" global_sections = deepcopy(sections) plugin_sections = deepcopy(sections) - with open(f'./{GLOBAL_PLUGIN_DIR}/{TAGGED_FILES.PYPROJECT_INTEGRATION.value}', 'r') as file: + with open( + f"./{GLOBAL_PLUGIN_DIR}/{TAGGED_FILES.PYPROJECT_INTEGRATION.value}", "r" + ) as file: filedata = file.read() extract_common_sections(filedata, global_sections) - with open(f'./{name}/{TAGGED_FILES.PYPROJECT_INTEGRATION.value}', 'r') as file: + with open(f"./{name}/{TAGGED_FILES.PYPROJECT_INTEGRATION.value}", "r") as file: filedata = file.read() extract_common_sections(filedata, plugin_sections) - combine_dependenices(plugin_sections['DEPS'], global_sections['DEPS']) - combine_dependenices( - plugin_sections['DEV_DEPS'], global_sections['DEV_DEPS']) + combine_dependenices(plugin_sections["DEPS"], global_sections["DEPS"]) + combine_dependenices(plugin_sections["DEV_DEPS"], global_sections["DEV_DEPS"]) return global_sections, plugin_sections -def process_integration_config_sections(name: str, plugin_sections: dict, global_sections: dict) -> None: +def process_integration_config_sections( + name: str, plugin_sections: dict, global_sections: dict +) -> None: """Process the integration test config sections and write them to the plugins integration/pyproject.toml file.""" - with open(f'./{GLOBAL_PLUGIN_DIR}/{TAGGED_FILES.PYPROJECT_INTEGRATION.value}', 'r') as in_file: + with open( + f"./{GLOBAL_PLUGIN_DIR}/{TAGGED_FILES.PYPROJECT_INTEGRATION.value}", "r" + ) as in_file: content = in_file.readlines() sections = [section.value for section in MangagedPoetrySections] output = [] - with open(f'./{name}/{TAGGED_FILES.PYPROJECT_INTEGRATION.value}', 'w') as out_file: + with open(f"./{name}/{TAGGED_FILES.PYPROJECT_INTEGRATION.value}", "w") as out_file: i = 0 while i < len(content): if content[i].startswith(MangagedPoetrySections.META.value): - output.append(MangagedPoetrySections.META.value + '\n') - [output.append(line + '\n') - for line in plugin_sections['META']] + output.append(MangagedPoetrySections.META.value + "\n") + [output.append(line + "\n") for line in plugin_sections["META"]] i += 1 - output.append('\n') + output.append("\n") for section in sections: - if (content[i].startswith(section)): + if content[i].startswith(section): i += get_section_output( i, content, output, global_sections[ MangagedPoetrySections(content[i].strip()).name ], content[i], ) else: i += 1 out_file.writelines(output) @@ -256,23 +277,25 @@ def process_integration_config_sections(name: str, plugin_sections: dict, global def replace_global_sections(name: str) -> None: """ - Combine the global sections with the plugin specific sections and write them to the plugins pyproject.toml file - with the global dependencies overriding the plugin dependencies. + Combine the global sections with the plugin specific sections and write them to the plugins pyproject.toml file + with the global dependencies overriding the plugin dependencies. 
""" - global_sections, plugin_sections = get_and_combine_main_poetry_sections( - name) + global_sections, plugin_sections = get_and_combine_main_poetry_sections(name) process_main_config_sections(name, plugin_sections, global_sections) - global_sections, plugin_sections = get_and_combine_integration_poetry_sections( - name) + global_sections, plugin_sections = get_and_combine_integration_poetry_sections(name) process_integration_config_sections(name, plugin_sections, global_sections) def is_plugin_directory(plugin_name: str) -> bool: # If there is a drirectory which is not a plugin it should be ignored here - return os.path.isdir(plugin_name) and plugin_name != GLOBAL_PLUGIN_DIR and not plugin_name.startswith('.') + return ( + os.path.isdir(plugin_name) + and plugin_name != GLOBAL_PLUGIN_DIR + and not plugin_name.startswith(".") + ) -def main(arg_1 = None): +def main(arg_1=None): options = """ What would you like to do? @@ -282,15 +305,15 @@ def main(arg_1 = None): (4) Update plugins description with supported aries-cloudagent version (5) Get the plugins that upgraded since last release (6) Exit \n\nInput: """ - + if arg_1: selection = arg_1 else: selection = input(options) - - if (selection != "4" and selection != "5"): + + if selection != "4" and selection != "5": print("Checking poetry is available...") - response = os.system('which poetry') + response = os.system("which poetry") if response == "": print("Poetry is not available. Please install poetry.") exit(1) @@ -299,50 +322,47 @@ def main(arg_1 = None): # Create a new plugin msg = """Creating a new plugin: This will create a blank plugin with all the common files and folders needed to get started developing and testing.""" print(msg) - name = input( - "Enter the plugin name (recommended to use snake_case): ") + name = input("Enter the plugin name (recommended to use snake_case): ") if name == "": print("You must enter a plugin name") exit(1) - version = str( - input("Enter the plugin version (default is 0.1.0): ") or "0.1.0") - description = input( - "Enter the plugin description (default is ''): ") or "" + version = str(input("Enter the plugin version (default is 0.1.0): ") or "0.1.0") + description = input("Enter the plugin description (default is ''): ") or "" plugin_info = PluginInfo(name, version, description) - os.makedirs(f'./{name}/{name}/v1_0') + os.makedirs(f"./{name}/{name}/v1_0") copy_all_common_files_for_new_plugin(plugin_info) - os.system(f'cd {name} && poetry install --all-extras') + os.system(f"cd {name} && poetry install --all-extras") elif selection == "2": # Update common poetry sections msg = """Updating all plugin common poetry sections: This will take the global sections from the plugin_globals and combine them with the plugin specific sections, and install and update the lock file \n""" print(msg) - for plugin_name in os.listdir('./'): + for plugin_name in os.listdir("./"): if is_plugin_directory(plugin_name): - print(f'Updating common poetry sections in {plugin_name}\n') + print(f"Updating common poetry sections in {plugin_name}\n") replace_global_sections(plugin_name) + os.system(f"cd {plugin_name} && rm poetry.lock && poetry lock") os.system( - f'cd {plugin_name} && rm poetry.lock && poetry lock') - os.system( - f'cd {plugin_name}/integration && rm poetry.lock && poetry lock') - + f"cd {plugin_name}/integration && rm poetry.lock && poetry lock" + ) + elif selection == "3": # Upgrade plugin globals lock file msg = """Upgrade plugin_global dependencies \n""" print(msg) - os.system('cd plugin_globals && poetry 
lock') + os.system("cd plugin_globals && poetry lock") # Update plugins description with supported aries-cloudagent version elif selection == "4": """ - 1. Update the description of each plugin with the supported aries-cloudagent version. - 2. Output text for the release notes in markdown form. + 1. Update the description of each plugin with the supported aries-cloudagent version. + 2. Output text for the release notes in markdown form. """ # Get the aries-cloudagent version from the global poetry.lock file - with open('./plugin_globals/poetry.lock', 'r') as file: + with open("./plugin_globals/poetry.lock", "r") as file: for line in file: if 'name = "aries-cloudagent"' in line: next_line = next(file, None) @@ -354,50 +374,57 @@ def main(arg_1 = None): # Markdown table header print("| Plugin Name | Supported aries-cloudagent version |") print("| --- | --- |") - for plugin_name in sorted(os.listdir('./')): + for plugin_name in sorted(os.listdir("./")): if is_plugin_directory(plugin_name): # Plugin specific aries-cloudagent version - with open(f'./{plugin_name}/poetry.lock', 'r') as file: + with open(f"./{plugin_name}/poetry.lock", "r") as file: for line in file: if 'name = "aries-cloudagent"' in line: next_line = next(file, None) version = re.findall(r'"([^"]*)"', next_line) break - # Extract the description from the pyproject.toml file - with open(f'./{plugin_name}/pyproject.toml', 'r') as file: + # Extract the description from the pyproject.toml file + with open(f"./{plugin_name}/pyproject.toml", "r") as file: filedata = file.read() - linedata = filedata.split('\n') + linedata = filedata.split("\n") for i in range(len(linedata)): line = linedata[i] - if 'description = ' in line: + if "description = " in line: description = re.findall(r'"([^"]*)"', line) description_line = line break # Replace the description with the supported aries-cloudagent version at the end - if description[0].find('Supported aries-cloudagent version') != -1: - description[0] = description[0].split(' (Supported aries-cloudagent version')[0] - - filedata = filedata.replace(description_line, f'description = "{description[0]} (Supported aries-cloudagent version: {version[0]}) "') + if description[0].find("Supported aries-cloudagent version") != -1: + description[0] = description[0].split( + " (Supported aries-cloudagent version" + )[0] - with open(f'./{plugin_name}/pyproject.toml', 'w') as file: + filedata = filedata.replace( + description_line, + f'description = "{description[0]} (Supported aries-cloudagent version: {version[0]}) "', + ) + + with open(f"./{plugin_name}/pyproject.toml", "w") as file: file.write(filedata) - print(f'|{plugin_name} | {version[0]}|') + print(f"|{plugin_name} | {version[0]}|") + print("***") print(" - ") elif selection == "5": """ - Extact the plugins from the RELEASES.md and determine which plugins which can be - upgraded or are new based off of the global aries-cloudagent version. + Extract the plugins from the RELEASES.md and determine which plugins can be + upgraded or are new based off of the global aries-cloudagent version. """ # All the plugins. Used to determine which plugins are new. 
- all_plugins = [plugin for plugin in os.listdir('./') if is_plugin_directory(plugin)] + all_plugins = [ + plugin for plugin in os.listdir("./") if is_plugin_directory(plugin) + ] # Get the aries-cloudagent version from the global poetry.lock file - with open('./plugin_globals/poetry.lock', 'r') as file: + with open("./plugin_globals/poetry.lock", "r") as file: for line in file: if 'name = "aries-cloudagent"' in line: next_line = next(file, None) @@ -405,15 +432,19 @@ def main(arg_1 = None): break # Extract the plugins and versions from the last release in the RELEASES.md file - with open('RELEASES.md', 'r') as file: + with open("RELEASES.md", "r") as file: last_releases = [] for line in file: - if f'### Release v{global_version[0]}' in line: + if f"### Release v{global_version[0]}" in line: line = next(file) line = next(file) line = next(file) - while '***' not in line: - if line != '| Plugin Name | Supported aries-cloudagent version |\n' and line != '| --- | --- |\n': + while "***" not in line: + if ( + line + != "| Plugin Name | Supported aries-cloudagent version |\n" + and line != "| --- | --- |\n" + ): last_releases.append(line.strip()) line = next(file) break @@ -425,18 +456,20 @@ def main(arg_1 = None): # Get all released plugins and the plugins not on the global version for item in last_releases: - released_plugins.append(item.split('|')[1].strip()) - if (item.split('|')[2].strip() == global_version[0]): - plugins_on_old_release.append(item.split('|')[1].strip()) + released_plugins.append(item.split("|")[1].strip()) + if item.split("|")[2].strip() == global_version[0]: + plugins_on_old_release.append(item.split("|")[1].strip()) # If there are releases in the RELEASES.md file then look for new plugins and add them to plugins on old release if last_releases: - new_plugins = [plugin for plugin in all_plugins if plugin not in released_plugins] + new_plugins = [ + plugin for plugin in all_plugins if plugin not in released_plugins + ] for plugin in new_plugins: plugins_on_old_release.append(plugin) output = "" for plugin in plugins_on_old_release: - output += f'{plugin} ' + output += f"{plugin} " print(output) elif selection == "6": @@ -444,12 +477,11 @@ def main(arg_1 = None): exit(0) else: print("Invalid selection. Please try again.") - main() - + main() if __name__ == "__main__": try: main(sys.argv[1]) except Exception: - main() \ No newline at end of file + main() diff --git a/rpc/integration/tests/test_rpc.py b/rpc/integration/tests/test_rpc.py index 531bd9271..5ac9b24b9 100644 --- a/rpc/integration/tests/test_rpc.py +++ b/rpc/integration/tests/test_rpc.py @@ -1,7 +1,8 @@ -import pytest import time -from . import Agent, BOB, ALICE +import pytest + +from . 
+from . import ALICE, BOB, Agent
 
 rpc_request = {"jsonrpc": "2.0", "method": "add", "params": [1, 2], "id": 1}
 rpc_response = {"jsonrpc": "2.0", "result": 3, "id": 1}
diff --git a/rpc/rpc/v1_0/handlers.py b/rpc/rpc/v1_0/handlers.py
index 282466263..dcdc7c1cc 100644
--- a/rpc/rpc/v1_0/handlers.py
+++ b/rpc/rpc/v1_0/handlers.py
@@ -1,9 +1,10 @@
 """Message handlers for DIDComm RPC v1.0."""
 
 import json
+
 from aries_cloudagent.messaging.base_handler import (
-    BaseResponder,
     BaseHandler,
+    BaseResponder,
     RequestContext,
 )
 from aries_cloudagent.storage.base import BaseStorage
diff --git a/rpc/rpc/v1_0/messages.py b/rpc/rpc/v1_0/messages.py
index 6bf968e33..1dc3aae33 100644
--- a/rpc/rpc/v1_0/messages.py
+++ b/rpc/rpc/v1_0/messages.py
@@ -3,17 +3,8 @@
 from aries_cloudagent.messaging.agent_message import AgentMessage, AgentMessageSchema
 from marshmallow import ValidationError, pre_dump
 
-from rpc.v1_0.message_types import (
-    DRPC_REQUEST,
-    DRPC_RESPONSE,
-    PROTOCOL_PACKAGE,
-)
-from rpc.v1_0.models import (
-    RPC_REQUEST_EXAMPLE,
-    RPC_RESPONSE_EXAMPLE,
-    Request,
-    Response,
-)
+from rpc.v1_0.message_types import DRPC_REQUEST, DRPC_RESPONSE, PROTOCOL_PACKAGE
+from rpc.v1_0.models import RPC_REQUEST_EXAMPLE, RPC_RESPONSE_EXAMPLE, Request, Response
 
 
 class DRPCRequestMessage(AgentMessage):
diff --git a/rpc/rpc/v1_0/models.py b/rpc/rpc/v1_0/models.py
index 90d56e32a..5c9b0fecd 100644
--- a/rpc/rpc/v1_0/models.py
+++ b/rpc/rpc/v1_0/models.py
@@ -2,12 +2,10 @@
 
 from typing import Any, List, Mapping, Optional, Union
 
-from marshmallow import fields, validate, ValidationError, validates_schema
-
 from aries_cloudagent.core.profile import ProfileSession
 from aries_cloudagent.messaging.models.base import BaseModel, BaseModelSchema
 from aries_cloudagent.messaging.models.base_record import BaseRecord, BaseRecordSchema
-
+from marshmallow import ValidationError, fields, validate, validates_schema
 
 RPC_REQUEST_EXAMPLE = {
     "jsonrpc": "2.0",
diff --git a/rpc/rpc/v1_0/routes.py b/rpc/rpc/v1_0/routes.py
index e25fde53c..ac9c3334d 100644
--- a/rpc/rpc/v1_0/routes.py
+++ b/rpc/rpc/v1_0/routes.py
@@ -11,21 +11,26 @@
     querystring_schema,
     response_schema,
 )
-from marshmallow import fields, validate
-
 from aries_cloudagent.admin.request_context import AdminRequestContext
 from aries_cloudagent.connections.models.conn_record import ConnRecord
 from aries_cloudagent.messaging.models.base import (
     BaseModel,
-    BaseModelSchema,
     BaseModelError,
+    BaseModelSchema,
 )
 from aries_cloudagent.messaging.models.base_record import match_post_filter
 from aries_cloudagent.messaging.models.openapi import OpenAPISchema
 from aries_cloudagent.messaging.valid import UUID4_EXAMPLE
 from aries_cloudagent.storage.base import BaseStorage, StorageRecord
-from aries_cloudagent.storage.error import StorageNotFoundError, StorageError
+from aries_cloudagent.storage.error import StorageError, StorageNotFoundError
+from marshmallow import fields, validate
+from rpc.v1_0.messages import (
+    DRPCRequestMessage,
+    DRPCRequestMessageSchema,
+    DRPCResponseMessage,
+    DRPCResponseMessageSchema,
+)
 from rpc.v1_0.models import (
     RPC_REQUEST_EXAMPLE,
     RPC_RESPONSE_EXAMPLE,
@@ -34,12 +39,6 @@
     Request,
     Response,
 )
-from rpc.v1_0.messages import (
-    DRPCRequestMessage,
-    DRPCRequestMessageSchema,
-    DRPCResponseMessage,
-    DRPCResponseMessageSchema,
-)
 
 LOGGER = logging.getLogger(__name__)
diff --git a/rpc/rpc/v1_0/tests/test_messages.py b/rpc/rpc/v1_0/tests/test_messages.py
index 21287a40f..a14405866 100644
--- a/rpc/rpc/v1_0/tests/test_messages.py
+++ b/rpc/rpc/v1_0/tests/test_messages.py
@@ -1,5 +1,4 @@
 import pytest
-
 from marshmallow import ValidationError
 
 from rpc.v1_0.messages import DRPCResponseMessageSchema
diff --git a/rpc/rpc/v1_0/tests/test_models.py b/rpc/rpc/v1_0/tests/test_models.py
index 6c9d91d2e..953773323 100644
--- a/rpc/rpc/v1_0/tests/test_models.py
+++ b/rpc/rpc/v1_0/tests/test_models.py
@@ -1,13 +1,12 @@
 import pytest
-
 from marshmallow import ValidationError
 
 from rpc.v1_0.models import (
     DRPCRecordSchema,
     RPCBaseModelSchema,
+    RPCErrorModelSchema,
     RPCRequestModelSchema,
     RPCResponseModelSchema,
-    RPCErrorModelSchema,
 )
 
 rpc_base = {"jsonrpc": "2.0"}
diff --git a/rpc/rpc/v1_0/tests/test_routes.py b/rpc/rpc/v1_0/tests/test_routes.py
index 07bbfbe2d..9d61787b8 100644
--- a/rpc/rpc/v1_0/tests/test_routes.py
+++ b/rpc/rpc/v1_0/tests/test_routes.py
@@ -237,7 +237,9 @@ async def test_http_internal_server_error_thrown_on_update_storage_error(self, *
         )
 
         self.storage.add_record = async_mock.CoroutineMock()
-        self.storage.update_record = async_mock.CoroutineMock(side_effect=StorageError())
+        self.storage.update_record = async_mock.CoroutineMock(
+            side_effect=StorageError()
+        )
 
         with self.assertRaises(web.HTTPInternalServerError):
             await test_module.drpc_send_request(self.request)
diff --git a/rpc/rpc/v1_0/tests/test_schemas.py b/rpc/rpc/v1_0/tests/test_schemas.py
index 00abb2bc3..9463d109d 100644
--- a/rpc/rpc/v1_0/tests/test_schemas.py
+++ b/rpc/rpc/v1_0/tests/test_schemas.py
@@ -1,7 +1,6 @@
-from marshmallow import ValidationError
 import pytest
-
 from aries_cloudagent.messaging.valid import UUID4_EXAMPLE
+from marshmallow import ValidationError
 
 from rpc.v1_0.messages import DRPCRequestMessageSchema, DRPCResponseMessageSchema
 from rpc.v1_0.routes import DRPCRequestJSONSchema, DRPCResponseJSONSchema