chore(deps-dev): Bump pytest-asyncio from 0.14.0 to 0.23.6 in /oid4vci #109

Closed
wants to merge 8 commits
19 changes: 10 additions & 9 deletions oid4vci/poetry.lock

Some generated files are not rendered by default.

2 changes: 1 addition & 1 deletion oid4vci/pyproject.toml
@@ -24,7 +24,7 @@ ruff = "^0.1.2"
black = "~23.7.0"
asynctest = "0.13.0"
pytest = "~7.4.0"
pytest-asyncio = "~0.14.0"
pytest-asyncio = "~0.23.6"
pytest-cov = "^3.0.0"
pytest-ruff = "^0.1.1"

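Across the jump from 0.14 to 0.23, pytest-asyncio moved to explicit modes (strict by default) and reworked its event-loop fixtures, but tests that already carry the @pytest.mark.asyncio marker, like the one in test_events.py below, generally keep working. A minimal sketch of that style, with a hypothetical async fixture and method:

import pytest

@pytest.mark.asyncio  # required per test unless asyncio_mode is set to "auto"
async def test_plugin_roundtrip(example_client):
    # example_client and fetch_status are hypothetical; the point is that the
    # coroutine itself is collected and awaited by pytest-asyncio.
    result = await example_client.fetch_status()
    assert result is not None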
936 changes: 499 additions & 437 deletions redis_events/integration/poetry.lock

Large diffs are not rendered by default.

6 changes: 3 additions & 3 deletions redis_events/integration/pyproject.toml
@@ -11,11 +11,11 @@ pytest-asyncio = "^0.21.0"
asynctest = "^0.13.0"
requests = "^2.31.0"
aiohttp = "^3.7.4"
aries-cloudagent = { version = ">=0.10.3, < 1.0.0" }
fastapi = "^0.109.1"
aries-cloudagent = { version = "0.11.0" }
fastapi = "^0.109.2"
nest-asyncio = "^1.5.5"
pydantic = "^1.8.2"
redis = "^4.1.4"
redis = "^5.0.3"
uvicorn = "0.16.0"

[tool.poetry.dev-dependencies]
3 changes: 2 additions & 1 deletion redis_events/integration/tests/test_events.py
@@ -57,7 +57,8 @@ def established_connection(faber, alice):
@pytest.mark.asyncio
async def test_base_redis_keys_are_set(redis):
time.sleep(1)
assert await redis.lrange("acapy-record-base", 0, -1) != []
# skip `acapy-record-base` because our custom implementation is not interested in base keys!
# assert await redis.lrange("acapy-record-base", 0, -1) != []
assert await redis.lrange("acapy-record-with-state-base", 0, -1) != []


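The assertions above await lrange on a redis fixture, which lines up with the async client API in redis-py 5 (bumped in the same PR). A minimal sketch of such a fixture, assuming a plain redis.asyncio.Redis client and a local URL; the real integration fixture may use RedisCluster and different connection settings:

import pytest
from redis.asyncio import Redis

@pytest.fixture
async def redis():
    # Hypothetical connection URL; the integration environment supplies its own.
    client = Redis.from_url("redis://localhost:6379")
    yield client
    await client.aclose()  # aclose() is the non-deprecated close call in redis-py 5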
1,114 changes: 581 additions & 533 deletions redis_events/poetry.lock

Large diffs are not rendered by default.

10 changes: 5 additions & 5 deletions redis_events/pyproject.toml
@@ -9,25 +9,25 @@ python = "^3.9"

# Define ACA-Py as an optional/extra dependency so it can be
# explicitly installed with the plugin if desired.
aries-cloudagent = { version = ">=0.10.3, < 1.0.0", optional = true }
aries-cloudagent = { version = "0.11.0", optional = true }
aiohttp = "^3.7.4"
fastapi = "^0.109.2"
nest-asyncio = "^1.5.5"
pydantic = "^1.8.2"
redis = "^4.1.4"
redis = "^5.0.3"
uvicorn = "0.16.0"

[tool.poetry.extras]
aca-py = ["aries-cloudagent"]

[tool.poetry.dev-dependencies]
ruff = "^0.1.2"
ruff = "^0.3.0"
black = "23.7.0"
asynctest = "0.13.0"
pytest = "~7.4.0"
pytest-asyncio = "0.14.0"
pytest-asyncio = "^0.21.0"
pytest-cov = "2.10.1"
pytest-ruff = "^0.1.1"
pytest-ruff = "^0.3"
mock= "~4.0"
pre-commit = "^2.12.1"
pytest-mock = "^3.6.1"
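Because aries-cloudagent is declared as an optional extra (aca-py), code that wants a clear failure when the plugin is installed without it can guard the import. An illustrative sketch, not taken from this repo:

import importlib.util

if importlib.util.find_spec("aries_cloudagent") is None:
    raise RuntimeError(
        "redis_events expects ACA-Py to be present; install the plugin with the "
        "'aca-py' extra or alongside an existing aries-cloudagent installation."
    )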
38 changes: 28 additions & 10 deletions redis_events/redis_events/v1_0/redis_queue/events/__init__.py
@@ -4,6 +4,7 @@
import json
import logging
import re
import time
from string import Template
from typing import Any, Optional, cast

@@ -84,11 +85,23 @@ def process_event_payload(event_payload: Any):

async def handle_event(profile: Profile, event: EventWithMetadata):
"""Push events from aca-py events."""
config_events = get_config(profile.settings).event or EventConfig.default()
pattern = event.metadata.pattern.pattern
template = config_events.event_topic_maps.get(pattern)

if not template:
LOGGER.warning(f"Could not infer template from pattern: {pattern}")
return

if "-with-state" not in template:
# We are only interested in state change webhooks. This avoids duplicate events
return

redis = profile.inject_or(RedisCluster)
if not redis:
redis = await redis_setup(profile, event)

LOGGER.info("Handling event: %s", event)
LOGGER.debug("Handling event: %s", event)
wallet_id = cast(Optional[str], profile.settings.get("wallet.id"))
try:
event_payload = process_event_payload(event.payload)
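The early return added above keys off the topic template resolved from EventConfig.event_topic_maps: only templates containing "-with-state" are forwarded, which is also why the acapy-record-base assertion was dropped from the integration test. A rough illustration of the lookup with hypothetical mapping entries (the real defaults live in this plugin's config module):

# Hypothetical mapping: regex of the ACA-Py event pattern -> Redis topic template.
event_topic_maps = {
    "^acapy::record::([^:]*)$": "acapy-record-base",
    "^acapy::record::([^:]*)::([^:]*)$": "acapy-record-with-state-base",
}

pattern = "^acapy::record::([^:]*)$"      # pattern attached to the incoming event
template = event_topic_maps.get(pattern)  # -> "acapy-record-base"
if template and "-with-state" not in template:
    pass  # skipped: only state-change records are pushed to Redis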
@@ -109,17 +122,22 @@ async def handle_event(profile: Profile, event: EventWithMetadata):
}
webhook_urls = profile.settings.get("admin.webhook_urls")
try:
config_events = get_config(profile.settings).event or EventConfig.default()
template = config_events.event_topic_maps[event.metadata.pattern.pattern]
redis_topic = Template(template).substitute(**payload)
LOGGER.info(f"Sending message {payload} with topic {redis_topic}")
LOGGER.debug(f"Sending message {payload} with topic {redis_topic}")

origin = profile.settings.get("default_label")
group_id = profile.settings.get("wallet.group_id")

metadata = {"time_ns": time.time_ns()}
metadata_wallet_id = {"x-wallet-id": wallet_id} if wallet_id else {}
metadata_group_id = {"group_id": group_id} if group_id else {}
metadata_origin = {"origin": origin} if origin else {}
metadata.update(metadata_wallet_id)
metadata.update(metadata_group_id)
metadata.update(metadata_origin)

outbound = str.encode(
json.dumps(
{
"payload": payload,
"metadata": {"x-wallet-id": wallet_id} if wallet_id else {},
}
),
json.dumps({"payload": payload, "metadata": metadata}),
)
await redis.rpush(
redis_topic,
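After this change, every message rpush'd to the topic is a JSON document with a payload plus a metadata object carrying time_ns and, when available, x-wallet-id, group_id, and origin. A minimal consumer sketch that pops and decodes one message with the same redis-py async client; the topic name is illustrative:

import json

from redis.asyncio import Redis

async def consume_one(redis: Redis, topic: str = "acapy-record-with-state-base"):
    raw = await redis.blpop(topic, timeout=5)  # returns (key, value) or None on timeout
    if raw is None:
        return None
    _key, value = raw
    message = json.loads(value)
    # Fields written by handle_event above.
    payload = message["payload"]
    metadata = message["metadata"]
    print(metadata.get("time_ns"), metadata.get("x-wallet-id"), metadata.get("group_id"))
    return payload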