Cache alerts summary (#15)
* Cache alerts summary for 60 seconds

* Build docker image on PRs to main branch

* Add handling of PRs in docker workflow

* Cache /enclosure/status

* Correctly cache things

* Temporarily move CluClient to lvmapi

* Add defaults

* Back to using CluClient from lvmopstools.clu

* Name the CluClient client
albireox authored Dec 5, 2024
1 parent 4d2feea commit 6816c2e
Showing 7 changed files with 431 additions and 300 deletions.
9 changes: 9 additions & 0 deletions .github/workflows/docker.yml
@@ -8,6 +8,11 @@ on:
- '*'
paths-ignore:
- 'docs/**'
pull_request:
branches:
- main
paths-ignore:
- 'docs/**'

jobs:
docker:
@@ -27,6 +32,10 @@ jobs:
then
BRANCH=$(echo ${GITHUB_REF#refs/heads/} | sed -r 's/[\/]/_/g')
echo TAGS=$USER/$APP:$BRANCH >> $GITHUB_OUTPUT
elif [[ $GITHUB_REF == refs/pull/* ]]
then
BRANCH=${{ github.head_ref || github.ref_name }}
echo TAGS=$USER/$APP:$BRANCH >> $GITHUB_OUTPUT
else
echo TAGS=$USER/$APP:${GITHUB_REF#refs/tags/} >> $GITHUB_OUTPUT
fi
2 changes: 1 addition & 1 deletion pyproject.toml
@@ -13,7 +13,7 @@ dependencies = [
"sdsstools>=1.8.1",
"fastapi[standard]>=0.112.0",
"lvmgort>=1.1.2",
"lvmopstools[influxdb,kubernetes,schedule]>=0.4.1",
"lvmopstools[influxdb,kubernetes,schedule]>=0.4.4",
"gunicorn>=22.0.0",
"uvicorn[standard]>=0.24.0",
"sdss-clu>=2.2.1",
18 changes: 11 additions & 7 deletions src/lvmapi/routers/alerts.py
@@ -13,12 +13,13 @@
import warnings

import polars
from fastapi import APIRouter, Request
from fastapi import APIRouter
from pydantic import BaseModel

from lvmopstools.weather import get_weather_data, is_weather_data_safe

from lvmapi.tools.alerts import enclosure_alerts, spec_temperature_alerts
from lvmapi.tools.general import cache_response


class AlertsSummary(BaseModel):
@@ -43,9 +44,12 @@ class AlertsSummary(BaseModel):
@router.get("")
@router.get("/")
@router.get("/summary")
async def summary(request: Request) -> AlertsSummary:
@cache_response("alerts:summary", ttl=60, response_model=AlertsSummary)
async def summary():
"""Summary of alerts."""

from lvmapi.app import app

now = time.time()

tasks: list[asyncio.Task] = []
@@ -100,11 +104,11 @@ async def summary(request: Request) -> AlertsSummary:
door_alert = enclosure_alerts_response.get("door_alert", None)

# These fake states are just for testing.
if request.app.state.use_fake_states:
humidity_alert = request.app.state.fake_states["humidity_alert"]
wind_alert = request.app.state.fake_states["wind_alert"]
rain_sensor_alarm = request.app.state.fake_states["rain_alert"]
door_alert = request.app.state.fake_states["door_alert"]
if app.state.use_fake_states:
humidity_alert = app.state.fake_states["humidity_alert"]
wind_alert = app.state.fake_states["wind_alert"]
rain_sensor_alarm = app.state.fake_states["rain_alert"]
door_alert = app.state.fake_states["door_alert"]

return AlertsSummary(
humidity_alert=humidity_alert,
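For reference, a minimal sketch of exercising the newly cached endpoint (assumptions, not part of the commit: the router is mounted under /alerts, Redis runs on localhost:6379, and the app is importable as lvmapi.app.app):

from fastapi.testclient import TestClient

from lvmapi.app import app

client = TestClient(app)

# The first call computes the summary and caches it; calls within the
# 60-second TTL reuse the cached payload, so both responses match.
first = client.get("/alerts/summary").json()
second = client.get("/alerts/summary").json()
assert first == second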
8 changes: 6 additions & 2 deletions src/lvmapi/routers/enclosure.py
@@ -21,6 +21,7 @@

from lvmapi.auth import AuthDependency
from lvmapi.tasks import move_dome_task
from lvmapi.tools.general import cache_response
from lvmapi.tools.rabbitmq import send_clu_command


@@ -35,7 +36,9 @@ class PLCStatus(BaseModel):
labels: list[str] = []

@field_validator("value", mode="before")
def cast_value(cls, value: str) -> int:
def cast_value(cls, value: str | int) -> int:
if isinstance(value, int):
return value
return int(value, 16)

@field_validator("labels", mode="after")
@@ -91,7 +94,8 @@ class NPSBody(BaseModel):
@router.get("")
@router.get("/")
@router.get("/status")
async def status() -> EnclosureStatus:
@cache_response("enclosure:status", ttl=60, response_model=EnclosureStatus)
async def status():
"""Performs an emergency shutdown of the enclosure and telescopes."""

try:
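The cast_value change is needed presumably because cached responses are rebuilt from JSON, where the value has already been converted to an integer; a standalone sketch of the pattern (StatusSketch is a hypothetical stand-in for PLCStatus):

from pydantic import BaseModel, field_validator


class StatusSketch(BaseModel):
    value: int

    @field_validator("value", mode="before")
    def cast_value(cls, value: str | int) -> int:
        # Integers (e.g. values round-tripped through the JSON cache) pass
        # through; strings are still parsed as hexadecimal, as before.
        if isinstance(value, int):
            return value
        return int(value, 16)


print(StatusSketch(value="0x1F").value)  # 31
print(StatusSketch(value=31).value)  # 31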
74 changes: 69 additions & 5 deletions src/lvmapi/tools/general.py
@@ -9,17 +9,25 @@
from __future__ import annotations

import functools
from datetime import datetime, timedelta
import json
from datetime import datetime, timedelta, timezone

from typing import Any
from typing import Any, Type, TypeVar

import psycopg
import psycopg.sql
from aiocache import Cache
from fastapi import HTTPException
from pydantic import BaseModel

from lvmapi import config


__all__ = ["timed_cache", "get_db_connection", "insert_to_database"]
__all__ = [
"timed_cache",
"get_db_connection",
"insert_to_database",
]


def timed_cache(seconds: float):
@@ -39,15 +47,15 @@ def timed_cache(seconds: float):

def _wrapper(f):
update_delta = timedelta(seconds=seconds)
next_update = datetime.utcnow() + update_delta
next_update = datetime.now(timezone.utc) + update_delta

# Apply @lru_cache to f with no cache size limit
f = functools.lru_cache(None)(f)

@functools.wraps(f)
def _wrapped(*args, **kwargs):
nonlocal next_update
now = datetime.utcnow()
now = datetime.now(timezone.utc)
if now >= next_update:
f.cache_clear()
next_update = now + update_delta
@@ -123,3 +131,59 @@ async def insert_to_database(
for row in data:
values = [row.get(col, None) for col in columns]
await acursor.execute(query, values)


T = TypeVar("T", bound=BaseModel)


def cache_response(
key: str,
ttl: int = 60,
namespace: str = "lvmapi",
response_model: Type[T] | None = None,
):
"""Caching decorator for FastAPI endpoints.
See https://dev.to/sivakumarmanoharan/caching-in-fastapi-unlocking-high-performance-development-20ej
"""

def decorator(func):
@functools.wraps(func)
async def wrapper(*args, **kwargs):
cache_key = f"{namespace}:{key}"

assert Cache.REDIS
cache = Cache.REDIS(
endpoint="localhost", # type: ignore
port=6379, # type: ignore
namespace=namespace,
)

# Try to retrieve data from cache
cached_value = await cache.get(cache_key)
if cached_value:
if response_model:
return response_model(**json.loads(cached_value))
return json.loads(cached_value)

# Call the actual function if cache is not hit
response: T | Any = await func(*args, **kwargs)

try:
# Store the response in Redis with a TTL
if response_model:
cacheable = response.model_dump_json()
else:
cacheable = json.dumps(response)

await cache.set(cache_key, cacheable, ttl=ttl)

except Exception as e:
raise HTTPException(status_code=500, detail=f"Error caching data: {e}")

return response

return wrapper

return decorator
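A hedged usage sketch of the new decorator on a hypothetical endpoint (route, cache key, and payload are illustrative; a Redis server on localhost:6379 is assumed, as hard-coded in the decorator):

from fastapi import APIRouter

from lvmapi.tools.general import cache_response

router = APIRouter(prefix="/example")


@router.get("/uptime")
@cache_response("example:uptime", ttl=30)
async def uptime():
    # Without response_model the return value is stored with json.dumps(), so it
    # must be JSON-serialisable; endpoints returning Pydantic models should pass
    # response_model= so cache hits are re-validated into the model.
    return {"uptime_seconds": 12345.6}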
6 changes: 6 additions & 0 deletions src/lvmapi/tools/rabbitmq.py
@@ -8,6 +8,8 @@

from __future__ import annotations

import uuid

from typing import TYPE_CHECKING

from lvmopstools.clu import CluClient, send_clu_command
@@ -23,6 +25,10 @@
__all__ = ["CluClient", "send_clu_command", "ping_actors"]


# CluClient is a singleton. Create a client with an lvmapi-specific name.
CluClient(name=f"lvmapi-{uuid.uuid4()}")


async def ping_actors(actors: list[str] | None = None):
"""Pings all actors and returns a list of replies."""

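The import-time call above relies on CluClient being a singleton; a sketch of the behaviour assumed here (namely that lvmopstools.clu keeps returning the first, already-named instance):

import lvmapi.tools.rabbitmq  # noqa: F401  (runs the named CluClient() call above)

from lvmopstools.clu import CluClient

client_a = CluClient()
client_b = CluClient()

# Under the singleton assumption, both refer to the client created and named
# at import time in rabbitmq.py.
assert client_a is client_b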