Merge pull request 'feat: add redis caching' (#9) from dev into main

Reviewed-on: #9
2025-11-25 13:33:52 +00:00
3 changed files with 163 additions and 29 deletions

View File

@@ -145,7 +145,31 @@ jobs:
uses: ad-m/github-push-action@master
with:
github_token: ${{ secrets.GITHUB_TOKEN }}
branch: ${{ env.PUSH_BRANCH }}
branch: main
- name: Update `dev` branch with new version
run: |
# ensure we have the latest remote refs
git fetch origin dev || true
# switch to dev if it exists remotely, otherwise create it
if git rev-parse --verify origin/dev >/dev/null 2>&1; then
git checkout dev
git pull origin dev
else
git checkout -b dev
fi
# replace the version line inside pyproject.toml
sed -E -i "s/^(version\s*=\s*)\".*\"/\1\"${{ env.VERSION }}\"/" pyproject.toml || true
git add pyproject.toml || true
git commit -m "chore: bump pyproject version for dev -> v${{ env.VERSION }}" || echo "no changes to commit"
- name: Push dev changes
uses: ad-m/github-push-action@master
with:
github_token: ${{ secrets.GITHUB_TOKEN }}
branch: dev
- name: Build Changelog
id: build_changelog
uses: https://github.com/mikepenz/release-changelog-builder-action@v5
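The "Update `dev` branch with new version" step above rewrites the version line in pyproject.toml with sed. As a rough illustration of what that substitution does (a sketch only, not part of the workflow; there the VERSION value comes from the job environment):

import os
import re
from pathlib import Path

# Sketch: mirror the sed substitution from the workflow step above.
version = os.environ["VERSION"]
pyproject = Path("pyproject.toml")
text = pyproject.read_text()
# Replace the quoted value on the `version = "..."` line, keeping the left-hand side intact.
text = re.sub(r'^(version\s*=\s*)".*"', rf'\g<1>"{version}"', text, flags=re.MULTILINE)
pyproject.write_text(text)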

View File

@@ -1,16 +1,64 @@
"""Lightweight Python API service for signature validation
This can run independently to support the PHP application
"""Lightweight Python API service for signature validation.
This can run independently to support the PHP application.
"""
from __future__ import annotations
import asyncio
from contextlib import asynccontextmanager
import json
import logging
import os
import re
import time
from typing import Any, TYPE_CHECKING
if TYPE_CHECKING:
from collections.abc import AsyncIterator, Callable, Awaitable
# Avoid importing heavy modules at top-level to keep `import api_service` lightweight
from fastapi import FastAPI, Query
from fastapi import FastAPI, Query, Request, Response
from fastapi.middleware.cors import CORSMiddleware
from fastapi.responses import JSONResponse
app = FastAPI(title="Signature Validation API")
CACHE_TTL_SECONDS = int(os.getenv("CACHE_TTL", str(72 * 3600)))
REDIS_URL = os.getenv("REDIS_URL", "")
redis_client = None
@asynccontextmanager
async def _lifespan(_app: FastAPI) -> AsyncIterator[None]:
"""Lifespan handler: connect to Redis on startup and close on shutdown."""
global redis_client
if REDIS_URL:
try:
import redis.asyncio as aioredis
redis_client = aioredis.from_url(REDIS_URL)
try:
pong = redis_client.ping()
if asyncio.iscoroutine(pong) or asyncio.isfuture(pong):
pong = await pong
if not pong:
logging.exception("redis ping failed")
redis_client = None
except Exception:
logging.exception("redis ping failed")
redis_client = None
except Exception:
logging.exception("failed to create redis client")
redis_client = None
yield
if redis_client is not None:
try:
await redis_client.close()
except Exception:
logging.exception("failed to close redis client")
app = FastAPI(title="Signature Validation API", lifespan=_lifespan)
# Optional path prefix support: when behind a reverse-proxy that uses a
# URL prefix (eg. `https://api.example.tld/library/...`) set `API_PREFIX` to
@@ -26,7 +74,10 @@ if _api_prefix_raw:
@app.middleware("http")
async def _strip_api_prefix(request, call_next):
async def _strip_api_prefix(
request: Request,
call_next: Callable[[Request], Awaitable[Response]],
) -> Response:
if api_prefix and request.url.path.startswith(api_prefix):
new_path = request.url.path[len(api_prefix) :]
request.scope["path"] = new_path or "/"
@@ -47,7 +98,7 @@ app.add_middleware(
cat = None
def _get_catalogue():
def _get_catalogue() -> Any:
global cat
if cat is None:
# import inside function to avoid expensive work during module import
@@ -57,9 +108,62 @@ def _get_catalogue():
return cat
# ---- Caching support ----------------------------------------------
# Uses an async Redis client when `REDIS_URL` is set, otherwise falls
# back to a small in-memory store with TTL. Cache TTL defaults to 72h.
CacheValue = dict[str, Any]
_in_memory_cache: dict[str, tuple[float, CacheValue]] = {}
_in_memory_lock = asyncio.Lock()
async def _cache_get(key: str) -> CacheValue | None:
if redis_client:
try:
val = await redis_client.get(key)
if val is None:
return None
return json.loads(val)
except Exception:
logging.exception("redis get failed")
return None
# fallback in-memory
async with _in_memory_lock:
entry = _in_memory_cache.get(key)
if not entry:
return None
expires_at, value = entry
if time.time() >= expires_at:
del _in_memory_cache[key]
return None
return value
async def _cache_set(key: str, value: CacheValue, ttl: int = CACHE_TTL_SECONDS) -> None:
if redis_client:
try:
await redis_client.set(key, json.dumps(value), ex=ttl)
return
except Exception:
logging.exception("redis set failed")
async with _in_memory_lock:
_in_memory_cache[key] = (time.time() + ttl, value)
# Redis lifecycle is handled by the lifespan context manager defined earlier
@app.get("/api/validate-signature")
async def validate_signature(signature: str = Query(...)):
"""Validate a book signature and return total pages"""
async def validate_signature(signature: str = Query(...)) -> JSONResponse:
"""Validate a book signature and return total pages."""
# check cache first
cache_key = f"signature:{signature}"
cached = await _cache_get(cache_key)
if cached is not None:
return JSONResponse(cached)
try:
book_result = _get_catalogue().get_book_with_data(signature)
if book_result and hasattr(book_result, "pages") and book_result.pages:
@@ -69,31 +173,36 @@ async def validate_signature(signature: str = Query(...)):
match = re.search(r"(\d+)", pages_str)
if match:
total_pages = int(match.group(1))
return JSONResponse(
{"valid": True, "total_pages": total_pages, "signature": signature},
)
result: CacheValue = {
"valid": True,
"total_pages": total_pages,
"signature": signature,
}
await _cache_set(cache_key, result)
return JSONResponse(result)
return JSONResponse(
{
"valid": False,
"error": "Signatur nicht gefunden oder keine Seitenzahl verfügbar",
"signature": signature,
},
)
result: CacheValue = {
"valid": False,
"error": "Signatur nicht gefunden oder keine Seitenzahl verfügbar",
"signature": signature,
}
await _cache_set(cache_key, result)
return JSONResponse(result)
except Exception as e:
return JSONResponse(
{
"valid": False,
"error": f"Fehler bei der Validierung: {e!s}",
"signature": signature,
},
status_code=500,
)
logging.exception("validate_signature failure")
result: CacheValue = {
"valid": False,
"error": f"Fehler bei der Validierung: {e!s}",
"signature": signature,
}
# store a failed response in cache as well so we avoid replaying errors
await _cache_set(cache_key, result)
return JSONResponse(result, status_code=500)
@app.get("/health")
async def health_check():
"""Health check endpoint"""
async def health_check() -> dict[str, str]:
"""Health check endpoint."""
return {"status": "ok", "service": "signature-validation"}

View File

@@ -9,6 +9,7 @@ dependencies = [
"fastapi>=0.122.0",
"pip>=25.3",
"uvicorn>=0.38.0",
"redis>=4.6.0",
]
[[tool.uv.index]]
name = "gitea"