1 Commit
main...v0.1.2

Author    SHA1        Message                      Date
Gitea CI  36a548eee6  Bump version: 0.1.1 → 0.1.2  2025-11-25 09:06:45 +00:00
4 changed files with 65 additions and 254 deletions

View File

@@ -75,18 +75,6 @@ jobs:
run: |
git config user.name "Gitea CI"
git config user.email "ci@git.theprivateserver.de"
- name: Determine branch to push to
id: push_branch
run: |
if [[ "${{ github.event_name }}" == "workflow_dispatch" ]]; then
# workflow_dispatch runs on a branch ref like refs/heads/main
BRANCH=${GITHUB_REF#refs/heads/}
else
# for a merged PR use the PR base ref (target branch)
BRANCH=${{ github.event.pull_request.base.ref }}
fi
echo "PUSH_BRANCH=$BRANCH" >> $GITHUB_ENV
- name: Bump version
id: bump
run: |
@@ -145,31 +133,7 @@ jobs:
uses: ad-m/github-push-action@master
with:
github_token: ${{ secrets.GITHUB_TOKEN }}
branch: main
- name: Update `dev` branch with new version
run: |
# ensure we have the latest remote refs
git fetch origin dev || true
# switch to dev if it exists remotely, otherwise create it
if git rev-parse --verify origin/dev >/dev/null 2>&1; then
git checkout dev
git pull origin dev
else
git checkout -b dev
fi
# replace the version line inside pyproject.toml
sed -E -i "s/^(version\s*=\s*)\".*\"/\1\"${{ env.VERSION }}\"/" pyproject.toml || true
git add pyproject.toml || true
git commit -m "chore: bump pyproject version for dev -> v${{ env.VERSION }}" || echo "no changes to commit"
- name: Push dev changes
uses: ad-m/github-push-action@master
with:
github_token: ${{ secrets.GITHUB_TOKEN }}
branch: dev
branch: ${{ github.ref }}
- name: Build Changelog
id: build_changelog
uses: https://github.com/mikepenz/release-changelog-builder-action@v5
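
The removed "Update `dev` branch" step above rewrites the `version = "..."` line in pyproject.toml with a sed one-liner. A rough Python equivalent of that substitution, for illustration only (the workflow itself uses sed, and `VERSION` here stands in for `${{ env.VERSION }}`):

```python
# Illustrative stand-in for the workflow's sed call:
#   sed -E -i 's/^(version\s*=\s*)".*"/\1"${VERSION}"/' pyproject.toml
# VERSION is a placeholder for the value the workflow exports as env.VERSION.
import re
from pathlib import Path

VERSION = "0.1.2"
path = Path("pyproject.toml")
text = path.read_text(encoding="utf-8")
# Replace only the [project] version line; `current_version` under
# [tool.bumpversion] does not match because the pattern is anchored at ^.
updated = re.sub(
    r'^(version\s*=\s*)".*"',
    rf'\g<1>"{VERSION}"',
    text,
    count=1,
    flags=re.MULTILINE,
)
path.write_text(updated, encoding="utf-8")
```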

View File

@@ -5,7 +5,7 @@ on:
types: [opened, synchronize, edited, reopened]
jobs:
build-image:
build-and-smoke:
runs-on: ubuntu-latest
steps:
@@ -17,48 +17,24 @@ jobs:
- name: Install uv
uses: astral-sh/setup-uv@v5
- name: (optional) Prepare dependencies
- name: Set up Python
run: uv python install
with:
python-version-file: "pyproject.toml"
- name: Install the project dependencies
run: |
uv python install --python-version-file pyproject.toml || true
uv sync --all-groups || true
uv add pip || true
uv export --format requirements.txt -o requirements.txt || true
uv sync --all-groups
uv add pip
uv export --format requirements.txt -o requirements.txt
# uv run python -m pip install --upgrade pip
# uv run python -m pip install -r requirements.txt
- name: Build image
run: |
docker build -t semapform-api:test-pr .
- name: Save image artifact
run: |
docker save semapform-api:test-pr -o semapform-api.tar
- name: Upload image artifact
uses: https://github.com/christopherHX/gitea-upload-artifact@v4
with:
name: semapform-image
path: semapform-api.tar
retention-days: 1
smoke-tests:
needs: build-image
runs-on: ubuntu-latest
steps:
- name: Checkout repository
uses: actions/checkout@v4
- name: Download image artifact
uses: christopherhx/gitea-download-artifact@v4
with:
name: semapform-image
- name: Restore image
run: |
docker load -i semapform-api.tar
- name: Start container (background)
run: |
# do NOT bind the container port to the host to avoid port conflicts on the runner
docker run -d --name semapform-test semapform-api:test-pr sleep infinity
- name: Start server in container and smoke test HTTP (in-container)
@@ -70,27 +46,30 @@ jobs:
# show container status to aid debugging
docker ps -a --filter name=semapform-test || true
echo "waiting for service to become ready inside container"
set -e
READY=0
for i in $(seq 1 20); do
echo "ready attempt $i"
if docker exec semapform-test python -c 'import urllib.request,sys; urllib.request.urlopen("http://127.0.0.1:8001/health", timeout=1); print("ok")' ; then
READY=1
break
# perform a readiness loop (try container-local /health) using small execs
echo "waiting for service to become ready inside container"
set -e
READY=0
for i in $(seq 1 20); do
echo "ready attempt $i"
if docker exec semapform-test python -c 'import urllib.request,sys; urllib.request.urlopen("http://127.0.0.1:8001/health", timeout=1); print("ok")' ; then
READY=1
break
fi
sleep 1
done
if [ "$READY" -ne 1 ]; then
echo "service did not become ready"
docker logs semapform-test --tail 200 || true
exit 1
fi
sleep 1
done
if [ "$READY" -ne 1 ]; then
echo "service did not become ready"
# Run the repository smoke-test script inside the container and surface its output
echo "running test_api.py inside container"
docker exec semapform-test python test_api.py || true
# dump the last 200 lines of logs so this step always displays useful output
docker logs semapform-test --tail 200 || true
exit 1
fi
echo "running test_api.py inside container"
docker exec semapform-test python test_api.py || true
docker logs semapform-test --tail 200 || true
- name: Cleanup container
if: always()
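
The smoke-test step above waits for readiness by repeatedly probing the container-local `/health` endpoint via `docker exec ... python -c`. Below is a standalone sketch of that probe in plain Python, under the workflow's assumptions (service on 127.0.0.1:8001, 20 attempts, one second apart); `wait_ready` is a hypothetical helper, not something the repository ships:

```python
# Standalone sketch of the in-container readiness probe (illustrative only).
import sys
import time
import urllib.request


def wait_ready(url: str = "http://127.0.0.1:8001/health",
               attempts: int = 20, delay: float = 1.0) -> bool:
    """Return True once `url` answers, False after `attempts` failed tries."""
    for i in range(1, attempts + 1):
        print(f"ready attempt {i}")
        try:
            urllib.request.urlopen(url, timeout=1)
            return True
        except OSError:  # connection refused / timeout while the server boots
            time.sleep(delay)
    return False


if __name__ == "__main__":
    sys.exit(0 if wait_ready() else 1)
```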

View File

@@ -1,89 +1,16 @@
"""Lightweight Python API service for signature validation.
This can run independently to support the PHP application.
"""Lightweight Python API service for signature validation
This can run independently to support the PHP application
"""
from __future__ import annotations
import asyncio
from contextlib import asynccontextmanager
import json
import logging
import os
import re
import time
from typing import Any, TYPE_CHECKING
if TYPE_CHECKING:
from collections.abc import AsyncIterator, Callable, Awaitable
# Avoid importing heavy modules at top-level to keep `import api_service` lightweight
from fastapi import FastAPI, Query, Request, Response
from fastapi import FastAPI, Query
from fastapi.middleware.cors import CORSMiddleware
from fastapi.responses import JSONResponse
CACHE_TTL_SECONDS = int(os.getenv("CACHE_TTL", str(72 * 3600)))
REDIS_URL = os.getenv("REDIS_URL", "")
redis_client = None
@asynccontextmanager
async def _lifespan(_app: FastAPI) -> AsyncIterator[None]:
"""Lifespan handler: connect to Redis on startup and close on shutdown."""
global redis_client
if REDIS_URL:
try:
import redis.asyncio as aioredis
redis_client = aioredis.from_url(REDIS_URL)
try:
pong = redis_client.ping()
if asyncio.iscoroutine(pong) or asyncio.isfuture(pong):
pong = await pong
if not pong:
logging.exception("redis ping failed")
redis_client = None
except Exception:
logging.exception("redis ping failed")
redis_client = None
except Exception:
logging.exception("failed to create redis client")
redis_client = None
yield
if redis_client is not None:
try:
await redis_client.close()
except Exception:
logging.exception("failed to close redis client")
app = FastAPI(title="Signature Validation API", lifespan=_lifespan)
# Optional path prefix support: when behind a reverse-proxy that uses a
# URL prefix (e.g. `https://api.example.tld/library/...`) set `API_PREFIX` to
# that prefix (example: `/library`) so incoming requests are rewritten to the
# application root. This keeps route definitions unchanged while supporting
# both proxied and direct deployments.
_api_prefix_raw = os.getenv("API_PREFIX", "").strip()
api_prefix = ""
if _api_prefix_raw:
if not _api_prefix_raw.startswith("/"):
_api_prefix_raw = "/" + _api_prefix_raw
api_prefix = _api_prefix_raw.rstrip("/")
@app.middleware("http")
async def _strip_api_prefix(
request: Request,
call_next: Callable[[Request], Awaitable[Response]],
) -> Response:
if api_prefix and request.url.path.startswith(api_prefix):
new_path = request.url.path[len(api_prefix) :]
request.scope["path"] = new_path or "/"
request.scope["root_path"] = api_prefix
return await call_next(request)
app = FastAPI(title="Signature Validation API")
# Allow PHP application to call this API
app.add_middleware(
@@ -98,7 +25,7 @@ app.add_middleware(
cat = None
def _get_catalogue() -> Any:
def _get_catalogue():
global cat
if cat is None:
# import inside function to avoid expensive work during module import
@@ -108,62 +35,9 @@ def _get_catalogue() -> Any:
return cat
# ---- Caching support ----------------------------------------------
# Uses an async Redis client when `REDIS_URL` is set, otherwise falls
# back to a small in-memory store with TTL. Cache TTL defaults to 72h.
CacheValue = dict[str, Any]
_in_memory_cache: dict[str, tuple[float, CacheValue]] = {}
_in_memory_lock = asyncio.Lock()
async def _cache_get(key: str) -> CacheValue | None:
if redis_client:
try:
val = await redis_client.get(key)
if val is None:
return None
return json.loads(val)
except Exception:
logging.exception("redis get failed")
return None
# fallback in-memory
async with _in_memory_lock:
entry = _in_memory_cache.get(key)
if not entry:
return None
expires_at, value = entry
if time.time() >= expires_at:
del _in_memory_cache[key]
return None
return value
async def _cache_set(key: str, value: CacheValue, ttl: int = CACHE_TTL_SECONDS) -> None:
if redis_client:
try:
await redis_client.set(key, json.dumps(value), ex=ttl)
return
except Exception:
logging.exception("redis set failed")
async with _in_memory_lock:
_in_memory_cache[key] = (time.time() + ttl, value)
# Redis lifecycle is handled by the lifespan context manager defined earlier
@app.get("/api/validate-signature")
async def validate_signature(signature: str = Query(...)) -> JSONResponse:
"""Validate a book signature and return total pages."""
# check cache first
cache_key = f"signature:{signature}"
cached = await _cache_get(cache_key)
if cached is not None:
return JSONResponse(cached)
async def validate_signature(signature: str = Query(...)):
"""Validate a book signature and return total pages"""
try:
book_result = _get_catalogue().get_book_with_data(signature)
if book_result and hasattr(book_result, "pages") and book_result.pages:
@@ -173,36 +47,31 @@ async def validate_signature(signature: str = Query(...)) -> JSONResponse:
match = re.search(r"(\d+)", pages_str)
if match:
total_pages = int(match.group(1))
result: CacheValue = {
"valid": True,
"total_pages": total_pages,
"signature": signature,
}
await _cache_set(cache_key, result)
return JSONResponse(result)
return JSONResponse(
{"valid": True, "total_pages": total_pages, "signature": signature},
)
result: CacheValue = {
"valid": False,
"error": "Signatur nicht gefunden oder keine Seitenzahl verfügbar",
"signature": signature,
}
await _cache_set(cache_key, result)
return JSONResponse(result)
return JSONResponse(
{
"valid": False,
"error": "Signatur nicht gefunden oder keine Seitenzahl verfügbar",
"signature": signature,
},
)
except Exception as e:
logging.exception("validate_signature failure")
result: CacheValue = {
"valid": False,
"error": f"Fehler bei der Validierung: {e!s}",
"signature": signature,
}
# store a failed response in cache as well so we avoid replaying errors
await _cache_set(cache_key, result)
return JSONResponse(result, status_code=500)
return JSONResponse(
{
"valid": False,
"error": f"Fehler bei der Validierung: {e!s}",
"signature": signature,
},
status_code=500,
)
@app.get("/health")
async def health_check() -> dict[str, str]:
"""Health check endpoint."""
async def health_check():
"""Health check endpoint"""
return {"status": "ok", "service": "signature-validation"}

View File

@@ -1,22 +1,21 @@
[project]
name = "semapform-api"
version = "0.2.0"
version = "0.1.2"
description = "Add your description here"
readme = "README.md"
requires-python = ">=3.13"
dependencies = [
"bibapi[catalogue]>=0.0.6",
"bibapi>=0.0.5",
"fastapi>=0.122.0",
"pip>=25.3",
"uvicorn>=0.38.0",
"redis>=4.6.0",
]
[[tool.uv.index]]
name = "gitea"
url = "https://git.theprivateserver.de/api/packages/PHB/pypi/simple/"
[tool.bumpversion]
current_version = "0.2.0"
current_version = "0.1.2"
parse = "(?P<major>\\d+)\\.(?P<minor>\\d+)\\.(?P<patch>\\d+)"
serialize = ["{major}.{minor}.{patch}"]
search = "{current_version}"
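
The `[tool.bumpversion]` parse/serialize pair defines how a version string is split into components and reassembled after a bump. A tiny illustrative round trip with plain `re` (not bump-my-version itself):

```python
# Illustrative only: how the parse regex and serialize template fit together.
import re

parse = r"(?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)"
serialize = "{major}.{minor}.{patch}"

parts = re.match(parse, "0.1.2").groupdict()   # {'major': '0', 'minor': '1', 'patch': '2'}
parts["patch"] = str(int(parts["patch"]) + 1)  # a patch bump: 0.1.2 -> 0.1.3
print(serialize.format(**parts))               # 0.1.3
```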