add __all__

2025-11-01 21:32:28 +01:00
parent 4e0a19c973
commit b4108e4b36
3 changed files with 152 additions and 79 deletions


@@ -1 +1,2 @@
+__all__ = ["KomCache"]
 from .cache import KomCache
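For reference, __all__ only constrains wildcard imports; explicit attribute access is unaffected. A minimal illustrative sketch (the instance name is invented):

from komcache import *         # with __all__ = ["KomCache"], the star import exposes only KomCache
cache = KomCache()             # db_path defaults to config.cache.path in local mode

import komcache
cache = komcache.KomCache()    # attribute access ignores __all__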


@@ -1,11 +1,14 @@
-from typing import Any
-from sqlalchemy import create_engine, Column, String, Integer, Date, text
-from sqlalchemy.orm import sessionmaker, declarative_base
-from sqlalchemy.exc import SQLAlchemyError
-from komcache.schemas.sqlite import CREATE_SQLITE_TABLES
-from komcache.schemas.mariadb import CREATE_MARIADB_TABLES
-from komconfig import KomConfig
+import time
+from typing import Any, Tuple, Union
+
 import loguru
+from komconfig import KomConfig
+from sqlalchemy import Column, Date, Integer, String, create_engine, text
+from sqlalchemy.exc import OperationalError, SQLAlchemyError
+from sqlalchemy.orm import declarative_base, sessionmaker
+
+from komcache.schemas.mariadb import CREATE_MARIADB_TABLES
+from komcache.schemas.sqlite import CREATE_SQLITE_TABLES
 
 log = loguru.logger
 log.remove()
@@ -28,6 +31,10 @@ class KomGrabber(Base):
     complete = Column(Integer, nullable=False)
 
 
+def protect(url: str) -> str:
+    return "mysql+pymysql://user:pass@host:3306/dbname"
+
+
 class KomCache:
     def __init__(self, db_path: str = ""):  # Default to empty string if not provided
         self.db_path = db_path or config.cache.path
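As committed, protect() discards its argument and always returns a fixed placeholder DSN, which keeps real credentials out of the debug log below. If masking only the password while keeping host and database visible were preferred, a sketch along these lines would work (illustrative only, not part of the commit):

from urllib.parse import urlsplit, urlunsplit

def protect(url: str) -> str:
    # Replace the password component with *** and keep the rest of the DSN intact.
    parts = urlsplit(url)
    if parts.password:
        netloc = parts.netloc.replace(parts.password, "***")
        return urlunsplit((parts.scheme, netloc, parts.path, parts.query, parts.fragment))
    return url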
@@ -35,15 +42,24 @@ class KomCache:
         if config.cache.mode == "local":
             self.db_path = db_path or config.cache.path
             log.debug(f"Cache path: {self.db_path}")
-            self.engine = create_engine(f"sqlite:///{self.db_path}")
+            self.engine = create_engine(f"sqlite:///{self.db_path}", pool_pre_ping=True)
         elif config.cache.mode == "remote":
             db_url = (
                 config.cache.url
             )  # e.g., "mysql+pymysql://user:pass@host:3306/dbname"
-            log.debug(f"Using remote DB URL: {db_url}")
-            self.engine = create_engine(db_url)
-        self.Session = sessionmaker(bind=self.engine)
+            log.debug(f"Using remote DB URL: {protect(db_url)}")
+            self.engine = create_engine(
+                db_url,
+                pool_pre_ping=True,
+                pool_recycle=1800,
+                pool_size=5,
+                max_overflow=10,
+                pool_timeout=30,
+                connect_args={"connect_timeout": 10},
+            )
+        self.Session = sessionmaker(bind=self.engine, expire_on_commit=False)
 
         # if tables do not exist, create them
         if config.cache.mode == "local":
             if not self.query(
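The pool arguments are the substantive part of this hunk: pool_pre_ping validates a connection before every checkout and pool_recycle retires connections older than 30 minutes, both aimed at idle remote connections being dropped server-side. A minimal standalone sketch of the two options (the in-memory SQLite DSN is just a placeholder for config.cache.url):

from sqlalchemy import create_engine, text

engine = create_engine(
    "sqlite://",          # placeholder DSN; the commit passes config.cache.url here
    pool_pre_ping=True,   # cheap liveness check before each connection checkout
    pool_recycle=1800,    # retire pooled connections older than 1800 s
)
with engine.connect() as conn:
    print(conn.execute(text("SELECT 1")).scalar())  # -> 1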
@@ -54,6 +70,29 @@
             if not self.query("SHOW TABLES LIKE 'komgrabber'"):
                 self.create_table()
 
+    def _run(self, fn_desc: str, callable_, retries: int = 2, *args, **kwargs):
+        attempt = 0
+        while True:
+            try:
+                return callable_(*args, **kwargs)
+            except OperationalError as e:
+                # MySQL server has gone away (2006) / Lost connection (2013)
+                if attempt < retries and any(
+                    code in str(e.orig) for code in ("2006", "2013")
+                ):
+                    attempt += 1
+                    wait = 1 * attempt
+                    log.warning(
+                        f"{fn_desc} failed due to connection loss (attempt {attempt}/{retries}). Retrying in {wait}s."
+                    )
+                    time.sleep(wait)
+                    continue
+                log.error(f"{fn_desc} failed with OperationalError: {e}")
+                return None
+            except SQLAlchemyError as e:
+                log.error(f"{fn_desc} failed with SQLAlchemyError: {e}")
+                return None
+
     def create_table(self):
         """Ensure all tables are created in the database."""
         if config.cache.mode == "local":
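For illustration only (the instance name and query are invented), the CRUD helpers below hand their session work to _run as a zero-argument callable, so a dropped connection is retried at most `retries` times with a linearly growing wait:

from sqlalchemy import text

cache = KomCache()

def _do():
    # same shape as the closures built inside query()/insert()/fetch_all()
    session = cache.Session()
    try:
        return session.execute(text("SELECT 1")).scalar()
    finally:
        session.close()

value = cache._run("select 1", _do)              # logged and None if it keeps failing
value = cache._run("select 1", _do, retries=4)   # retries only on MySQL errors 2006/2013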
@@ -90,87 +129,110 @@
             return False
 
     def query(self, query: str, args: dict[str, Any] = None):
+        """Run an arbitrary SQL statement.
+        For SELECT (or other row-returning) statements: returns list of rows.
+        For non-SELECT: executes and commits, returns [].
+        """
         if args is None:
             args = {}
-        try:
+
+        def _do():
             session = self.Session()
-            result = session.execute(text(query), args).fetchall()
-            session.close()
-            return result
-        except SQLAlchemyError as e:
-            log.error(f"Error executing query: {e}")
-            return []
+            try:
+                result = session.execute(text(query), args)
+                # SQLAlchemy 1.4/2.0: result.returns_rows tells us if rows are present
+                if getattr(result, "returns_rows", False):
+                    rows = result.fetchall()
+                    return rows
+                # Non-row statements: commit if they mutate
+                first = query.lstrip().split(None, 1)[0].upper()
+                if first in {
+                    "INSERT",
+                    "UPDATE",
+                    "DELETE",
+                    "REPLACE",
+                    "ALTER",
+                    "CREATE",
+                    "DROP",
+                    "TRUNCATE",
+                }:
+                    session.commit()
+                return []
+            finally:
+                session.close()
+
+        result = self._run("query", _do)
+        return result or []
 
     def insert(self, query: str, args: dict[str, Any]) -> bool:
-        try:
+        # (Optionally you can now just call self.query(query, args))
+        def _do():
             session = self.Session()
-            session.execute(text(query), args)
-            session.commit()
-            session.close()
-            return True
-        except SQLAlchemyError as e:
-            log.error(f"Error inserting data: {e}")
-            return False
+            try:
+                session.execute(text(query), args)
+                session.commit()
+                return True
+            finally:
+                session.close()
+
+        return bool(self._run("insert", _do))
 
     def update(self, query: str, args: dict[str, Any]) -> bool:
-        try:
+        def _do():
             session = self.Session()
-            session.execute(text(query), args)
-            session.commit()
-            session.close()
-            return True
-        except SQLAlchemyError as e:
-            log.error(f"Error updating data: {e}")
-            return False
+            try:
+                session.execute(text(query), args)
+                session.commit()
+                return True
+            finally:
+                session.close()
+
+        return bool(self._run("update", _do))
 
     def delete(self, query: str, args: dict[str, Any]) -> bool:
-        try:
+        def _do():
             session = self.Session()
-            session.execute(text(query), args)
-            session.commit()
-            session.close()
-            return True
-        except SQLAlchemyError as e:
-            log.error(f"Error deleting data: {e}")
-            return False
-
-    def get_last_update_date(self, series_name: str) -> str:
-        try:
-            session = self.Session()
-            result = (
-                session.query(KomGrabber.last_update_date)
-                .filter_by(series_name=series_name)
-                .first()
-            )
-            session.close()
-            return result[0] if result else ""
-        except SQLAlchemyError as e:
-            log.error(f"Error fetching last update date: {e}")
-            return ""
-
-    def fetch_one(self, query: str, args: dict[str, Any] = None):
+            try:
+                session.execute(text(query), args)
+                session.commit()
+                return True
+            finally:
+                session.close()
+
+        return bool(self._run("delete", _do))
+
+    def fetch_one(self, query: str, args: dict[str, Any] = None) -> Union[Tuple, None]:
         if args is None:
             args = {}
-        try:
+
+        def _do():
             session = self.Session()
-            result = session.execute(text(query), args).fetchone()
-            session.close()
-            return result
-        except SQLAlchemyError as e:
-            log.error(f"Error executing query: {e}")
-            return None
+            try:
+                result = session.execute(text(query), args)
+                if getattr(result, "returns_rows", False):
+                    return result.fetchone()
+                return None
+            finally:
+                session.close()
+
+        return self._run("fetch_one", _do)
 
     def fetch_all(self, query: str, args: dict[str, Any] | None = None):
         if args is None:
             args = {}
-        try:
+
+        def _do():
             session = self.Session()
-            result = session.execute(text(query), args).fetchall()
-            session.close()
-            return result
-        except SQLAlchemyError as e:
-            log.error(f"Error executing query: {e}")
-            return []
+            try:
+                result = session.execute(text(query), args)
+                if getattr(result, "returns_rows", False):
+                    return result.fetchall()
+                return []
+            finally:
+                session.close()
+
+        result = self._run("fetch_all", _do)
+        return result or []
 
 if __name__ == "__main__":
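The public signatures are unchanged by this commit, so callers keep passing plain SQL strings with named parameters in the :name style that text() expects. A hedged usage sketch (table and column names follow the MariaDB schema below; the instance and values are invented):

cache = KomCache()  # local SQLite or remote MariaDB, per config.cache.mode

cache.insert(
    "INSERT INTO manga_requests (manga_id, grabbed, image, title) "
    "VALUES (:mid, 0, :image, :title)",
    {"mid": 42, "image": "https://example.invalid/cover.jpg", "title": "Example"},
)
rows = cache.fetch_all("SELECT id, title FROM manga_requests WHERE grabbed = :g", {"g": 0})
first = cache.fetch_one("SELECT COUNT(*) FROM manga_requests", {})
cache.query("TRUNCATE TABLE manga_requests")  # non-row statement: commits, returns []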


@@ -1,9 +1,13 @@
-CREATE_MARIADB_TABLES = ["""
+CREATE_MARIADB_TABLES = [
+    """
 CREATE TABLE IF NOT EXISTS manga_requests (
     id INT AUTO_INCREMENT PRIMARY KEY,
     manga_id INT,
-    grabbed TINYINT(1) DEFAULT 0
-);""","""CREATE TABLE IF NOT EXISTS komgrabber (
+    grabbed TINYINT(1) DEFAULT 0,
+    image TEXT NOT NULL,
+    title TEXT NOT NULL
+);""",
+    """CREATE TABLE IF NOT EXISTS komgrabber (
     id INT AUTO_INCREMENT PRIMARY KEY,
     name TEXT NOT NULL,
     series_id TEXT NOT NULL,
@@ -12,12 +16,18 @@ CREATE TABLE IF NOT EXISTS manga_requests (
     updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
     last_checked TIMESTAMP DEFAULT NULL,
     completed TINYINT(1) DEFAULT 0
-);""","""CREATE TABLE IF NOT EXISTS komtagger (
+);""",
+    """CREATE TABLE IF NOT EXISTS komtagger (
     id INT AUTO_INCREMENT PRIMARY KEY,
     series_id TEXT NOT NULL,
     title TEXT NOT NULL,
-    updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
-    last_checked TIMESTAMP DEFAULT NULL,
-    status TEXT NOT NULL
+    created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
+    last_checked DATETIME NOT NULL DEFAULT '1970-01-01 00:00:00',
+    updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
+    UNIQUE KEY unique_series_id (series_id(255)),
+    status TEXT NOT NULL,
+    tag_status TEXT NOT NULL DEFAULT 'untagged',
+    anilist_id INT DEFAULT NULL
 );
-"""]
+""",
+]
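The new unique_series_id key makes series_id a natural upsert target. A hedged sketch using MariaDB's INSERT ... ON DUPLICATE KEY UPDATE through the cache (values are invented, and this assumes a remote-mode KomCache instance named cache):

cache.insert(
    """
    INSERT INTO komtagger (series_id, title, status, tag_status, anilist_id)
    VALUES (:sid, :title, :status, 'untagged', :aid)
    ON DUPLICATE KEY UPDATE
        title = VALUES(title),
        status = VALUES(status),
        anilist_id = VALUES(anilist_id)
    """,
    {"sid": "abc123", "title": "Example Series", "status": "ongoing", "aid": 10087},
)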