feat: add migrations to create database and add / change features down the line
@@ -10,8 +10,6 @@ from string import ascii_lowercase as lower
 from string import digits, punctuation
 from typing import Any, List, Optional, Tuple, Union
 
-import loguru
-
 from src import DATABASE_DIR, settings
 from src.backend.db import (
     CREATE_ELSA_FILES_TABLE,
@@ -30,11 +28,9 @@ from src.errors import AppPresentError, NoResultError
 from src.logic import ELSA, Apparat, ApparatData, BookData, Prof
 from src.logic.constants import SEMAP_MEDIA_ACCOUNTS
 from src.logic.semester import Semester
+from src.shared.logging import log
 from src.utils.blob import create_blob
 
-log = loguru.logger
-
 ascii_lowercase = lower + digits + punctuation
 
 
@@ -123,6 +119,66 @@ class Database:
         if not self.db_initialized:
             self.checkDatabaseStatus()
             self.db_initialized = True
+            # run migrations after initial creation to bring schema up-to-date
+            try:
+                if self.db_path is not None:
+                    self.run_migrations()
+            except Exception as e:
+                log.error(f"Error while running migrations: {e}")
+
+    # --- Migration helpers integrated into Database ---
+    def _ensure_migrations_table(self, conn: sql.Connection) -> None:
+        cursor = conn.cursor()
+        cursor.execute(
+            """
+            CREATE TABLE IF NOT EXISTS schema_migrations (
+                id TEXT PRIMARY KEY,
+                applied_at DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP
+            )
+            """
+        )
+        conn.commit()
+
+    def _applied_migrations(self, conn: sql.Connection) -> List[str]:
+        cursor = conn.cursor()
+        cursor.execute("SELECT id FROM schema_migrations ORDER BY id")
+        rows = cursor.fetchall()
+        return [r[0] for r in rows]
+
+    def _apply_sql_file(self, conn: sql.Connection, path: Path) -> None:
+        log.info(f"Applying migration {path.name}")
+        sql_text = path.read_text(encoding="utf-8")
+        cursor = conn.cursor()
+        cursor.executescript(sql_text)
+        cursor.execute(
+            "INSERT OR REPLACE INTO schema_migrations (id) VALUES (?)", (path.name,)
+        )
+        conn.commit()
+
+    def run_migrations(self) -> None:
+        """Apply unapplied .sql migrations from src/backend/migrations using this Database's connection."""
+        migrations_dir = Path(__file__).parent / "migrations"
+        if not migrations_dir.exists():
+            log.debug("Migrations directory does not exist, skipping migrations")
+            return
+
+        conn = self.connect()
+        try:
+            self._ensure_migrations_table(conn)
+            applied = set(self._applied_migrations(conn))
+
+            migration_files = sorted(
+                [p for p in migrations_dir.iterdir() if p.suffix == ".sql"]
+            )
+            for m in migration_files:
+                if m.name in applied:
+                    log.debug(f"Skipping already applied migration {m.name}")
+                    continue
+                self._apply_sql_file(conn, m)
+        finally:
+            conn.close()
+
+    # --- end migration helpers ---
+
     def overwritePath(self, new_db_path: str):
         log.debug("got new path, overwriting")
@@ -204,39 +260,10 @@ class Database:
         """
         Create the tables in the database
         """
-        conn = self.connect()
-        cursor = conn.cursor()
-        cursor.execute(CREATE_TABLE_APPARAT)
-        cursor.execute(CREATE_TABLE_MESSAGES)
-        cursor.execute(CREATE_TABLE_MEDIA)
-        cursor.execute(CREATE_TABLE_FILES)
-        cursor.execute(CREATE_TABLE_PROF)
-        cursor.execute(CREATE_TABLE_USER)
-        cursor.execute(CREATE_TABLE_SUBJECTS)
-        cursor.execute(CREATE_ELSA_TABLE)
-        cursor.execute(CREATE_ELSA_FILES_TABLE)
-        cursor.execute(CREATE_ELSA_MEDIA_TABLE)
-        # Helpful indices to speed up frequent lookups and joins
-        cursor.execute(
-            "CREATE INDEX IF NOT EXISTS idx_media_app_prof ON media(app_id, prof_id);"
-        )
-        cursor.execute(
-            "CREATE INDEX IF NOT EXISTS idx_media_deleted ON media(deleted);"
-        )
-        cursor.execute(
-            "CREATE INDEX IF NOT EXISTS idx_media_available ON media(available);"
-        )
-        cursor.execute(
-            "CREATE INDEX IF NOT EXISTS idx_messages_remind_at ON messages(remind_at);"
-        )
-        cursor.execute(
-            "CREATE INDEX IF NOT EXISTS idx_semesterapparat_prof ON semesterapparat(prof_id);"
-        )
-        cursor.execute(
-            "CREATE INDEX IF NOT EXISTS idx_semesterapparat_appnr ON semesterapparat(appnr);"
-        )
-        conn.commit()
-        self.close_connection(conn)
+        # Bootstrapping of tables is handled via migrations. Run migrations instead
+        # of executing the hard-coded DDL here. Migrations are idempotent and
+        # contain the CREATE TABLE IF NOT EXISTS statements.
+        self.run_migrations()
 
     def insertInto(self, query: str, params: Tuple) -> None:
         """
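Because every applied file is recorded in schema_migrations, re-running start-up (or createTables) applies nothing twice. A minimal sketch for inspecting what has been applied, assuming a hypothetical database path (the real location comes from settings.database.path / DATABASE_DIR):

import sqlite3 as sql

conn = sql.connect("data/app.db")  # hypothetical path, for illustration only
for migration_id, applied_at in conn.execute(
    "SELECT id, applied_at FROM schema_migrations ORDER BY id"
):
    print(migration_id, applied_at)  # e.g. V001__create_base_tables.sql
conn.close()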
src/backend/migration_runner.py (new file, 68 lines)
@@ -0,0 +1,68 @@
+import os
+import sqlite3 as sql
+from pathlib import Path
+from typing import List
+
+from src import DATABASE_DIR, settings
+from src.shared.logging import log
+
+MIGRATIONS_DIR = Path(__file__).parent / "migrations"
+
+
+def _ensure_migrations_table(conn: sql.Connection) -> None:
+    cursor = conn.cursor()
+    cursor.execute(
+        """
+        CREATE TABLE IF NOT EXISTS schema_migrations (
+            id TEXT PRIMARY KEY,
+            applied_at DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP
+        )
+        """
+    )
+    conn.commit()
+
+
+def _applied_migrations(conn: sql.Connection) -> List[str]:
+    cursor = conn.cursor()
+    cursor.execute("SELECT id FROM schema_migrations ORDER BY id")
+    rows = cursor.fetchall()
+    return [r[0] for r in rows]
+
+
+def _apply_sql_file(conn: sql.Connection, path: Path) -> None:
+    log.info(f"Applying migration {path.name}")
+    sql_text = path.read_text(encoding="utf-8")
+    cursor = conn.cursor()
+    cursor.executescript(sql_text)
+    cursor.execute(
+        "INSERT OR REPLACE INTO schema_migrations (id) VALUES (?)", (path.name,)
+    )
+    conn.commit()
+
+
+def run_migrations(db_path: Path) -> None:
+    """Run all unapplied migrations from the migrations directory against the database at db_path."""
+    if not MIGRATIONS_DIR.exists():
+        log.debug("Migrations directory does not exist, skipping migrations")
+        return
+
+    # Ensure database directory exists
+    db_dir = settings.database.path or Path(DATABASE_DIR)
+    if not db_dir.exists():
+        os.makedirs(db_dir, exist_ok=True)
+
+    conn = sql.connect(db_path)
+    try:
+        _ensure_migrations_table(conn)
+        applied = set(_applied_migrations(conn))
+
+        migration_files = sorted(
+            [p for p in MIGRATIONS_DIR.iterdir() if p.suffix in (".sql",)]
+        )
+        for m in migration_files:
+            if m.name in applied:
+                log.debug(f"Skipping already applied migration {m.name}")
+                continue
+            _apply_sql_file(conn, m)
+    finally:
+        conn.close()
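A minimal usage sketch for the standalone runner, assuming it is called from a one-off maintenance script (the db_file path below is hypothetical; Database.run_migrations already covers the normal start-up path):

from pathlib import Path

from src.backend.migration_runner import run_migrations

db_file = Path("data/app.db")  # hypothetical path, not part of this commit
run_migrations(db_file)        # applies any pending .sql migrations exactly once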
src/backend/migrations/V001__create_base_tables.sql (new file, 132 lines)
@@ -0,0 +1,132 @@
+BEGIN TRANSACTION;
+
+CREATE TABLE IF NOT EXISTS semesterapparat (
+    id INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
+    name TEXT,
+    prof_id INTEGER,
+    fach TEXT,
+    appnr INTEGER,
+    erstellsemester TEXT,
+    verlängert_am TEXT,
+    dauer BOOLEAN,
+    verlängerung_bis TEXT,
+    deletion_status INTEGER,
+    deleted_date TEXT,
+    apparat_id_adis INTEGER,
+    prof_id_adis INTEGER,
+    konto INTEGER,
+    FOREIGN KEY (prof_id) REFERENCES prof (id)
+);
+
+CREATE TABLE IF NOT EXISTS media (
+    id INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
+    bookdata TEXT,
+    app_id INTEGER,
+    prof_id INTEGER,
+    deleted INTEGER DEFAULT (0),
+    available BOOLEAN,
+    reservation BOOLEAN,
+    FOREIGN KEY (prof_id) REFERENCES prof (id),
+    FOREIGN KEY (app_id) REFERENCES semesterapparat (id)
+);
+
+CREATE TABLE IF NOT EXISTS files (
+    id INTEGER PRIMARY KEY,
+    filename TEXT,
+    fileblob BLOB,
+    app_id INTEGER,
+    filetyp TEXT,
+    prof_id INTEGER REFERENCES prof (id),
+    FOREIGN KEY (app_id) REFERENCES semesterapparat (id)
+);
+
+CREATE TABLE IF NOT EXISTS messages (
+    id INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
+    created_at date NOT NULL DEFAULT CURRENT_TIMESTAMP,
+    message TEXT NOT NULL,
+    remind_at date NOT NULL DEFAULT CURRENT_TIMESTAMP,
+    user_id INTEGER NOT NULL,
+    appnr INTEGER,
+    FOREIGN KEY (user_id) REFERENCES user (id)
+);
+
+CREATE TABLE IF NOT EXISTS prof (
+    id INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
+    titel TEXT,
+    fname TEXT,
+    lname TEXT,
+    fullname TEXT NOT NULL UNIQUE,
+    mail TEXT,
+    telnr TEXT
+);
+
+CREATE TABLE IF NOT EXISTS user (
+    id integer NOT NULL PRIMARY KEY AUTOINCREMENT,
+    created_at datetime NOT NULL DEFAULT CURRENT_TIMESTAMP,
+    username TEXT NOT NULL UNIQUE,
+    password TEXT NOT NULL,
+    salt TEXT NOT NULL,
+    role TEXT NOT NULL,
+    email TEXT UNIQUE,
+    name TEXT
+);
+
+CREATE TABLE IF NOT EXISTS subjects (
+    id INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
+    name TEXT NOT NULL UNIQUE
+);
+
+CREATE TABLE IF NOT EXISTS elsa (
+    id INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
+    date TEXT NOT NULL,
+    semester TEXT NOT NULL,
+    prof_id INTEGER NOT NULL
+);
+
+CREATE TABLE IF NOT EXISTS elsa_files (
+    id INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
+    filename TEXT NOT NULL,
+    fileblob BLOB NOT NULL,
+    elsa_id INTEGER NOT NULL,
+    filetyp TEXT NOT NULL,
+    FOREIGN KEY (elsa_id) REFERENCES elsa (id)
+);
+
+CREATE TABLE IF NOT EXISTS elsa_media (
+    id INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
+    work_author TEXT,
+    section_author TEXT,
+    year TEXT,
+    edition TEXT,
+    work_title TEXT,
+    chapter_title TEXT,
+    location TEXT,
+    publisher TEXT,
+    signature TEXT,
+    issue TEXT,
+    pages TEXT,
+    isbn TEXT,
+    type TEXT,
+    elsa_id INTEGER NOT NULL,
+    FOREIGN KEY (elsa_id) REFERENCES elsa (id)
+);
+
+CREATE TABLE IF NOT EXISTS neweditions (
+    id INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
+    new_bookdata TEXT,
+    old_edition_id INTEGER,
+    for_apparat INTEGER,
+    ordered BOOLEAN DEFAULT (0),
+    FOREIGN KEY (old_edition_id) REFERENCES media (id),
+    FOREIGN KEY (for_apparat) REFERENCES semesterapparat (id)
+);
+
+-- Helpful indices to speed up frequent lookups and joins
+CREATE INDEX IF NOT EXISTS idx_media_app_prof ON media(app_id, prof_id);
+CREATE INDEX IF NOT EXISTS idx_media_deleted ON media(deleted);
+CREATE INDEX IF NOT EXISTS idx_media_available ON media(available);
+CREATE INDEX IF NOT EXISTS idx_messages_remind_at ON messages(remind_at);
+CREATE INDEX IF NOT EXISTS idx_semesterapparat_prof ON semesterapparat(prof_id);
+CREATE INDEX IF NOT EXISTS idx_semesterapparat_appnr ON semesterapparat(appnr);
+
+COMMIT;
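To "add / change features down the line", a follow-up migration file is simply dropped next to V001 and is picked up automatically by the sorted(...) loop above on the next start. A hedged sketch of that workflow (the V002 file name and the ALTER TABLE statement are made-up examples, not part of this commit):

from pathlib import Path

migrations_dir = Path("src/backend/migrations")
# Hypothetical follow-up migration; any .sql file sorting after V001 works.
(migrations_dir / "V002__add_media_notes.sql").write_text(
    "ALTER TABLE media ADD COLUMN notes TEXT;\n",
    encoding="utf-8",
)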