Merge pull request 'feat/database-ktrl_nr_daia_integration' (#19) from feat/database-ktrl_nr_daia_integration into dev

Reviewed-on: #19
This commit was merged in pull request #19.
This commit is contained in:
2025-10-21 14:28:06 +01:00
14 changed files with 588 additions and 47 deletions

View File

@@ -10,8 +10,6 @@ from string import ascii_lowercase as lower
from string import digits, punctuation
from typing import Any, List, Optional, Tuple, Union
import loguru
from src import DATABASE_DIR, settings
from src.backend.db import (
CREATE_ELSA_FILES_TABLE,
@@ -30,11 +28,9 @@ from src.errors import AppPresentError, NoResultError
from src.logic import ELSA, Apparat, ApparatData, BookData, Prof
from src.logic.constants import SEMAP_MEDIA_ACCOUNTS
from src.logic.semester import Semester
from src.shared.logging import log
from src.utils.blob import create_blob
log = loguru.logger
ascii_lowercase = lower + digits + punctuation
@@ -123,6 +119,66 @@ class Database:
if not self.db_initialized:
self.checkDatabaseStatus()
self.db_initialized = True
# run migrations after initial creation to bring schema up-to-date
try:
if self.db_path is not None:
self.run_migrations()
except Exception as e:
log.error(f"Error while running migrations: {e}")
# --- Migration helpers integrated into Database ---
def _ensure_migrations_table(self, conn: sql.Connection) -> None:
    """Create the schema_migrations bookkeeping table if it does not exist.

    The table maps each applied migration file name to the timestamp it was
    applied at; creation is idempotent (IF NOT EXISTS).
    """
    conn.execute(
        """
        CREATE TABLE IF NOT EXISTS schema_migrations (
            id TEXT PRIMARY KEY,
            applied_at DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP
        )
        """
    )
    conn.commit()
def _applied_migrations(self, conn: sql.Connection) -> List[str]:
    """Return the ids (file names) of all applied migrations, in id order."""
    rows = conn.execute("SELECT id FROM schema_migrations ORDER BY id").fetchall()
    return [row_id for (row_id,) in rows]
def _apply_sql_file(self, conn: sql.Connection, path: Path) -> None:
    """Execute one migration script on *conn* and record it as applied.

    The file name serves as the migration id; INSERT OR REPLACE keeps
    re-recording the same file harmless.
    """
    log.info(f"Applying migration {path.name}")
    script = path.read_text(encoding="utf-8")
    conn.executescript(script)
    conn.execute(
        "INSERT OR REPLACE INTO schema_migrations (id) VALUES (?)", (path.name,)
    )
    conn.commit()
def run_migrations(self) -> None:
    """Apply unapplied .sql migrations from src/backend/migrations using this Database's connection."""
    migrations_dir = Path(__file__).parent / "migrations"
    if not migrations_dir.exists():
        log.debug("Migrations directory does not exist, skipping migrations")
        return
    conn = self.connect()
    try:
        self._ensure_migrations_table(conn)
        already_applied = set(self._applied_migrations(conn))
        # Lexicographic file-name order defines the migration order.
        for candidate in sorted(migrations_dir.glob("*.sql")):
            if candidate.name in already_applied:
                log.debug(f"Skipping already applied migration {candidate.name}")
                continue
            self._apply_sql_file(conn, candidate)
    finally:
        conn.close()
# --- end migration helpers ---
def overwritePath(self, new_db_path: str):
log.debug("got new path, overwriting")
@@ -204,39 +260,10 @@ class Database:
"""
Create the tables in the database
"""
conn = self.connect()
cursor = conn.cursor()
cursor.execute(CREATE_TABLE_APPARAT)
cursor.execute(CREATE_TABLE_MESSAGES)
cursor.execute(CREATE_TABLE_MEDIA)
cursor.execute(CREATE_TABLE_FILES)
cursor.execute(CREATE_TABLE_PROF)
cursor.execute(CREATE_TABLE_USER)
cursor.execute(CREATE_TABLE_SUBJECTS)
cursor.execute(CREATE_ELSA_TABLE)
cursor.execute(CREATE_ELSA_FILES_TABLE)
cursor.execute(CREATE_ELSA_MEDIA_TABLE)
# Helpful indices to speed up frequent lookups and joins
cursor.execute(
"CREATE INDEX IF NOT EXISTS idx_media_app_prof ON media(app_id, prof_id);"
)
cursor.execute(
"CREATE INDEX IF NOT EXISTS idx_media_deleted ON media(deleted);"
)
cursor.execute(
"CREATE INDEX IF NOT EXISTS idx_media_available ON media(available);"
)
cursor.execute(
"CREATE INDEX IF NOT EXISTS idx_messages_remind_at ON messages(remind_at);"
)
cursor.execute(
"CREATE INDEX IF NOT EXISTS idx_semesterapparat_prof ON semesterapparat(prof_id);"
)
cursor.execute(
"CREATE INDEX IF NOT EXISTS idx_semesterapparat_appnr ON semesterapparat(appnr);"
)
conn.commit()
self.close_connection(conn)
# Bootstrapping of tables is handled via migrations. Run migrations instead
# of executing the hard-coded DDL here. Migrations are idempotent and
# contain the CREATE TABLE IF NOT EXISTS statements.
self.run_migrations()
def insertInto(self, query: str, params: Tuple) -> None:
"""
@@ -253,6 +280,21 @@ class Database:
conn.commit()
self.close_connection(conn)
def getWebADISAuth(self) -> Tuple[str, str]:
    """Fetch the stored WebADIS credentials for the 'SAP' effective range.

    Returns:
        Tuple[str, str]: (username, password); ("", "") when no row exists.
    """
    row = self.query_db(
        "SELECT username, password FROM webadis_login WHERE effective_range='SAP'",
        one=True,
    )
    return ("", "") if row is None else (row[0], row[1])
@log.catch
def query_db(
self,

View File

@@ -0,0 +1,68 @@
import os
import sqlite3 as sql
from pathlib import Path
from typing import List
from src import DATABASE_DIR, settings
from src.shared.logging import log
# Directory holding the *.sql migration scripts, resolved relative to this module.
MIGRATIONS_DIR = Path(__file__).parent / "migrations"
def _ensure_migrations_table(conn: sql.Connection) -> None:
    """Make sure the schema_migrations bookkeeping table exists (idempotent)."""
    ddl = """
    CREATE TABLE IF NOT EXISTS schema_migrations (
        id TEXT PRIMARY KEY,
        applied_at DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP
    )
    """
    cur = conn.cursor()
    cur.execute(ddl)
    conn.commit()
def _applied_migrations(conn: sql.Connection) -> List[str]:
    """List the ids of migrations already recorded as applied, sorted by id."""
    cur = conn.cursor()
    cur.execute("SELECT id FROM schema_migrations ORDER BY id")
    return [record[0] for record in cur.fetchall()]
def _apply_sql_file(conn: sql.Connection, path: Path) -> None:
    """Execute one .sql migration file and mark it as applied."""
    log.info(f"Applying migration {path.name}")
    conn.executescript(path.read_text(encoding="utf-8"))
    # INSERT OR REPLACE keeps re-recording the same file name harmless.
    conn.execute(
        "INSERT OR REPLACE INTO schema_migrations (id) VALUES (?)", (path.name,)
    )
    conn.commit()
def run_migrations(db_path: Path) -> None:
    """Run all unapplied migrations from the migrations directory against the database at db_path.

    Migrations are applied in lexicographic file-name order; files already
    recorded in schema_migrations are skipped.

    Args:
        db_path: Path of the SQLite database file to migrate (created if absent).
    """
    if not MIGRATIONS_DIR.exists():
        log.debug("Migrations directory does not exist, skipping migrations")
        return
    # Ensure the directory that will actually hold db_path exists — sqlite
    # cannot create intermediate directories. (Previously this checked the
    # configured settings.database.path, which may differ from db_path's
    # parent and left sqlite failing on a missing directory.)
    db_dir = Path(db_path).parent
    db_dir.mkdir(parents=True, exist_ok=True)
    conn = sql.connect(db_path)
    try:
        _ensure_migrations_table(conn)
        applied = set(_applied_migrations(conn))
        migration_files = sorted(
            p for p in MIGRATIONS_DIR.iterdir() if p.suffix == ".sql"
        )
        for m in migration_files:
            if m.name in applied:
                log.debug(f"Skipping already applied migration {m.name}")
                continue
            _apply_sql_file(conn, m)
    finally:
        conn.close()

View File

@@ -0,0 +1,132 @@
-- Migration 0001: bootstrap the complete application schema.
-- Every statement uses IF NOT EXISTS, so the script is idempotent and can
-- also run against databases created by the legacy hard-coded DDL.
BEGIN TRANSACTION;
-- One row per Semesterapparat (semester reserve collection).
CREATE TABLE IF NOT EXISTS semesterapparat (
    id INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
    name TEXT,
    prof_id INTEGER,
    fach TEXT,
    appnr INTEGER,
    erstellsemester TEXT,
    verlängert_am TEXT,
    dauer BOOLEAN,
    verlängerung_bis TEXT,
    deletion_status INTEGER,
    deleted_date TEXT,
    apparat_id_adis INTEGER,
    prof_id_adis INTEGER,
    konto INTEGER,
    FOREIGN KEY (prof_id) REFERENCES prof (id)
);
-- Media items assigned to an apparat; bookdata presumably holds a
-- serialized book record (see application code accessing book["bookdata"]).
CREATE TABLE IF NOT EXISTS media (
    id INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
    bookdata TEXT,
    app_id INTEGER,
    prof_id INTEGER,
    deleted INTEGER DEFAULT (0),
    available BOOLEAN,
    reservation BOOLEAN,
    FOREIGN KEY (prof_id) REFERENCES prof (id),
    FOREIGN KEY (app_id) REFERENCES semesterapparat (id)
);
-- File blobs attached to an apparat.
CREATE TABLE IF NOT EXISTS files (
    id INTEGER PRIMARY KEY,
    filename TEXT,
    fileblob BLOB,
    app_id INTEGER,
    filetyp TEXT,
    prof_id INTEGER REFERENCES prof (id),
    FOREIGN KEY (app_id) REFERENCES semesterapparat (id)
);
-- Reminder messages per user, optionally linked to an apparat number.
CREATE TABLE IF NOT EXISTS messages (
    id INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
    created_at date NOT NULL DEFAULT CURRENT_TIMESTAMP,
    message TEXT NOT NULL,
    remind_at date NOT NULL DEFAULT CURRENT_TIMESTAMP,
    user_id INTEGER NOT NULL,
    appnr INTEGER,
    FOREIGN KEY (user_id) REFERENCES user (id)
);
-- Professors; fullname is the unique lookup key.
CREATE TABLE IF NOT EXISTS prof (
    id INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
    titel TEXT,
    fname TEXT,
    lname TEXT,
    fullname TEXT NOT NULL UNIQUE,
    mail TEXT,
    telnr TEXT
);
-- Application login accounts (salted password hash columns).
CREATE TABLE IF NOT EXISTS user (
    id integer NOT NULL PRIMARY KEY AUTOINCREMENT,
    created_at datetime NOT NULL DEFAULT CURRENT_TIMESTAMP,
    username TEXT NOT NULL UNIQUE,
    password TEXT NOT NULL,
    salt TEXT NOT NULL,
    role TEXT NOT NULL,
    email TEXT UNIQUE,
    name TEXT
);
-- Subject catalogue.
CREATE TABLE IF NOT EXISTS subjects (
    id INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
    name TEXT NOT NULL UNIQUE
);
-- ELSA requests and their attachments / media entries.
CREATE TABLE IF NOT EXISTS elsa (
    id INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
    date TEXT NOT NULL,
    semester TEXT NOT NULL,
    prof_id INTEGER NOT NULL
);
CREATE TABLE IF NOT EXISTS elsa_files (
    id INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
    filename TEXT NOT NULL,
    fileblob BLOB NOT NULL,
    elsa_id INTEGER NOT NULL,
    filetyp TEXT NOT NULL,
    FOREIGN KEY (elsa_id) REFERENCES elsa (id)
);
CREATE TABLE IF NOT EXISTS elsa_media (
    id INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
    work_author TEXT,
    section_author TEXT,
    year TEXT,
    edition TEXT,
    work_title TEXT,
    chapter_title TEXT,
    location TEXT,
    publisher TEXT,
    signature TEXT,
    issue TEXT,
    pages TEXT,
    isbn TEXT,
    type TEXT,
    elsa_id INTEGER NOT NULL,
    FOREIGN KEY (elsa_id) REFERENCES elsa (id)
);
-- Newer editions detected for existing media, with order tracking.
CREATE TABLE IF NOT EXISTS neweditions (
    id INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
    new_bookdata TEXT,
    old_edition_id INTEGER,
    for_apparat INTEGER,
    ordered BOOLEAN DEFAULT (0),
    FOREIGN KEY (old_edition_id) REFERENCES media (id),
    FOREIGN KEY (for_apparat) REFERENCES semesterapparat (id)
);
-- Helpful indices to speed up frequent lookups and joins
CREATE INDEX IF NOT EXISTS idx_media_app_prof ON media(app_id, prof_id);
CREATE INDEX IF NOT EXISTS idx_media_deleted ON media(deleted);
CREATE INDEX IF NOT EXISTS idx_media_available ON media(available);
CREATE INDEX IF NOT EXISTS idx_messages_remind_at ON messages(remind_at);
CREATE INDEX IF NOT EXISTS idx_semesterapparat_prof ON semesterapparat(prof_id);
CREATE INDEX IF NOT EXISTS idx_semesterapparat_appnr ON semesterapparat(appnr);
COMMIT;

View File

@@ -0,0 +1,10 @@
-- Migration 0002: credential store for the WebADIS backend.
-- NOTE(review): the password is stored in plain text here — confirm this is
-- acceptable for the deployment environment.
BEGIN TRANSACTION;
CREATE TABLE IF NOT EXISTS webadis_login (
    id INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
    username TEXT NOT NULL,
    password TEXT NOT NULL
);
COMMIT;

View File

@@ -0,0 +1,6 @@
-- Migration 0003: tag webadis_login rows with the scope they apply to
-- (e.g. 'SAP' — Database.getWebADISAuth filters on this column).
BEGIN TRANSACTION;
ALTER TABLE webadis_login
ADD COLUMN effective_range TEXT;
COMMIT;

View File

@@ -3,7 +3,8 @@ from PySide6.QtCore import QThread
from PySide6.QtCore import Signal as Signal
from src.backend.database import Database
from src.logic.webrequest import BibTextTransformer, WebRequest
from src.backend.webadis import get_book_medianr
from src.logic.webrequest import BibTextTransformer, TransformerType, WebRequest
from src.shared.logging import log
@@ -37,7 +38,7 @@ class AvailChecker(QThread):
)
# Pre-create reusable request and transformer to avoid per-item overhead
self._request = WebRequest().set_apparat(self.appnumber)
self._rds_transformer = BibTextTransformer("RDS")
self._rds_transformer = BibTextTransformer(TransformerType.RDS)
def run(self):
self.db = Database()
@@ -65,6 +66,12 @@ class AvailChecker(QThread):
break
log.info(f"State of {link}: " + str(state))
# #print("Updating availability of " + str(book_id) + " to " + str(state))
# use get_book_medianr to update the medianr of the book in the database
auth = self.db.getWebADISAuth
medianr = get_book_medianr(rds.items[0].callnumber, self.appnumber, auth)
book_data = book["bookdata"]
book_data.medianr = medianr
self.db.updateBookdata(book["id"], book_data)
self.db.setAvailability(book_id, state)
count += 1
self.updateProgress.emit(count, len(self.links))

35
src/backend/webadis.py Normal file
View File

@@ -0,0 +1,35 @@
from playwright.sync_api import sync_playwright
def get_book_medianr(signature: str, semesterapparat_nr: int, auth: tuple) -> str:
    """Look up a book's media number in WebADIS via Playwright automation.

    Logs into the aDISWeb catalogue, searches for the given call number and
    returns the media number of the copy shelved in the given Semesterapparat.

    Args:
        signature: Call number (Signatur) of the book to search for.
        semesterapparat_nr: Number of the Semesterapparat the copy belongs to.
        auth: (username, password) pair, e.g. from Database.getWebADISAuth().

    Returns:
        The media number as a string, or None when no copy is assigned to the
        given Semesterapparat (same as the original implicit behavior).
    """
    with sync_playwright() as playwright:
        browser = playwright.chromium.launch(headless=True)
        context = browser.new_context()
        medianr = None
        try:
            page = context.new_page()
            page.goto(
                "https://bsz.ibs-bw.de:22998/aDISWeb/app?service=direct/0/Home/$DirectLink&sp=SDAP42"
            )
            # Log in with the supplied WebADIS credentials.
            page.get_by_role("textbox", name="Benutzer").fill(auth[0])
            page.get_by_role("textbox", name="Benutzer").press("Tab")
            page.get_by_role("textbox", name="Kennwort").fill(auth[1])
            page.get_by_role("textbox", name="Kennwort").press("Enter")
            # Search the catalogue by signature.
            page.get_by_role("button", name="Katalog").click()
            page.get_by_role("textbox", name="Signatur").click()
            page.get_by_role("textbox", name="Signatur").fill(signature)
            page.get_by_role("textbox", name="Signatur").press("Enter")
            book_list = page.locator("iframe").content_frame.get_by_role(
                "cell", name="Bibliothek der Pädagogischen"
            )
            # this will always find one result, we need to split the resulting text based on the entries that start with "* "
            book_entries = book_list.inner_text().split("\n")
            books = [entry for entry in book_entries if entry.startswith("* ")]
            for book in books:
                if f"Semesterapparat: {semesterapparat_nr}" in book:
                    # Entry format: "* <medianr>: ..." — extract the number.
                    medianr = book.split("* ")[1].split(":")[0]
                    break
        finally:
            # Fix: the original returned from inside the loop, so these close
            # calls were unreachable on the success path; always clean up.
            context.close()
            browser.close()
        return medianr

View File

@@ -80,6 +80,7 @@ class BookData:
old_book: Any | None = None
media_type: str | None = None #
in_library: bool | None = None # whether the book is in the library or not
medianr: int | None = None # Media number in the library system
def __post_init__(self):
self.library_location = (

View File

@@ -1,3 +1,4 @@
from enum import Enum
from typing import Any, Optional, Union
import requests
@@ -33,6 +34,14 @@ RATE_LIMIT = 20
RATE_PERIOD = 30
class TransformerType(Enum):
    """Output formats supported by BibTextTransformer (values are the wire-format names)."""

    ARRAY = "ARRAY"
    COinS = "COinS"
    BibTeX = "BibTeX"
    RIS = "RIS"
    RDS = "RDS"
class WebRequest:
def __init__(self) -> None:
"""Request data from the web, and format it depending on the mode."""
@@ -185,10 +194,16 @@ class BibTextTransformer:
ValueError: Raised if mode is not in valid_modes
"""
valid_modes = ["ARRAY", "COinS", "BibTeX", "RIS", "RDS"]
valid_modes = [
TransformerType.ARRAY,
TransformerType.COinS,
TransformerType.BibTeX,
TransformerType.RIS,
TransformerType.RDS,
]
def __init__(self, mode: str = "ARRAY") -> None:
self.mode = mode
def __init__(self, mode: TransformerType = TransformerType.ARRAY) -> None:
self.mode = mode.value
self.field = None
self.signature = None
if mode not in self.valid_modes:

View File

@@ -2,14 +2,15 @@ from __future__ import annotations
import json
import re
import sys
from dataclasses import dataclass
from dataclasses import field as dataclass_field
from typing import Any, List
import loguru
from src import LOG_DIR
from src.logic.dataclass import BookData
import loguru
import sys
log = loguru.logger
log.remove()
@@ -36,6 +37,7 @@ class Item:
department: str | None = dataclass_field(default_factory=str)
locationhref: str | None = dataclass_field(default_factory=str)
location: str | None = dataclass_field(default_factory=str)
ktrl_nr: str | None = dataclass_field(default_factory=str)
def from_dict(self, data: dict):
"""Import data from dict"""
@@ -382,6 +384,8 @@ class RDSData:
def transform(self, data: str):
# rds_availability = RDS_AVAIL_DATA()
# rds_data = RDS_GENERIC_DATA()
print(data)
def __get_raw_data(data: str) -> list:
# create base data to be turned into pydantic classes
data = data.split("RDS ----------------------------------")[1]

View File

@@ -1,14 +1,133 @@
import os
import time
from concurrent.futures import ThreadPoolExecutor
from queue import Empty, Queue
from PySide6 import QtCore, QtWidgets
from PySide6.QtMultimedia import QAudioOutput, QMediaPlayer
from src.backend.catalogue import Catalogue
from src.backend.database import Database
from src.backend.webadis import get_book_medianr
from src.logic.SRU import SWB
from src.shared.logging import log
from .widget_sources.admin_update_signatures_ui import Ui_Dialog
class MedianrThread(QtCore.QThread):
    """Qt worker thread that fills in missing media numbers via WebADIS.

    The book list is split into chunks, one per worker thread; each worker
    looks up the media number through get_book_medianr and persists it with
    updateBookdata. Progress is aggregated through a Queue and re-emitted as
    Qt signals.
    """

    # Emitted with the cumulative number of processed books.
    progress = QtCore.Signal(int)
    # Emitted with (processed, total) for UI counters.
    currtot = QtCore.Signal(int, int)

    def __init__(self, books=None, thread_count=6):
        """Prepare the worker.

        Args:
            books: List of book dicts (must contain "id" and "bookdata").
            thread_count: Desired number of parallel workers (clamped to >= 1).

        Raises:
            ValueError: When no WebADIS credentials are stored in the database.
        """
        super().__init__()
        self.books = books or []
        self.total = 0
        # Database instances are not always thread-safe across threads; create one per worker.
        # Keep a shared auth that is safe to read.
        db_main = Database()
        self.auth = db_main.getWebADISAuth()
        if self.auth == ("", ""):
            raise ValueError("WebADIS authentication not set in database.")
        self.thread_count = max(1, thread_count)
        self._stop_requested = False

    def run(self):
        """Process all books in parallel chunks, emitting progress as we go."""
        # Use ThreadPoolExecutor to parallelize I/O-bound tasks.
        total_books = len(self.books)
        if total_books == 0:
            log.debug("MedianrThread: no books to process")
            return
        # Ceiling division so every book lands in exactly one chunk.
        chunk_size = (total_books + self.thread_count - 1) // self.thread_count
        chunks = [
            self.books[i : i + chunk_size] for i in range(0, total_books, chunk_size)
        ]
        # queue for worker results and progress
        q = Queue()

        def medianrworker(chunk: list[dict]):
            # Each worker creates its own Database instance for thread-safety
            db = Database()
            for book in chunk:
                if self._stop_requested:
                    break
                try:
                    booknr = get_book_medianr(
                        book["bookdata"].signature,
                        db.getApparatNrByBookId(book["id"]),
                        self.auth,
                    )
                    log.debug(
                        f"Book ID {book['id']} - Signature {book['bookdata'].signature} - Medianr {booknr}"
                    )
                    book_data = book["bookdata"]
                    book_data.medianr = booknr
                    db.updateBookdata(book["id"], book_data)
                    q.put(("progress", 1))
                except Exception as e:
                    # Best-effort: count failures as progress so the bar completes.
                    log.error(f"Medianr worker error for book {book}: {e}")
                    q.put(("progress", 1))
                # presumably throttles WebADIS requests — TODO confirm the rate limit
                time.sleep(10)
            q.put(("done", None))

        processed = 0
        finished_workers = 0
        with ThreadPoolExecutor(max_workers=len(chunks)) as ex:
            futures = [ex.submit(medianrworker, ch) for ch in chunks]
            log.info(
                f"Launched {len(futures)} worker thread(s) for {total_books} entries: {[len(ch) for ch in chunks]} entries per thread."
            )
            # aggregate progress
            # NOTE(review): if a worker dies before enqueueing "done" (e.g.
            # Database() construction fails), this loop never terminates —
            # confirm whether a timeout/future check is needed.
            while finished_workers < len(chunks):
                try:
                    kind, payload = q.get(timeout=0.1)
                except Empty:
                    continue
                if kind == "progress":
                    processed += int(payload)
                    self.progress.emit(processed)
                    # emit currtot with processed and current chunk total as best-effort
                    self.currtot.emit(processed, total_books)
                elif kind == "done":
                    finished_workers += 1
        # ensure final progress reached total_books
        self.progress.emit(total_books)
        self.currtot.emit(total_books, total_books)

    def stop(self):
        """Request the thread to stop early."""
        self._stop_requested = True

    def process_chunk(self, books_chunk):
        # kept for backward compatibility but not used by run(); still usable externally
        db = Database()
        for index, book in enumerate(books_chunk):
            try:
                booknr = get_book_medianr(
                    book["bookdata"].signature,
                    db.getApparatNrByBookId(book["id"]),
                    self.auth,
                )
                log.debug(
                    f"Book ID {book['id']} - Signature {book['bookdata'].signature} - Medianr {booknr}"
                )
                book_data = book["bookdata"]
                book_data.medianr = booknr
                db.updateBookdata(book["id"], book_data)
            except Exception as e:
                log.error(f"Medianr process_chunk error for book {book}: {e}")
            self.progress.emit(index + 1)
            # NOTE(review): self.total + 1 here lags behind index + 1 on
            # repeated calls — confirm which counter the UI expects.
            self.currtot.emit(self.total + 1, len(books_chunk))
            self.total += 1
class UpdaterThread(QtCore.QThread):
progress = QtCore.Signal(int)
currtot = QtCore.Signal(int, int)
@@ -98,6 +217,8 @@ class UpdateSignatures(QtWidgets.QDialog, Ui_Dialog):
self.catalogue = Catalogue()
self.player = QMediaPlayer()
self.audio_output = QAudioOutput()
self.spin_thread_count.setMaximum(os.cpu_count())
self.btn_add_medianr.clicked.connect(self.add_medianr)
def play_sound(self, sound_file: str):
self.player.setAudioOutput(self.audio_output)
@@ -114,6 +235,16 @@ class UpdateSignatures(QtWidgets.QDialog, Ui_Dialog):
self.updater.finished.connect(self.updater.deleteLater)
self.updater.start()
def add_medianr(self):
    """Start a background MedianrThread that fills in missing media numbers."""
    pending = [
        book for book in self.db.getAllBooks() if book["bookdata"].medianr is None
    ]
    self.progressBar.setMaximum(len(pending))
    self.medianr_thread = MedianrThread(pending, self.spin_thread_count.value())
    self.medianr_thread.progress.connect(self.update_progress)
    self.medianr_thread.finished.connect(self.medianr_thread.deleteLater)
    self.medianr_thread.start()
def add_missing(self):
books = self.db.getAllBooks()
total_books = len(books)

View File

@@ -30,6 +30,47 @@
</property>
</widget>
</item>
<item row="1" column="1">
<layout class="QFormLayout" name="formLayout_2">
<item row="0" column="0">
<widget class="QLabel" name="label">
<property name="text">
<string>Anzahl paralleler Aktionen</string>
</property>
</widget>
</item>
<item row="1" column="1">
<widget class="QPushButton" name="btn_add_medianr">
<property name="text">
<string>Mediennummern ergänzen</string>
</property>
</widget>
</item>
<item row="0" column="1">
<widget class="QSpinBox" name="spin_thread_count">
<property name="minimum">
<number>1</number>
</property>
<property name="value">
<number>6</number>
</property>
</widget>
</item>
</layout>
</item>
<item row="2" column="1">
<spacer name="verticalSpacer">
<property name="orientation">
<enum>Qt::Vertical</enum>
</property>
<property name="sizeHint" stdset="0">
<size>
<width>20</width>
<height>40</height>
</size>
</property>
</spacer>
</item>
</layout>
</item>
<item>

View File

@@ -15,8 +15,9 @@ from PySide6.QtGui import (QBrush, QColor, QConicalGradient, QCursor,
QFont, QFontDatabase, QGradient, QIcon,
QImage, QKeySequence, QLinearGradient, QPainter,
QPalette, QPixmap, QRadialGradient, QTransform)
from PySide6.QtWidgets import (QApplication, QDialog, QFormLayout, QProgressBar,
QPushButton, QSizePolicy, QVBoxLayout, QWidget)
from PySide6.QtWidgets import (QApplication, QDialog, QFormLayout, QLabel,
QProgressBar, QPushButton, QSizePolicy, QSpacerItem,
QSpinBox, QVBoxLayout, QWidget)
class Ui_Dialog(object):
def setupUi(self, Dialog):
@@ -37,6 +38,32 @@ class Ui_Dialog(object):
self.formLayout.setWidget(0, QFormLayout.ItemRole.FieldRole, self.btn_add_missing_data)
self.formLayout_2 = QFormLayout()
self.formLayout_2.setObjectName(u"formLayout_2")
self.label = QLabel(Dialog)
self.label.setObjectName(u"label")
self.formLayout_2.setWidget(0, QFormLayout.ItemRole.LabelRole, self.label)
self.btn_add_medianr = QPushButton(Dialog)
self.btn_add_medianr.setObjectName(u"btn_add_medianr")
self.formLayout_2.setWidget(1, QFormLayout.ItemRole.FieldRole, self.btn_add_medianr)
self.spin_thread_count = QSpinBox(Dialog)
self.spin_thread_count.setObjectName(u"spin_thread_count")
self.spin_thread_count.setMinimum(1)
self.spin_thread_count.setValue(6)
self.formLayout_2.setWidget(0, QFormLayout.ItemRole.FieldRole, self.spin_thread_count)
self.formLayout.setLayout(1, QFormLayout.ItemRole.FieldRole, self.formLayout_2)
self.verticalSpacer = QSpacerItem(20, 40, QSizePolicy.Policy.Minimum, QSizePolicy.Policy.Expanding)
self.formLayout.setItem(2, QFormLayout.ItemRole.FieldRole, self.verticalSpacer)
self.verticalLayout.addLayout(self.formLayout)
@@ -56,5 +83,7 @@ class Ui_Dialog(object):
Dialog.setWindowTitle(QCoreApplication.translate("Dialog", u"Dialog", None))
self.btn_update_signatures.setText(QCoreApplication.translate("Dialog", u"Signaturen aktualisieren", None))
self.btn_add_missing_data.setText(QCoreApplication.translate("Dialog", u"Fehlende Daten hinzuf\u00fcgen", None))
self.label.setText(QCoreApplication.translate("Dialog", u"Anzahl Parraleler Aktionen", None))
self.btn_add_medianr.setText(QCoreApplication.translate("Dialog", u"Mediennummern erg\u00e4nzen", None))
# retranslateUi

View File

@@ -0,0 +1,20 @@
import sqlite3 as sql
from pathlib import Path

from src.backend.database import Database

# Dev smoke test: constructing a Database against a fresh file must create
# the schema via migrations and record each script in schema_migrations.
p = Path("devtests_test_migrations.db")
p.unlink(missing_ok=True)

print("Creating Database at", p)
db = Database(db_path=p)

conn = sql.connect(p)
try:
    cur = conn.cursor()
    cur.execute("SELECT name FROM sqlite_master WHERE type='table'")
    print("Tables:", sorted(row[0] for row in cur.fetchall()))
    cur.execute("SELECT id, applied_at FROM schema_migrations")
    print("Migrations applied:", cur.fetchall())
finally:
    conn.close()