chore(codebase): import fixes, restructuring
@@ -1,8 +1,6 @@
from .utils.icon import Icon

__version__ = "1.0.0"
__author__ = "Alexander Kirchner"
__all__ = ["Icon", "__author__", "__version__", "settings"]
__all__ = ["__author__", "__version__", "settings"]

import os
from pathlib import Path
@@ -1,22 +0,0 @@
__all__ = [
    "AdminCommands",
    "AutoAdder",
    "AvailChecker",
    "BookGrabber",
    "Database",
    "DocumentationThread",
    "NewEditionCheckerThread",
    "recreateElsaFile",
    "recreateFile",
    "Catalogue",
]

from .admin_console import AdminCommands
from .catalogue import Catalogue
from .create_file import recreateElsaFile, recreateFile
from .database import Database
from .documentation_thread import DocumentationThread
from .thread_bookgrabber import BookGrabber
from .thread_neweditions import NewEditionCheckerThread
from .threads_autoadder import AutoAdder
from .threads_availchecker import AvailChecker
@@ -1,110 +0,0 @@
import hashlib
import random

from .database import Database
import loguru
import sys
from src import LOG_DIR

log = loguru.logger
log.remove()
log.add(sys.stdout, level="INFO")
log.add(f"{LOG_DIR}/application.log", rotation="1 MB", retention="10 days")


# change passwords for apparats, change passwords for users, list users, create and delete users etc
# create a class that has all commands. for each command, create a function that does the thing
class AdminCommands:
    """Basic Admin commands for the admin console. This class is used to create, delete, and list users. It also has the ability to change passwords for users."""

    def __init__(self, db_path=None):
        """Default Constructor for the AdminCommands class."""
        if db_path is None:
            self.db = Database()
        else:
            self.db = Database(db_path=db_path)
        log.info("AdminCommands initialized with database connection.")
        log.debug("location: {}", self.db.db_path)

    def create_password(self, password: str) -> tuple[str, str]:
        """Create a hashed password and a salt for the password.

        Args:
            password (str): the base password to be hashed.

        Returns:
            tuple[str,str]: a tuple containing the hashed password and the salt used to hash the password.
        """
        salt = self.create_salt()
        hashed_password = self.hash_password(password)
        return (hashed_password, salt)

    def create_salt(self) -> str:
        """Generate a random 16 digit long salt for the password.

        Returns:
            str: the randomized salt
        """
        return "".join(
            random.choices(
                "abcdefghijklmnopqrstuvwxyzQWERTZUIOPLKJHGFDSAYXCVBNM0123456789", k=16
            )
        )

    def create_admin(self):
        """Create the admin in the database. This is only used once, when the database is created."""
        salt = self.create_salt()
        hashed_password = self.hash_password("admin")
        self.db.createUser("admin", salt + hashed_password, "admin", salt)

    def create_user(self, username: str, password: str, role: str = "user") -> bool:
        """Create a new user in the database.

        Args:
            username (str): the username of the user to be created.
            password (str): the password of the user to be created.
            role (str, optional): the role of the user to be created. Defaults to "user".
        """
        hashed_password, salt = self.create_password(password)
        status = self.db.createUser(
            user=username, password=salt + hashed_password, role=role, salt=salt
        )
        return status

    def hash_password(self, password: str) -> str:
        """Hash a password using SHA256.

        Args:
            password (str): the password to be hashed.

        Returns:
            str: the hashed password.
        """
        hashed = hashlib.sha256((password).encode("utf-8")).hexdigest()
        return hashed

    def list_users(self) -> list[tuple]:
        """List all available users in the database.

        Returns:
            list[tuple]: a list of all users, containing all stored data for each user in a tuple.
        """
        return self.db.getUsers()

    def delete_user(self, username: str):
        """Delete a selected user from the database.

        Args:
            username (str): the username of the user to be deleted.
        """
        self.db.deleteUser(username)

    def change_password(self, username, password):
        """change the password for a user.

        Args:
            username (str): username of the user to change the password for.
            password (str): the new, non-hashed password to change to.
        """
        hashed_password = self.hash_password(password)
        self.db.changePassword(username, hashed_password)
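For reference, a minimal usage sketch of the removed AdminCommands class; the import path is the pre-restructure one shown above, and the in-memory database path and sample credentials are illustrative assumptions, not project values:

from src.backend.admin_console import AdminCommands  # pre-restructure import path

admin = AdminCommands(db_path=":memory:")            # assumed example path
admin.create_user("jdoe", "s3cret", role="user")
print(admin.list_users())
admin.change_password("jdoe", "new-s3cret")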
@@ -4,7 +4,7 @@ import regex
import requests
from bs4 import BeautifulSoup

from src.logic import BookData as Book
from src.core.models import BookData as Book
from src.shared.logging import log

URL = "https://rds.ibs-bw.de/phfreiburg/opac/RDSIndex/Search?type0%5B%5D=allfields&lookfor0%5B%5D={}&join=AND&bool0%5B%5D=AND&type0%5B%5D=au&lookfor0%5B%5D=&join=AND&bool0%5B%5D=AND&type0%5B%5D=ti&lookfor0%5B%5D=&join=AND&bool0%5B%5D=AND&type0%5B%5D=ct&lookfor0%5B%5D=&join=AND&bool0%5B%5D=AND&type0%5B%5D=isn&lookfor0%5B%5D=&join=AND&bool0%5B%5D=AND&type0%5B%5D=ta&lookfor0%5B%5D=&join=AND&bool0%5B%5D=AND&type0%5B%5D=co&lookfor0%5B%5D=&join=AND&bool0%5B%5D=AND&type0%5B%5D=py&lookfor0%5B%5D=&join=AND&bool0%5B%5D=AND&type0%5B%5D=pp&lookfor0%5B%5D=&join=AND&bool0%5B%5D=AND&type0%5B%5D=pu&lookfor0%5B%5D=&join=AND&bool0%5B%5D=AND&type0%5B%5D=si&lookfor0%5B%5D=&join=AND&bool0%5B%5D=AND&type0%5B%5D=zr&lookfor0%5B%5D=&join=AND&bool0%5B%5D=AND&type0%5B%5D=cc&lookfor0%5B%5D=&join=AND&bool0%5B%5D=AND"

@@ -60,12 +60,14 @@ class Catalogue:
        title = title_el.get_text(strip=True) if title_el else None

        ppn_el = soup.find(
            "div", class_="col-xs-12 col-md-5 col-lg-4 rds-dl-head RDS_PPN"
            "div",
            class_="col-xs-12 col-md-5 col-lg-4 rds-dl-head RDS_PPN",
        )
        # in ppn_el, get text of div col-xs-12 col-md-7 col-lg-8 rds-dl-panel
        ppn = (
            ppn_el.find_next_sibling(
                "div", class_="col-xs-12 col-md-7 col-lg-8 rds-dl-panel"
                "div",
                class_="col-xs-12 col-md-7 col-lg-8 rds-dl-panel",
            ).get_text(strip=True)
            if ppn_el
            else None

@@ -73,18 +75,21 @@ class Catalogue:

        # get edition text at div class col-xs-12 col-md-5 col-lg-4 rds-dl-head RDS_EDITION
        edition_el = soup.find(
            "div", class_="col-xs-12 col-md-5 col-lg-4 rds-dl-head RDS_EDITION"
            "div",
            class_="col-xs-12 col-md-5 col-lg-4 rds-dl-head RDS_EDITION",
        )
        edition = (
            edition_el.find_next_sibling(
                "div", class_="col-xs-12 col-md-7 col-lg-8 rds-dl-panel"
                "div",
                class_="col-xs-12 col-md-7 col-lg-8 rds-dl-panel",
            ).get_text(strip=True)
            if edition_el
            else None
        )

        authors = soup.find_all(
            "div", class_="col-xs-12 col-md-5 col-lg-4 rds-dl-head RDS_PERSON"
            "div",
            class_="col-xs-12 col-md-5 col-lg-4 rds-dl-head RDS_PERSON",
        )
        author = None
        if authors:

@@ -92,7 +97,8 @@ class Catalogue:
            author_names = []
            for author in authors:
                panel = author.find_next_sibling(
                    "div", class_="col-xs-12 col-md-7 col-lg-8 rds-dl-panel"
                    "div",
                    class_="col-xs-12 col-md-7 col-lg-8 rds-dl-panel",
                )
                if panel:
                    links = panel.find_all("a")

@@ -109,7 +115,7 @@ class Catalogue:
                groups = []
                cur = {}
                for node in panel.select(
                    "div.rds-dl.RDS_SIGNATURE, div.rds-dl.RDS_STATUS, div.rds-dl.RDS_LOCATION, div.col-xs-12.space"
                    "div.rds-dl.RDS_SIGNATURE, div.rds-dl.RDS_STATUS, div.rds-dl.RDS_LOCATION, div.col-xs-12.space",
                ):
                    classes = node.get("class", [])
                    # Separator between entries

@@ -151,16 +157,15 @@ class Catalogue:
                author=author,
                edition=edition,
            )
        else:
            return Book(
                title=title,
                ppn=ppn,
                signature=signature,
                library_location=loc.split("\n\n")[-1],
                link=elink,
                author=author,
                edition=edition,
            )
        return Book(
            title=title,
            ppn=ppn,
            signature=signature,
            library_location=loc.split("\n\n")[-1],
            link=elink,
            author=author,
            edition=edition,
        )

    def get(self, ppn: str) -> Book | None:
        # based on PPN, get title, people, edition, year, language, pages, isbn,

@@ -208,14 +213,16 @@ class Catalogue:
        soup = BeautifulSoup(result, "html.parser")
        # get all authors, return them as a string seperated by ;
        authors = soup.find_all(
            "div", class_="col-xs-12 col-md-5 col-lg-4 rds-dl-head RDS_PERSON"
            "div",
            class_="col-xs-12 col-md-5 col-lg-4 rds-dl-head RDS_PERSON",
        )
        if authors:
            # get the names of the a href links in the div col-xs-12 col-md-7 col-lg-8 rds-dl-panel
            author_names = []
            for author in authors:
                panel = author.find_next_sibling(
                    "div", class_="col-xs-12 col-md-7 col-lg-8 rds-dl-panel"
                    "div",
                    class_="col-xs-12 col-md-7 col-lg-8 rds-dl-panel",
                )
                if panel:
                    links = panel.find_all("a")

@@ -236,7 +243,7 @@ class Catalogue:
                groups = []
                cur = {}
                for node in panel.select(
                    "div.rds-dl.RDS_SIGNATURE, div.rds-dl.RDS_STATUS, div.rds-dl.RDS_LOCATION, div.col-xs-12.space"
                    "div.rds-dl.RDS_SIGNATURE, div.rds-dl.RDS_STATUS, div.rds-dl.RDS_LOCATION, div.col-xs-12.space",
                ):
                    classes = node.get("class", [])
                    # Separator between entries

@@ -271,9 +278,8 @@ class Catalogue:
                if "semesterapparat" in loc:
                    signature = g.get("signature")
                    return signature
                else:
                    signature = g.get("signature")
                    return signature
                signature = g.get("signature")
                return signature
        print("No signature found")
        return signature
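The head/panel lookup in the hunks above follows a generic BeautifulSoup pattern (find the label div, then read its sibling panel). A standalone sketch with invented HTML, using only documented bs4 calls:

from bs4 import BeautifulSoup

html = """
<div class="col-xs-12 col-md-5 col-lg-4 rds-dl-head RDS_PPN">PPN</div>
<div class="col-xs-12 col-md-7 col-lg-8 rds-dl-panel">123456789</div>
"""
soup = BeautifulSoup(html, "html.parser")
head = soup.find("div", class_="col-xs-12 col-md-5 col-lg-4 rds-dl-head RDS_PPN")
panel = (
    head.find_next_sibling("div", class_="col-xs-12 col-md-7 col-lg-8 rds-dl-panel")
    if head
    else None
)
print(panel.get_text(strip=True) if panel else None)  # -> 123456789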
@@ -1,72 +0,0 @@
import os
from pathlib import Path


from src.backend.database import Database

import loguru
import sys
from src import LOG_DIR
log = loguru.logger
log.remove()
log.add(sys.stdout, level="INFO")
log.add(f"{LOG_DIR}/application.log", rotation="1 MB", retention="10 days")


db = Database()

def recreateFile(name: str, app_id: int, filetype: str, open: bool = True) -> Path:
    """
    recreateFile creates a file from the database and opens it in the respective program, if the open parameter is set to True.

    Args:
    ----
    - name (str): The filename selected by the user.
    - app_id (str): the id of the apparatus.
    - filetype (str): the extension of the file to be created.
    - open (bool, optional): Determines if the file should be opened. Defaults to True.

    Returns:
    -------
    - Path: Absolute path to the file.
    """
    path = db.recreateFile(name, app_id, filetype=filetype)
    path = Path(path)
    log.info(f"File created: {path}")
    if open:
        if os.getenv("OS") == "Windows_NT":
            path = path.resolve()
            os.startfile(path)
        else:
            path = path.resolve()
            os.system(f"open {path}")
    return path


def recreateElsaFile(filename: str, filetype: str, open=True) -> Path:
    """
    recreateElsaFile creates a file from the database and opens it in the respective program, if the open parameter is set to True.

    Args:
    ----
    - filename (str): The filename selected by the user.
    - open (bool, optional): Determines if the file should be opened. Defaults to True.

    Returns:
    -------
    - Path: Absolute path to the file.
    """
    if filename.startswith("(") and filename.endswith(")"):
        filename = str(filename[1:-1].replace("'", ""))
    if not isinstance(filename, str):
        raise ValueError("filename must be a string")
    path = db.recreateElsaFile(filename, filetype)
    path = Path(path)
    if open:
        if os.getenv("OS") == "Windows_NT":
            path = path.resolve()
            os.startfile(path)
        else:
            path = path.resolve()
            os.system(f"open {path}")
    return path
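A hypothetical call of the removed helper; the filename, apparatus id and filetype are made-up values, and the import path is the pre-restructure one:

from src.backend.create_file import recreateFile  # pre-restructure import path

path = recreateFile("handout.pdf", 3, filetype="pdf", open=False)  # example arguments
print(path.resolve())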
File diff suppressed because it is too large
@@ -1,112 +0,0 @@
CREATE_TABLE_APPARAT = """CREATE TABLE semesterapparat (
    id INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
    name TEXT,
    prof_id INTEGER,
    fach TEXT,
    appnr INTEGER,
    erstellsemester TEXT,
    verlängert_am TEXT,
    dauer BOOLEAN,
    verlängerung_bis TEXT,
    deletion_status INTEGER,
    deleted_date TEXT,
    apparat_id_adis INTEGER,
    prof_id_adis INTEGER,
    konto INTEGER,
    FOREIGN KEY (prof_id) REFERENCES prof (id)
)"""
CREATE_TABLE_MEDIA = """CREATE TABLE media (
    id INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
    bookdata TEXT,
    app_id INTEGER,
    prof_id INTEGER,
    deleted INTEGER DEFAULT (0),
    available BOOLEAN,
    reservation BOOLEAN,
    FOREIGN KEY (prof_id) REFERENCES prof (id),
    FOREIGN KEY (app_id) REFERENCES semesterapparat (id)
)"""

CREATE_TABLE_FILES = """CREATE TABLE files (
    id INTEGER PRIMARY KEY,
    filename TEXT,
    fileblob BLOB,
    app_id INTEGER,
    filetyp TEXT,
    prof_id INTEGER REFERENCES prof (id),
    FOREIGN KEY (app_id) REFERENCES semesterapparat (id)
)"""
CREATE_TABLE_MESSAGES = """CREATE TABLE messages (
    id INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
    created_at date NOT NULL DEFAULT CURRENT_TIMESTAMP,
    message TEXT NOT NULL,
    remind_at date NOT NULL DEFAULT CURRENT_TIMESTAMP,
    user_id INTEGER NOT NULL,
    appnr INTEGER,
    FOREIGN KEY (user_id) REFERENCES user (id)
)"""
CREATE_TABLE_PROF = """CREATE TABLE prof (
    id INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
    titel TEXT,
    fname TEXT,
    lname TEXT,
    fullname TEXT NOT NULL UNIQUE,
    mail TEXT,
    telnr TEXT
)"""
CREATE_TABLE_USER = """CREATE TABLE user (
    id integer NOT NULL PRIMARY KEY AUTOINCREMENT,
    created_at datetime NOT NULL DEFAULT CURRENT_TIMESTAMP,
    username TEXT NOT NULL UNIQUE,
    password TEXT NOT NULL,
    salt TEXT NOT NULL,
    role TEXT NOT NULL,
    email TEXT UNIQUE,
    name TEXT
)"""
CREATE_TABLE_SUBJECTS = """CREATE TABLE subjects (
    id INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
    name TEXT NOT NULL UNIQUE
)"""

CREATE_ELSA_TABLE = """CREATE TABLE elsa (
    id INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
    date TEXT NOT NULL,
    semester TEXT NOT NULL,
    prof_id INTEGER NOT NULL
)"""
CREATE_ELSA_FILES_TABLE = """CREATE TABLE elsa_files (
    id INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
    filename TEXT NOT NULL,
    fileblob BLOB NOT NULL,
    elsa_id INTEGER NOT NULL,
    filetyp TEXT NOT NULL,
    FOREIGN KEY (elsa_id) REFERENCES elsa (id)
)"""
CREATE_ELSA_MEDIA_TABLE = """CREATE TABLE elsa_media (
    id INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
    work_author TEXT,
    section_author TEXT,
    year TEXT,
    edition TEXT,
    work_title TEXT,
    chapter_title TEXT,
    location TEXT,
    publisher TEXT,
    signature TEXT,
    issue TEXT,
    pages TEXT,
    isbn TEXT,
    type TEXT,
    elsa_id INTEGER NOT NULL,
    FOREIGN KEY (elsa_id) REFERENCES elsa (id)
)"""
CREATE_TABLE_NEWEDITIONS = """CREATE TABLE neweditions (
    id INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
    new_bookdata TEXT,
    old_edition_id INTEGER,
    for_apparat INTEGER,
    ordered BOOLEAN DEFAULT (0),
    FOREIGN KEY (old_edition_id) REFERENCES media (id),
    FOREIGN KEY (for_apparat) REFERENCES semesterapparat (id)
)"""
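The removed constants can be exercised directly against a throwaway SQLite database, for example:

import sqlite3

conn = sqlite3.connect(":memory:")       # in-memory database, for illustration only
conn.execute(CREATE_TABLE_PROF)          # prof first, since other tables reference it
conn.execute(CREATE_TABLE_APPARAT)
conn.execute(CREATE_TABLE_MEDIA)
conn.commit()
conn.close()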
@@ -1,20 +0,0 @@
import os
from src import settings

database = settings.database


def delete_temp_contents():
    """
    delete_temp_contents deletes the contents of the temp directory.
    """
    path = database.temp.expanduser()
    for root, dirs, files in os.walk(path):
        for file in files:
            os.remove(os.path.join(root, file))
        for dir in dirs:
            os.rmdir(os.path.join(root, dir))


if __name__ == "__main__":
    delete_temp_contents()
@@ -1,23 +0,0 @@
from PySide6.QtCore import QThread, Slot
from src.utils.documentation import website, QuietHandler
from wsgiref.simple_server import make_server


class DocumentationThread(QThread):
    def __init__(self):
        super().__init__()
        self._server = None  # store server so we can shut it down

    def run(self):
        # launch_documentation()
        self._server = make_server(
            "localhost", 8000, website(), handler_class=QuietHandler
        )
        while not self.isInterruptionRequested():
            self._server.handle_request()

    @Slot()  # slot you can connect to aboutToQuit
    def stop(self):
        self.requestInterruption()  # ask the loop above to exit
        if self._server:
            self._server.shutdown()  # unblock handle_request()
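A sketch of how the removed thread could be wired into a Qt application; the aboutToQuit hookup is what the @Slot() above is meant for, and the import path is the pre-restructure one:

from PySide6.QtWidgets import QApplication
from src.backend.documentation_thread import DocumentationThread  # pre-restructure path

app = QApplication([])
docs = DocumentationThread()
app.aboutToQuit.connect(docs.stop)  # shut the wsgiref server down on exit
docs.start()                        # serves the docs on localhost:8000 in the background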
@@ -1,68 +0,0 @@
import os
import sqlite3 as sql
from pathlib import Path
from typing import List

from src import DATABASE_DIR, settings
from src.shared.logging import log

MIGRATIONS_DIR = Path(__file__).parent / "migrations"


def _ensure_migrations_table(conn: sql.Connection) -> None:
    cursor = conn.cursor()
    cursor.execute(
        """
        CREATE TABLE IF NOT EXISTS schema_migrations (
            id TEXT PRIMARY KEY,
            applied_at DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP
        )
        """
    )
    conn.commit()


def _applied_migrations(conn: sql.Connection) -> List[str]:
    cursor = conn.cursor()
    cursor.execute("SELECT id FROM schema_migrations ORDER BY id")
    rows = cursor.fetchall()
    return [r[0] for r in rows]


def _apply_sql_file(conn: sql.Connection, path: Path) -> None:
    log.info(f"Applying migration {path.name}")
    sql_text = path.read_text(encoding="utf-8")
    cursor = conn.cursor()
    cursor.executescript(sql_text)
    cursor.execute(
        "INSERT OR REPLACE INTO schema_migrations (id) VALUES (?)", (path.name,)
    )
    conn.commit()


def run_migrations(db_path: Path) -> None:
    """Run all unapplied migrations from the migrations directory against the database at db_path."""
    if not MIGRATIONS_DIR.exists():
        log.debug("Migrations directory does not exist, skipping migrations")
        return

    # Ensure database directory exists
    db_dir = settings.database.path or Path(DATABASE_DIR)
    if not db_dir.exists():
        os.makedirs(db_dir, exist_ok=True)

    conn = sql.connect(db_path)
    try:
        _ensure_migrations_table(conn)
        applied = set(_applied_migrations(conn))

        migration_files = sorted(
            [p for p in MIGRATIONS_DIR.iterdir() if p.suffix in (".sql",)]
        )
        for m in migration_files:
            if m.name in applied:
                log.debug(f"Skipping already applied migration {m.name}")
                continue
            _apply_sql_file(conn, m)
    finally:
        conn.close()
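A hypothetical invocation of the removed runner; the database path is an assumption for illustration:

from pathlib import Path

run_migrations(Path("data/semesterapparat.db"))  # applies every *.sql file not yet recorded in schema_migrations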
@@ -1,132 +0,0 @@
BEGIN TRANSACTION;

CREATE TABLE IF NOT EXISTS semesterapparat (
    id INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
    name TEXT,
    prof_id INTEGER,
    fach TEXT,
    appnr INTEGER,
    erstellsemester TEXT,
    verlängert_am TEXT,
    dauer BOOLEAN,
    verlängerung_bis TEXT,
    deletion_status INTEGER,
    deleted_date TEXT,
    apparat_id_adis INTEGER,
    prof_id_adis INTEGER,
    konto INTEGER,
    FOREIGN KEY (prof_id) REFERENCES prof (id)
);

CREATE TABLE IF NOT EXISTS media (
    id INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
    bookdata TEXT,
    app_id INTEGER,
    prof_id INTEGER,
    deleted INTEGER DEFAULT (0),
    available BOOLEAN,
    reservation BOOLEAN,
    FOREIGN KEY (prof_id) REFERENCES prof (id),
    FOREIGN KEY (app_id) REFERENCES semesterapparat (id)
);

CREATE TABLE IF NOT EXISTS files (
    id INTEGER PRIMARY KEY,
    filename TEXT,
    fileblob BLOB,
    app_id INTEGER,
    filetyp TEXT,
    prof_id INTEGER REFERENCES prof (id),
    FOREIGN KEY (app_id) REFERENCES semesterapparat (id)
);

CREATE TABLE IF NOT EXISTS messages (
    id INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
    created_at date NOT NULL DEFAULT CURRENT_TIMESTAMP,
    message TEXT NOT NULL,
    remind_at date NOT NULL DEFAULT CURRENT_TIMESTAMP,
    user_id INTEGER NOT NULL,
    appnr INTEGER,
    FOREIGN KEY (user_id) REFERENCES user (id)
);

CREATE TABLE IF NOT EXISTS prof (
    id INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
    titel TEXT,
    fname TEXT,
    lname TEXT,
    fullname TEXT NOT NULL UNIQUE,
    mail TEXT,
    telnr TEXT
);

CREATE TABLE IF NOT EXISTS user (
    id integer NOT NULL PRIMARY KEY AUTOINCREMENT,
    created_at datetime NOT NULL DEFAULT CURRENT_TIMESTAMP,
    username TEXT NOT NULL UNIQUE,
    password TEXT NOT NULL,
    salt TEXT NOT NULL,
    role TEXT NOT NULL,
    email TEXT UNIQUE,
    name TEXT
);

CREATE TABLE IF NOT EXISTS subjects (
    id INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
    name TEXT NOT NULL UNIQUE
);

CREATE TABLE IF NOT EXISTS elsa (
    id INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
    date TEXT NOT NULL,
    semester TEXT NOT NULL,
    prof_id INTEGER NOT NULL
);

CREATE TABLE IF NOT EXISTS elsa_files (
    id INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
    filename TEXT NOT NULL,
    fileblob BLOB NOT NULL,
    elsa_id INTEGER NOT NULL,
    filetyp TEXT NOT NULL,
    FOREIGN KEY (elsa_id) REFERENCES elsa (id)
);

CREATE TABLE IF NOT EXISTS elsa_media (
    id INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
    work_author TEXT,
    section_author TEXT,
    year TEXT,
    edition TEXT,
    work_title TEXT,
    chapter_title TEXT,
    location TEXT,
    publisher TEXT,
    signature TEXT,
    issue TEXT,
    pages TEXT,
    isbn TEXT,
    type TEXT,
    elsa_id INTEGER NOT NULL,
    FOREIGN KEY (elsa_id) REFERENCES elsa (id)
);

CREATE TABLE IF NOT EXISTS neweditions (
    id INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
    new_bookdata TEXT,
    old_edition_id INTEGER,
    for_apparat INTEGER,
    ordered BOOLEAN DEFAULT (0),
    FOREIGN KEY (old_edition_id) REFERENCES media (id),
    FOREIGN KEY (for_apparat) REFERENCES semesterapparat (id)
);

-- Helpful indices to speed up frequent lookups and joins
CREATE INDEX IF NOT EXISTS idx_media_app_prof ON media(app_id, prof_id);
CREATE INDEX IF NOT EXISTS idx_media_deleted ON media(deleted);
CREATE INDEX IF NOT EXISTS idx_media_available ON media(available);
CREATE INDEX IF NOT EXISTS idx_messages_remind_at ON messages(remind_at);
CREATE INDEX IF NOT EXISTS idx_semesterapparat_prof ON semesterapparat(prof_id);
CREATE INDEX IF NOT EXISTS idx_semesterapparat_appnr ON semesterapparat(appnr);

COMMIT;
@@ -1,10 +0,0 @@
BEGIN TRANSACTION;

CREATE TABLE IF NOT EXISTS webadis_login (
    id INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
    username TEXT NOT NULL,
    password TEXT NOT NULL
);

COMMIT;
@@ -1,6 +0,0 @@
BEGIN TRANSACTION;

ALTER TABLE webadis_login
ADD COLUMN effective_range TEXT;

COMMIT;
@@ -1,10 +0,0 @@
import pickle
from typing import Any, ByteString


def make_pickle(data: Any):
    return pickle.dumps(data)


def load_pickle(data: ByteString):
    return pickle.loads(data)
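Round-trip sketch of the removed helpers; the payload is an example value, not project data:

blob = make_pickle({"ppn": "123456789"})
assert load_pickle(blob) == {"ppn": "123456789"}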
@@ -1,26 +0,0 @@
from dataclasses import dataclass, field

import yaml


@dataclass
class Settings:
    """Settings for the app."""

    save_path: str
    database_name: str
    database_path: str
    bib_id: str
    default_apps: bool = True
    custom_applications: list[dict] = field(default_factory=list)

    def save_settings(self):
        """Save the settings to the config file."""
        with open("config.yaml", "w") as f:
            yaml.dump(self.__dict__, f)

    def load_settings(self):
        """Load the settings from the config file."""
        with open("config.yaml", "r") as f:
            data = yaml.safe_load(f)
            return data
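A round-trip sketch of the removed Settings class; every field value below is a placeholder, not a value used by the project:

s = Settings(
    save_path="~/semap",
    database_name="semap.db",
    database_path="~/semap/db",
    bib_id="example-bib-id",
)
s.save_settings()          # writes config.yaml in the working directory
print(s.load_settings())   # reads it back as a plain dict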
@@ -1,199 +0,0 @@
from PySide6.QtCore import QThread, Signal

from src.backend import Database
from src.logic.webrequest import BibTextTransformer, WebRequest
from src.shared.logging import log

# Logger configured centrally in main; this module just uses `log`


class BookGrabber(QThread):
    updateSignal = Signal(int, int)
    done = Signal()

    def __init__(self):
        super(BookGrabber, self).__init__(parent=None)
        self.is_Running = True
        log.info("Starting worker thread")
        self.data = []
        self.app_id = None
        self.prof_id = None
        self.mode = None
        self.book_id = None
        self.use_any = False
        self.use_exact = False
        self.app_nr = None
        self.tstate = (self.app_id, self.prof_id, self.mode, self.data)
        self.request = WebRequest()
        self.db = Database()

    def add_values(
        self, app_id: int, prof_id: int, mode: str, data, any_book=False, exact=False
    ):
        self.app_id = app_id
        self.prof_id = prof_id
        self.mode = mode
        self.data: list[str] = data
        self.use_any = any_book
        self.use_exact = exact
        log.info(f"Working on {len(self.data)} entries")
        self.tstate = (self.app_nr, self.prof_id, self.mode, self.data)
        log.debug("State: " + str(self.tstate))
        app_nr = self.db.query_db(
            "SELECT appnr FROM semesterapparat WHERE id = ?", (self.app_id,)
        )[0][0]
        self.request.set_apparat(app_nr)
        # log.debug(self.tstate)

    def run(self):
        item = 0
        iterdata = self.data
        # log.debug(iterdata)

        for entry in iterdata:
            # log.debug(entry)
            log.info("Processing entry: {}", entry)

            webdata = self.request.get_ppn(entry)
            if self.use_any:
                webdata = webdata.use_any_book
            webdata = webdata.get_data()

            if webdata == "error":
                continue

            bd = BibTextTransformer(self.mode)
            log.debug(webdata)
            if self.mode == "ARRAY":
                if self.use_exact:
                    bd = bd.use_signature(entry)
                bd = bd.get_data(webdata).return_data()
                log.debug(bd)
                if bd is None:
                    # bd = BookData
                    continue
                bd.signature = entry
            transformer = (
                BibTextTransformer("RDS").get_data(webdata).return_data("rds_data")
            )

            # confirm lock is acquired
            self.db.addBookToDatabase(bd, self.app_id, self.prof_id)
            # get latest book id
            self.book_id = self.db.getLastBookId()
            log.info("Added book to database")
            state = 0
            for result in transformer.RDS_DATA:
                # log.debug(result.RDS_LOCATION)
                if str(self.app_nr) in result.RDS_LOCATION:
                    state = 1
                    break

            log.info(f"State of {entry}: {state}")
            log.debug(
                "updating availability of " + str(self.book_id) + " to " + str(state)
            )
            try:
                self.db.setAvailability(self.book_id, state)
                log.debug("Added book to database")
            except Exception as e:
                log.error(f"Failed to update availability: {e}")
                log.debug("Failed to update availability: " + str(e))

            # time.sleep(5)
            item += 1
            self.updateSignal.emit(item, len(self.data))
        log.info("Worker thread finished")
        # self.done.emit()
        self.quit()

    def stop(self):
        self.is_Running = False


class BookGrabberTest(QThread):
    updateSignal = Signal(int, int)
    done = Signal()

    def __init__(self, appnr: int):
        super(BookGrabberTest, self).__init__(parent=None)
        self.is_Running = True
        log.info("Starting worker thread")
        self.data = None
        self.app_nr = None
        self.prof_id = None
        self.mode = None
        self.book_id = None
        self.use_any = False
        self.use_exact = False
        self.app_nr = appnr
        self.tstate = (self.app_nr, self.prof_id, self.mode, self.data)
        self.results = []

    def add_values(
        self, app_nr: int, prof_id: int, mode: str, data, any_book=False, exact=False
    ):
        self.app_nr = app_nr
        self.prof_id = prof_id
        self.mode = mode
        self.data = data
        self.use_any = any_book
        self.use_exact = exact
        log.info(f"Working on {len(self.data)} entries")
        self.tstate = (self.app_nr, self.prof_id, self.mode, self.data)
        log.debug("State: " + str(self.tstate))
        # log.debug(self.tstate)

    def run(self):
        item = 0
        iterdata = self.data
        # log.debug(iterdata)
        for entry in iterdata:
            # log.debug(entry)
            signature = str(entry)
            log.info("Processing entry: " + signature)

            webdata = WebRequest().set_apparat(self.app_nr).get_ppn(entry)
            if self.use_any:
                webdata = webdata.use_any_book
            webdata = webdata.get_data()

            if webdata == "error":
                continue

            bd = BibTextTransformer(self.mode)
            if self.mode == "ARRAY":
                if self.use_exact:
                    bd = bd.use_signature(entry)
                bd = bd.get_data(webdata).return_data()
                if bd is None:
                    # bd = BookData
                    continue
                bd.signature = entry
            transformer = (
                BibTextTransformer("RDS").get_data(webdata).return_data("rds_data")
            )

            # confirm lock is acquired
            # get latest book id
            log.info("Added book to database")
            state = 0
            for result in transformer.RDS_DATA:
                # log.debug(result.RDS_LOCATION)
                if str(self.app_nr) in result.RDS_LOCATION:
                    state = 1
                    break

            log.info(f"State of {signature}: {state}")
            # log.debug("updating availability of " + str(self.book_id) + " to " + str(state))
            self.results.append(bd)

            # time.sleep(5)
            item += 1
            self.updateSignal.emit(item, len(self.data))
        log.info("Worker thread finished")
        # self.done.emit()
        self.quit()

    def stop(self):
        self.is_Running = False
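A wiring sketch for the removed worker, assuming the surrounding Qt application supplies the ids and signatures; all argument values here are illustrative:

grabber = BookGrabber()
grabber.updateSignal.connect(lambda done, total: print(f"{done}/{total}"))
grabber.add_values(app_id=1, prof_id=2, mode="ARRAY", data=["QA 123"], exact=True)  # example ids/signature
grabber.start()  # QThread.run() then processes the signatures in the background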
@@ -10,7 +10,7 @@ from PySide6.QtCore import QThread, Signal

# from src.logic.webrequest import BibTextTransformer, WebRequest
from src.backend.catalogue import Catalogue
from src.logic import BookData
from src.core.models import BookData
from src.logic.SRU import SWB
from src.shared.logging import log

@@ -28,10 +28,10 @@ RVK_ALLOWED = r"[A-Z0-9.\-\/]" # conservative RVK character set


def find_newer_edition(
    swb_result: BookData, dnb_result: List[BookData]
    swb_result: BookData,
    dnb_result: List[BookData],
) -> Optional[List[BookData]]:
    """
    New edition if:
    """New edition if:
    - year > swb.year OR
    - edition_number > swb.edition_number
    BUT: discard any candidate with year < swb.year (if both years are known).

@@ -161,8 +161,8 @@ class NewEditionCheckerThread(QThread):

    def __init__(self, entries: Optional[list["BookData"]] = None, parent=None):
        super().__init__(parent)
        self.entries: list["BookData"] = entries if entries is not None else []
        self.results: list[tuple["BookData", list["BookData"]]] = []
        self.entries: list[BookData] = entries if entries is not None else []
        self.results: list[tuple[BookData, list[BookData]]] = []

    def reset(self):
        self.entries = []

@@ -196,12 +196,13 @@ class NewEditionCheckerThread(QThread):

    @classmethod
    def _process_book(
        cls, book: "BookData"
        cls,
        book: "BookData",
    ) -> tuple["BookData", list["BookData"]] | None:
        """Process one book; returns (original, [found editions]) or None on failure."""
        if not book.title:
            return None
        response: list["BookData"] = []
        response: list[BookData] = []
        query = [
            f"pica.tit={book.title}",
            f"pica.vlg={book.publisher}",

@@ -303,7 +304,7 @@ class NewEditionCheckerThread(QThread):
            futures = [ex.submit(self._worker, ch, q) for ch in chunks]

            log.info(
                f"Launched {len(futures)} worker thread(s) for {total} entries: {sizes} entries per thread."
                f"Launched {len(futures)} worker thread(s) for {total} entries: {sizes} entries per thread.",
            )
            for idx, sz in enumerate(sizes, 1):
                log.debug(f"Thread {idx}: {sz} entries")
@@ -1,59 +0,0 @@
import sys
import time

import loguru

# from icecream import ic
from PySide6.QtCore import QThread
from PySide6.QtCore import Signal as Signal

from src import LOG_DIR
from src.backend import Database

log = loguru.logger
log.remove()
log.add(sys.stdout, level="INFO")
log.add(f"{LOG_DIR}/application.log", rotation="1 MB", retention="10 days")


# from src.transformers import RDS_AVAIL_DATA


class AutoAdder(QThread):
    updateSignal = Signal(int)

    setTextSignal = Signal(int)
    progress = Signal(int)

    def __init__(self, data=None, app_id=None, prof_id=None, parent=None):
        super().__init__(parent)
        self.data = data
        self.app_id = app_id
        self.prof_id = prof_id

        # #print("Launched AutoAdder")
        # #print(self.data, self.app_id, self.prof_id)

    def run(self):
        self.db = Database()
        # show the dialog, start the thread to gather data and dynamically update progressbar and listwidget
        log.info("Starting worker thread")
        item = 0
        for entry in self.data:
            try:
                self.updateSignal.emit(item)
                self.setTextSignal.emit(entry)
                item += 1
                self.progress.emit(item)
                time.sleep(1)

            except Exception as e:
                # #print(e)
                log.exception(
                    f"The query failed with message {e} for signature {entry}"
                )
                continue
        if item == len(self.data):
            log.info("Worker thread finished")
            # teminate thread
            self.finished.emit()
@@ -1,83 +0,0 @@
# from icecream import ic
from PySide6.QtCore import QThread
from PySide6.QtCore import Signal as Signal

from src.backend.database import Database
from src.backend.webadis import get_book_medianr
from src.logic.webrequest import BibTextTransformer, TransformerType, WebRequest
from src.shared.logging import log


class AvailChecker(QThread):
    updateSignal = Signal(str, int)
    updateProgress = Signal(int, int)

    def __init__(
        self,
        links: list[str] | None = None,
        appnumber: int | None = None,
        parent=None,
        books: list[dict] | None = None,
    ):
        if links is None:
            links = []
        super().__init__(parent)
        log.info("Starting worker thread")
        log.info(
            "Checking availability for "
            + str(links)
            + " with appnumber "
            + str(appnumber)
            + "..."
        )
        self.links = links
        self.appnumber = appnumber
        self.books = books or []
        log.info(
            f"Started worker with appnumber: {self.appnumber} and links: {self.links} and {len(self.books)} books..."
        )
        # Pre-create reusable request and transformer to avoid per-item overhead
        self._request = WebRequest().set_apparat(self.appnumber)
        self._rds_transformer = BibTextTransformer(TransformerType.RDS)

    def run(self):
        self.db = Database()
        state = 0
        count = 0
        for link in self.links:
            log.info("Processing entry: " + str(link))
            data = self._request.get_ppn(link).get_data()
            rds = self._rds_transformer.get_data(data).return_data("rds_availability")

            book_id = None
            if not rds or not rds.items:
                log.warning(f"No RDS data found for link {link}")
                continue
            for item in rds.items:
                sign = item.superlocation
                loc = item.location
                # # #print(item.location)
                if str(self.appnumber) in sign or str(self.appnumber) in loc:
                    state = 1
                    break
            for book in self.books:
                if book["bookdata"].signature == link:
                    book_id = book["id"]
                    break
            log.info(f"State of {link}: " + str(state))
            # #print("Updating availability of " + str(book_id) + " to " + str(state))
            # use get_book_medianr to update the medianr of the book in the database
            auth = self.db.getWebADISAuth
            medianr = get_book_medianr(rds.items[0].callnumber, self.appnumber, auth)
            book_data = book["bookdata"]
            book_data.medianr = medianr
            self.db.updateBookdata(book["id"], book_data)
            self.db.setAvailability(book_id, state)
            count += 1
            self.updateProgress.emit(count, len(self.links))
            self.updateSignal.emit(item.callnumber, state)

        log.info("Worker thread finished")
        # teminate thread

        self.quit()
@@ -1,35 +0,0 @@
from playwright.sync_api import sync_playwright


def get_book_medianr(signature: str, semesterapparat_nr: int, auth: tuple) -> str:
    with sync_playwright() as playwright:
        browser = playwright.chromium.launch(headless=True)
        context = browser.new_context()
        page = context.new_page()
        page.goto(
            "https://bsz.ibs-bw.de:22998/aDISWeb/app?service=direct/0/Home/$DirectLink&sp=SDAP42"
        )
        page.get_by_role("textbox", name="Benutzer").fill(auth[0])
        page.get_by_role("textbox", name="Benutzer").press("Tab")
        page.get_by_role("textbox", name="Kennwort").fill(auth[1])
        page.get_by_role("textbox", name="Kennwort").press("Enter")
        page.get_by_role("button", name="Katalog").click()
        page.get_by_role("textbox", name="Signatur").click()
        page.get_by_role("textbox", name="Signatur").fill(signature)
        page.get_by_role("textbox", name="Signatur").press("Enter")
        book_list = page.locator("iframe").content_frame.get_by_role(
            "cell", name="Bibliothek der Pädagogischen"
        )
        # this will always find one result, we need to split the resulting text based on the entries that start with "* "
        book_entries = book_list.inner_text().split("\n")
        books = []
        for entry in book_entries:
            if entry.startswith("* "):
                books.append(entry)
        for book in books:
            if f"Semesterapparat: {semesterapparat_nr}" in book:
                return book.split("* ")[1].split(":")[0]

        # ---------------------
        context.close()
        browser.close()
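A hypothetical call of the removed Playwright helper; the signature, apparatus number and credentials are placeholders, and the import path is the pre-restructure one:

from src.backend.webadis import get_book_medianr  # pre-restructure import path

medianr = get_book_medianr("QA 123", 42, ("adis_user", "adis_password"))  # placeholder values
print(medianr)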
@@ -1,3 +1,5 @@
from __future__ import annotations

import json
from dataclasses import dataclass, field
from enum import Enum

@@ -5,19 +7,18 @@ from typing import Any, Optional, Union

import regex

from src.logic.openai import name_tester, run_shortener, semester_converter
from src.logic.semester import Semester
from src.core.semester import Semester


@dataclass
class Prof:
    id: Optional[int] = None
    _title: Optional[str] = None
    firstname: Optional[str] = None
    lastname: Optional[str] = None
    fullname: Optional[str] = None
    mail: Optional[str] = None
    telnr: Optional[str] = None
    id: int | None = None
    _title: str | None = None
    firstname: str | None = None
    lastname: str | None = None
    fullname: str | None = None
    mail: str | None = None
    telnr: str | None = None

    # add function that sets the data based on a dict
    def from_dict(self, data: dict[str, Union[str, int]]):

@@ -37,7 +38,7 @@ class Prof:
        self._title = value

    # add function that sets the data from a tuple
    def from_tuple(self, data: tuple[Union[str, int], ...]) -> "Prof":
    def from_tuple(self, data: tuple[Union[str, int], ...]) -> Prof:
        self.id = data[0]
        self._title = data[1]
        self.firstname = data[2]

@@ -92,12 +93,12 @@ class BookData:
        self.year = regex.sub(r"[^\d]", "", str(self.year)) if self.year else None
        self.in_library = True if self.signature else False

    def from_dict(self, data: dict) -> "BookData":
    def from_dict(self, data: dict) -> BookData:
        for key, value in data.items():
            setattr(self, key, value)
        return self

    def merge(self, other: "BookData") -> "BookData":
    def merge(self, other: BookData) -> BookData:
        for key, value in other.__dict__.items():
            # merge lists, if the attribute is a list, extend it
            if isinstance(value, list):

@@ -142,12 +143,12 @@ class BookData:
            return "eBook"
        return "Druckausgabe"

    def from_string(self, data: str) -> "BookData":
    def from_string(self, data: str) -> BookData:
        ndata = json.loads(data)

        return BookData(**ndata)

    def from_LehmannsSearchResult(self, result: Any) -> "BookData":
    def from_LehmannsSearchResult(self, result: Any) -> BookData:
        self.title = result.title
        self.author = "; ".join(result.authors) if result.authors else None
        self.edition = str(result.edition) if result.edition else None

@@ -177,10 +178,10 @@ class BookData:

@dataclass
class MailData:
    subject: Optional[str] = None
    body: Optional[str] = None
    mailto: Optional[str] = None
    prof: Optional[str] = None
    subject: str | None
    body: str | None
    mailto: str | None
    prof: str | None


class Subjects(Enum):

@@ -242,7 +243,7 @@ class Apparat:
    prof_id_adis: str | None = None
    konto: int | None = None

    def from_tuple(self, data: tuple[Any, ...]) -> "Apparat":
    def from_tuple(self, data: tuple[Any, ...]) -> Apparat:
        self.id = data[0]
        self.name = data[1]
        self.prof_id = data[2]

@@ -273,7 +274,7 @@ class ELSA:
    semester: str | None = None
    prof_id: int | None = None

    def from_tuple(self, data: tuple[Any, ...]) -> "ELSA":
    def from_tuple(self, data: tuple[Any, ...]) -> ELSA:
        self.id = data[0]
        self.date = data[1]
        self.semester = data[2]

@@ -289,27 +290,27 @@ class ApparatData:

@dataclass
class XMLMailSubmission:
    name: Optional[str] = None
    lastname: Optional[str] = None
    title: Optional[str] = None
    telno: Optional[int] = None
    email: Optional[str] = None
    app_name: Optional[str] = None
    subject: Optional[str] = None
    semester: Optional[Semester] = None
    books: Optional[list[BookData]] = None
    name: str | None
    lastname: str | None
    title: str | None
    telno: int | None
    email: str | None
    app_name: str | None
    subject: str | None
    semester: Semester | None
    books: list[BookData] | None


@dataclass
class Book:
    author: str = None
    year: str = None
    edition: str = None
    title: str = None
    location: str = None
    publisher: str = None
    signature: str = None
    internal_notes: str = None
    author: str | None
    year: str | None
    edition: str | None
    title: str | None
    location: str | None
    publisher: str | None
    signature: str | None
    internal_notes: str | None

    @property
    def has_signature(self) -> bool:

@@ -354,18 +355,18 @@ class Book:

@dataclass
class SemapDocument:
    subject: str = None
    phoneNumber: int = None
    mail: str = None
    title: str = None
    subject: str | None
    phoneNumber: int | None
    mail: str | None
    title: str | None
    personName: str | None
    personTitle: str | None
    title_suggestions: list[str] = None
    semester: Union[str, Semester] = None
    books: list[Book] = None
    eternal: bool = False
    personName: str = None
    personTitle: str = None
    title_length = 0
    title_max_length = 0
    title_length: int = 0
    title_max_length: int = 0

    def __post_init__(self) -> None:
        """."""

@@ -376,6 +377,8 @@ class SemapDocument:

    @property
    def nameSetter(self):
        from src.services.openai import name_tester, run_shortener

        data = name_tester(self.personTitle)
        name = f"{data['last_name']}, {data['first_name']}"
        if data["title"] is not None:

@@ -394,6 +397,8 @@ class SemapDocument:

    @property
    def renameSemester(self) -> None:
        from src.services.openai import semester_converter

        if self.semester:
            if ", Dauer" in self.semester:
                self.semester = self.semester.split(",")[0]
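The quoted forward references above can be dropped because of the added `from __future__ import annotations`; a minimal, self-contained illustration of that pattern (the Node class is invented for the example):

from __future__ import annotations

from dataclasses import dataclass


@dataclass
class Node:
    def clone(self) -> Node:  # unquoted self-reference works under postponed evaluation
        return Node()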
src/logic/SRU.py (631 lines)
@@ -1,631 +0,0 @@
import re
import xml.etree.ElementTree as ET
from dataclasses import dataclass, field
from enum import Enum
from typing import Dict, Iterable, List, Optional, Tuple, Union

import requests
from requests.adapters import HTTPAdapter

# centralized logging used via src.shared.logging
from src.logic.dataclass import BookData
from src.shared.logging import log

log  # ensure imported logger is referenced


# -----------------------
# Dataclasses
# -----------------------


# --- MARC XML structures ---
@dataclass
class ControlField:
    tag: str
    value: str


@dataclass
class SubField:
    code: str
    value: str


@dataclass
class DataField:
    tag: str
    ind1: str = " "
    ind2: str = " "
    subfields: List[SubField] = field(default_factory=list)


@dataclass
class MarcRecord:
    leader: str
    controlfields: List[ControlField] = field(default_factory=list)
    datafields: List[DataField] = field(default_factory=list)


# --- SRU record wrapper ---
@dataclass
class Record:
    recordSchema: str
    recordPacking: str
    recordData: MarcRecord
    recordPosition: int


@dataclass
class EchoedSearchRequest:
    version: str
    query: str
    maximumRecords: int
    recordPacking: str
    recordSchema: str


@dataclass
class SearchRetrieveResponse:
    version: str
    numberOfRecords: int
    records: List[Record] = field(default_factory=list)
    echoedSearchRetrieveRequest: Optional[EchoedSearchRequest] = None


# -----------------------
# Parser
# -----------------------

ZS = "http://www.loc.gov/zing/srw/"
MARC = "http://www.loc.gov/MARC21/slim"
NS = {"zs": ZS, "marc": MARC}


def _text(elem: Optional[ET.Element]) -> str:
    return (elem.text or "") if elem is not None else ""


def _req_text(parent: ET.Element, path: str) -> Optional[str]:
    el = parent.find(path, NS)
    if el is None or el.text is None:
        return None
    return el.text


def parse_marc_record(record_el: ET.Element) -> MarcRecord:
    """
    record_el is the <marc:record> element (default ns MARC in your sample)
    """
    # leader
    leader_text = _req_text(record_el, "marc:leader") or ""

    # controlfields
    controlfields: List[ControlField] = []
    for cf in record_el.findall("marc:controlfield", NS):
        tag = cf.get("tag", "").strip()
        controlfields.append(ControlField(tag=tag, value=_text(cf)))

    # datafields
    datafields: List[DataField] = []
    for df in record_el.findall("marc:datafield", NS):
        tag = df.get("tag", "").strip()
        ind1 = df.get("ind1") or " "
        ind2 = df.get("ind2") or " "
        subfields: List[SubField] = []
        for sf in df.findall("marc:subfield", NS):
            code = sf.get("code", "")
            subfields.append(SubField(code=code, value=_text(sf)))
        datafields.append(DataField(tag=tag, ind1=ind1, ind2=ind2, subfields=subfields))

    return MarcRecord(
        leader=leader_text, controlfields=controlfields, datafields=datafields
    )


def parse_record(zs_record_el: ET.Element) -> Record:
    recordSchema = _req_text(zs_record_el, "zs:recordSchema") or ""
    recordPacking = _req_text(zs_record_el, "zs:recordPacking") or ""

    # recordData contains a MARC <record> with default MARC namespace in your sample
    recordData_el = zs_record_el.find("zs:recordData", NS)
    if recordData_el is None:
        raise ValueError("Missing zs:recordData")

    marc_record_el = recordData_el.find("marc:record", NS)
    if marc_record_el is None:
        # If the MARC record uses default ns (xmlns="...") ElementTree still needs the ns-qualified name
        # We already searched with prefix; this covers both default and prefixed cases.
        raise ValueError("Missing MARC21 record inside zs:recordData")

    marc_record = parse_marc_record(marc_record_el)

    recordPosition = int(_req_text(zs_record_el, "zs:recordPosition") or "0")
    return Record(
        recordSchema=recordSchema,
        recordPacking=recordPacking,
        recordData=marc_record,
        recordPosition=recordPosition,
    )


def parse_echoed_request(root: ET.Element) -> Optional[EchoedSearchRequest]:
    el = root.find("zs:echoedSearchRetrieveRequest", NS)
    if el is None:
        return None

    # Be permissive with missing fields
    version = _text(el.find("zs:version", NS))
    query = _text(el.find("zs:query", NS))
    maximumRecords_text = _text(el.find("zs:maximumRecords", NS)) or "0"
    recordPacking = _text(el.find("zs:recordPacking", NS))
    recordSchema = _text(el.find("zs:recordSchema", NS))

    try:
        maximumRecords = int(maximumRecords_text)
    except ValueError:
        maximumRecords = 0

    return EchoedSearchRequest(
        version=version,
        query=query,
        maximumRecords=maximumRecords,
        recordPacking=recordPacking,
        recordSchema=recordSchema,
    )


def parse_search_retrieve_response(
    xml_str: Union[str, bytes],
) -> SearchRetrieveResponse:
    root = ET.fromstring(xml_str)

    # Root is zs:searchRetrieveResponse
    version = _req_text(root, "zs:version")
    numberOfRecords = int(_req_text(root, "zs:numberOfRecords") or "0")

    records_parent = root.find("zs:records", NS)
    records: List[Record] = []
    if records_parent is not None:
        for r in records_parent.findall("zs:record", NS):
            records.append(parse_record(r))

    echoed = parse_echoed_request(root)

    return SearchRetrieveResponse(
        version=version,
        numberOfRecords=numberOfRecords,
        records=records,
        echoedSearchRetrieveRequest=echoed,
    )

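A parsing sketch for the removed module, run against a fabricated, minimal SRU response:

xml = (
    '<zs:searchRetrieveResponse xmlns:zs="http://www.loc.gov/zing/srw/">'
    "<zs:version>1.1</zs:version>"
    "<zs:numberOfRecords>0</zs:numberOfRecords>"
    "</zs:searchRetrieveResponse>"
)
resp = parse_search_retrieve_response(xml)
print(resp.version, resp.numberOfRecords)  # -> 1.1 0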
# --- Query helpers over MarcRecord ---
|
||||
|
||||
|
||||
def iter_datafields(
|
||||
rec: MarcRecord,
|
||||
tag: Optional[str] = None,
|
||||
ind1: Optional[str] = None,
|
||||
ind2: Optional[str] = None,
|
||||
) -> Iterable[DataField]:
|
||||
"""Yield datafields, optionally filtered by tag/indicators."""
|
||||
for df in rec.datafields:
|
||||
if tag is not None and df.tag != tag:
|
||||
continue
|
||||
if ind1 is not None and df.ind1 != ind1:
|
||||
continue
|
||||
if ind2 is not None and df.ind2 != ind2:
|
||||
continue
|
||||
yield df
|
||||
|
||||
|
||||
def subfield_values(
|
||||
rec: MarcRecord,
|
||||
tag: str,
|
||||
code: str,
|
||||
*,
|
||||
ind1: Optional[str] = None,
|
||||
ind2: Optional[str] = None,
|
||||
) -> List[str]:
|
||||
"""All values for subfield `code` in every `tag` field (respecting indicators)."""
|
||||
out: List[str] = []
|
||||
for df in iter_datafields(rec, tag, ind1, ind2):
|
||||
out.extend(sf.value for sf in df.subfields if sf.code == code)
|
||||
return out
|
||||
|
||||
|
||||
def first_subfield_value(
|
||||
rec: MarcRecord,
|
||||
tag: str,
|
||||
code: str,
|
||||
*,
|
||||
ind1: Optional[str] = None,
|
||||
ind2: Optional[str] = None,
|
||||
default: Optional[str] = None,
|
||||
) -> Optional[str]:
|
||||
"""First value for subfield `code` in `tag` (respecting indicators)."""
|
||||
for df in iter_datafields(rec, tag, ind1, ind2):
|
||||
for sf in df.subfields:
|
||||
if sf.code == code:
|
||||
return sf.value
|
||||
return default
|
||||
|
||||
|
||||
def find_datafields_with_subfields(
|
||||
rec: MarcRecord,
|
||||
tag: str,
|
||||
*,
|
||||
where_all: Optional[Dict[str, str]] = None,
|
||||
where_any: Optional[Dict[str, str]] = None,
|
||||
casefold: bool = False,
|
||||
ind1: Optional[str] = None,
|
||||
ind2: Optional[str] = None,
|
||||
) -> List[DataField]:
|
||||
"""
|
||||
Return datafields of `tag` whose subfields match constraints:
|
||||
- where_all: every (code -> exact value) must be present
|
||||
- where_any: at least one (code -> exact value) present
|
||||
Set `casefold=True` for case-insensitive comparison.
|
||||
"""
|
||||
where_all = where_all or {}
|
||||
where_any = where_any or {}
|
||||
matched: List[DataField] = []
|
||||
|
||||
for df in iter_datafields(rec, tag, ind1, ind2):
|
||||
# Map code -> list of values (with optional casefold applied)
|
||||
vals: Dict[str, List[str]] = {}
|
||||
for sf in df.subfields:
|
||||
v = sf.value.casefold() if casefold else sf.value
|
||||
vals.setdefault(sf.code, []).append(v)
|
||||
|
||||
ok = True
|
||||
for c, v in where_all.items():
|
||||
vv = v.casefold() if casefold else v
|
||||
if c not in vals or vv not in vals[c]:
|
||||
ok = False
|
||||
break
|
||||
|
||||
if ok and where_any:
|
||||
any_ok = any(
|
||||
(c in vals) and ((v.casefold() if casefold else v) in vals[c])
|
||||
for c, v in where_any.items()
|
||||
)
|
||||
if not any_ok:
|
||||
ok = False
|
||||
|
||||
if ok:
|
||||
matched.append(df)
|
||||
|
||||
return matched
|
||||
|
||||
|
||||
def controlfield_value(
|
||||
rec: MarcRecord, tag: str, default: Optional[str] = None
|
||||
) -> Optional[str]:
|
||||
"""Get the first controlfield value by tag (e.g., '001', '005')."""
|
||||
for cf in rec.controlfields:
|
||||
if cf.tag == tag:
|
||||
return cf.value
|
||||
return default
|
||||
|
||||
|
||||
def datafields_value(
|
||||
data: List[DataField], code: str, default: Optional[str] = None
|
||||
) -> Optional[str]:
|
||||
"""Get the first value for a specific subfield code in a list of datafields."""
|
||||
for df in data:
|
||||
for sf in df.subfields:
|
||||
if sf.code == code:
|
||||
return sf.value
|
||||
return default
|
||||
|
||||
|
||||
def datafield_value(
|
||||
df: DataField, code: str, default: Optional[str] = None
|
||||
) -> Optional[str]:
|
||||
"""Get the first value for a specific subfield code in a datafield."""
|
||||
for sf in df.subfields:
|
||||
if sf.code == code:
|
||||
return sf.value
|
||||
return default
|
||||
|
||||
|
||||
def _smart_join_title(a: str, b: Optional[str]) -> str:
|
||||
"""
|
||||
Join 245 $a and $b with MARC-style punctuation.
|
||||
If $b is present, join with ' : ' unless either side already supplies punctuation.
|
||||
"""
|
||||
a = a.strip()
|
||||
if not b:
|
||||
return a
|
||||
b = b.strip()
|
||||
if a.endswith((":", ";", "/")) or b.startswith((":", ";", "/")):
|
||||
return f"{a} {b}"
|
||||
return f"{a} : {b}"
|
||||
|
||||
|
||||
def subfield_values_from_fields(
|
||||
fields: Iterable[DataField],
|
||||
code: str,
|
||||
) -> List[str]:
|
||||
"""All subfield values with given `code` across a list of DataField."""
|
||||
return [sf.value for df in fields for sf in df.subfields if sf.code == code]
|
||||
|
||||
|
||||
def first_subfield_value_from_fields(
|
||||
fields: Iterable[DataField],
|
||||
code: str,
|
||||
default: Optional[str] = None,
|
||||
) -> Optional[str]:
|
||||
"""First subfield value with given `code` across a list of DataField."""
|
||||
for df in fields:
|
||||
for sf in df.subfields:
|
||||
if sf.code == code:
|
||||
return sf.value
|
||||
return default
|
||||
|
||||
|
||||
def subfield_value_pairs_from_fields(
|
||||
fields: Iterable[DataField],
|
||||
code: str,
|
||||
) -> List[Tuple[DataField, str]]:
|
||||
"""
|
||||
Return (DataField, value) pairs for all subfields with `code`.
|
||||
Useful if you need to know which field a value came from.
|
||||
"""
|
||||
out: List[Tuple[DataField, str]] = []
|
||||
for df in fields:
|
||||
for sf in df.subfields:
|
||||
if sf.code == code:
|
||||
out.append((df, sf.value))
|
||||
return out
|
||||
|
||||
|
||||
def book_from_marc(rec: MarcRecord) -> BookData:
|
||||
# PPN from controlfield 001
|
||||
ppn = controlfield_value(rec, "001")
|
||||
|
||||
# Title = 245 $a + 245 $b (if present)
|
||||
t_a = first_subfield_value(rec, "245", "a")
|
||||
t_b = first_subfield_value(rec, "245", "b")
|
||||
title = _smart_join_title(t_a, t_b) if t_a else None
|
||||
|
||||
# Signature = 924 where $9 == "Frei 129" → take that field's $g
|
||||
frei_fields = find_datafields_with_subfields(
|
||||
rec, "924", where_all={"9": "Frei 129"}
|
||||
)
|
||||
signature = first_subfield_value_from_fields(frei_fields, "g")
|
||||
|
||||
# Year = 264 $c (prefer ind2="1" publication; fallback to any 264)
|
||||
year = first_subfield_value(rec, "264", "c", ind2="1") or first_subfield_value(
|
||||
rec, "264", "c"
|
||||
)
|
||||
isbn = subfield_values(rec, "020", "a")
|
||||
mediatype = first_subfield_value(rec, "338", "a")
|
||||
lang = subfield_values(rec, "041", "a")
|
||||
authors = subfield_values(rec, "700", "a")
|
||||
author = None
|
||||
if authors:
|
||||
author = "; ".join(authors)
|
||||
|
||||
return BookData(
|
||||
ppn=ppn,
|
||||
title=title,
|
||||
signature=signature,
|
||||
edition=first_subfield_value(rec, "250", "a") or "",
|
||||
year=year,
|
||||
pages=first_subfield_value(rec, "300", "a") or "",
|
||||
publisher=first_subfield_value(rec, "264", "b") or "",
|
||||
isbn=isbn,
|
||||
language=lang,
|
||||
link="",
|
||||
author=author,
|
||||
media_type=mediatype,
|
||||
)
|
||||
|
||||
|
||||
class SWBData(Enum):
|
||||
URL = "https://sru.k10plus.de/opac-de-627!rec=1?version=1.1&operation=searchRetrieve&query={}&maximumRecords=100&recordSchema=marcxml"
|
||||
ARGSCHEMA = "pica."
|
||||
NAME = "SWB"
|
||||
|
||||
|
||||
class DNBData(Enum):
|
||||
URL = "https://services.dnb.de/sru/dnb?version=1.1&operation=searchRetrieve&query={}&maximumRecords=100&recordSchema=MARC21-xml"
|
||||
ARGSCHEMA = ""
|
||||
NAME = "DNB"
|
||||
|
||||
|
||||
class SRUSite(Enum):
|
||||
SWB = SWBData
|
||||
DNB = DNBData
|
||||
|
||||
|
||||
RVK_ALLOWED = r"[A-Z0-9.\-\/]" # conservative char set typically seen in RVK notations
|
||||
|
||||
|
||||
def find_newer_edition(
|
||||
swb_result: BookData, dnb_result: List[BookData]
|
||||
) -> Optional[List[BookData]]:
|
||||
"""
|
||||
New edition if:
|
||||
- year > swb.year OR
|
||||
- edition_number > swb.edition_number
|
||||
|
||||
Additional guards & preferences:
|
||||
- If both have signatures and they differ, skip (not the same work).
|
||||
- For duplicates (same ppn): keep the one that has a signature, and
|
||||
prefer a signature that matches swb_result.signature.
|
||||
- If multiple remain: keep the single 'latest' by (year desc,
|
||||
edition_number desc, best-signature-match desc, has-signature desc).
|
||||
"""
|
||||
|
||||
def norm_sig(s: Optional[str]) -> str:
|
||||
if not s:
|
||||
return ""
|
||||
# normalize: lowercase, collapse whitespace, keep alnum + a few separators
|
||||
s = s.lower()
|
||||
s = re.sub(r"\s+", " ", s).strip()
|
||||
# remove obvious noise; adjust if your signature format differs
|
||||
s = re.sub(r"[^a-z0-9\-_/\. ]+", "", s)
|
||||
return s
|
||||
|
||||
def has_sig(b: BookData) -> bool:
|
||||
return bool(getattr(b, "signature", None))
|
||||
|
||||
def sig_matches_swb(b: BookData) -> bool:
|
||||
if not has_sig(b) or not has_sig(swb_result):
|
||||
return False
|
||||
return norm_sig(b.signature) == norm_sig(swb_result.signature)
|
||||
|
||||
def strictly_newer(b: BookData) -> bool:
|
||||
by_year = (
|
||||
b.year is not None
|
||||
and swb_result.year is not None
|
||||
and b.year > swb_result.year
|
||||
)
|
||||
by_edition = (
|
||||
b.edition_number is not None
|
||||
and swb_result.edition_number is not None
|
||||
and b.edition_number > swb_result.edition_number
|
||||
)
|
||||
return by_year or by_edition
|
||||
|
||||
swb_sig_norm = norm_sig(getattr(swb_result, "signature", None))
|
||||
|
||||
# 1) Filter to same-work AND newer
|
||||
candidates: List[BookData] = []
|
||||
for b in dnb_result:
|
||||
# Skip if both signatures exist and don't match (different work)
|
||||
b_sig = getattr(b, "signature", None)
|
||||
if b_sig and swb_result.signature:
|
||||
if norm_sig(b_sig) != swb_sig_norm:
|
||||
continue # not the same work
|
||||
|
||||
# Keep only if newer by rules
|
||||
if strictly_newer(b):
|
||||
candidates.append(b)
|
||||
|
||||
if not candidates:
|
||||
return None
|
||||
|
||||
# 2) Dedupe by PPN, preferring signature (and matching signature if possible)
|
||||
by_ppn: dict[Optional[str], BookData] = {}
|
||||
for b in candidates:
|
||||
key = getattr(b, "ppn", None)
|
||||
prev = by_ppn.get(key)
|
||||
if prev is None:
|
||||
by_ppn[key] = b
|
||||
continue
|
||||
|
||||
# Compute preference score for both
|
||||
def ppn_pref_score(x: BookData) -> tuple[int, int]:
|
||||
# (signature matches swb, has signature)
|
||||
return (1 if sig_matches_swb(x) else 0, 1 if has_sig(x) else 0)
|
||||
|
||||
if ppn_pref_score(b) > ppn_pref_score(prev):
|
||||
by_ppn[key] = b
|
||||
|
||||
deduped = list(by_ppn.values())
|
||||
if not deduped:
|
||||
return None
|
||||
|
||||
# 3) If multiple remain, keep only the latest one.
|
||||
# Order: year desc, edition_number desc, signature-match desc, has-signature desc
|
||||
def sort_key(b: BookData):
|
||||
year = b.year if b.year is not None else -1
|
||||
ed = b.edition_number if b.edition_number is not None else -1
|
||||
sig_match = 1 if sig_matches_swb(b) else 0
|
||||
sig_present = 1 if has_sig(b) else 0
|
||||
return (year, ed, sig_match, sig_present)
|
||||
|
||||
best = max(deduped, key=sort_key)
|
||||
return [best] if best else None
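
# Illustrative sketch of the rules above (field values are made up purely for
# demonstration): the 2023 hit with a matching signature is kept as the single
# "latest" candidate, the older printing is filtered out.
def _example_find_newer_edition() -> Optional[List[BookData]]:
    held = BookData(title="Example", year=2019, edition="2. Aufl.", signature="ABC 123")
    dnb_hits = [
        BookData(title="Example", year=2023, edition="3. Aufl.", signature="ABC 123"),
        BookData(title="Example", year=2017, edition="1. Aufl."),
    ]
    return find_newer_edition(held, dnb_hits)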
|
||||
|
||||
|
||||
class Api:
|
||||
def __init__(self, site: str, url: str, prefix: str):
|
||||
self.site = site
|
||||
self.url = url
|
||||
self.prefix = prefix
|
||||
# Reuse TCP connections across requests for better performance
|
||||
self._session = requests.Session()
|
||||
# Slightly larger connection pool for concurrent calls
|
||||
adapter = HTTPAdapter(pool_connections=10, pool_maxsize=20)
|
||||
self._session.mount("http://", adapter)
|
||||
self._session.mount("https://", adapter)
|
||||
|
||||
def close(self):
|
||||
try:
|
||||
self._session.close()
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
def __del__(self):
|
||||
# Best-effort cleanup
|
||||
self.close()
|
||||
|
||||
def get(self, query_args: Iterable[str]) -> List[Record]:
|
||||
        # DNB does not understand the SWB-specific "pica." indexes, so drop those arguments
|
||||
if self.site == "DNB":
|
||||
args = [arg for arg in query_args if not arg.startswith("pica.")]
|
||||
if args == []:
|
||||
raise ValueError("DNB queries must include at least one search term")
|
||||
query_args = args
|
||||
# query_args = [f"{self.prefix}{arg}" for arg in query_args]
|
||||
query = "+and+".join(query_args)
|
||||
query = query.replace(" ", "%20").replace("&", "%26")
|
||||
        # insert the encoded query into the URL template
        url = self.url.format(query)
|
||||
|
||||
log.debug(url)
|
||||
headers = {
|
||||
"User-Agent": f"{self.site} SRU Client, <alexander.kirchner@ph-freiburg.de>",
|
||||
"Accept": "application/xml",
|
||||
"Accept-Charset": "latin1,utf-8;q=0.7,*;q=0.3",
|
||||
}
|
||||
# Use persistent session and set timeouts to avoid hanging
|
||||
resp = self._session.get(url, headers=headers, timeout=(3.05, 60))
|
||||
if resp.status_code != 200:
|
||||
raise Exception(f"Error fetching data from SWB: {resp.status_code}")
|
||||
# Parse using raw bytes (original behavior) to preserve encoding edge cases
|
||||
sr = parse_search_retrieve_response(resp.content)
|
||||
return sr.records
|
||||
|
||||
def getBooks(self, query_args: Iterable[str]) -> List[BookData]:
|
||||
records: List[Record] = self.get(query_args)
|
||||
# Avoid printing on hot paths; rely on logger if needed
|
||||
log.debug(f"{self.site} found {len(records)} records for args={query_args}")
|
||||
books: List[BookData] = []
|
||||
# extract title from query_args if present
|
||||
title = None
|
||||
for arg in query_args:
|
||||
if arg.startswith("pica.tit="):
|
||||
title = arg.split("=")[1]
|
||||
break
|
||||
for rec in records:
|
||||
book = book_from_marc(rec.recordData)
|
||||
books.append(book)
|
||||
if title:
|
||||
books = [
|
||||
b
|
||||
for b in books
|
||||
if b.title and b.title.lower().startswith(title.lower())
|
||||
]
|
||||
return books
|
||||
|
||||
def getLinkForBook(self, book: BookData) -> str:
|
||||
# Not implemented: depends on catalog front-end; return empty string for now
|
||||
return ""
|
||||
|
||||
|
||||
class SWB(Api):
|
||||
def __init__(self):
|
||||
self.site = SWBData.NAME.value
|
||||
self.url = SWBData.URL.value
|
||||
self.prefix = SWBData.ARGSCHEMA.value
|
||||
super().__init__(self.site, self.url, self.prefix)
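
# Minimal usage sketch (illustrative; performs a live SRU request when called):
def _example_swb_title_lookup(title: str) -> List[BookData]:
    client = SWB()
    try:
        return client.getBooks([f"pica.tit={title}"])
    finally:
        client.close()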
|
||||
@@ -1,35 +1,8 @@
|
||||
"""Sorting utilities for semester data."""
|
||||
|
||||
from .c_sort import custom_sort, sort_semesters_list
|
||||
|
||||
__all__ = [
|
||||
"custom_sort",
|
||||
"sort_semesters_list",
|
||||
"APP_NRS",
|
||||
"PROF_TITLES",
|
||||
"SEMAP_MEDIA_ACCOUNTS",
|
||||
"csv_to_list",
|
||||
"ELSA",
|
||||
"Apparat",
|
||||
"ApparatData",
|
||||
"BookData",
|
||||
"Prof",
|
||||
"Semester",
|
||||
"SemapDocument",
|
||||
"elsa_word_to_csv",
|
||||
"pdf_to_semap",
|
||||
"word_docx_to_csv",
|
||||
"word_to_semap",
|
||||
"ZoteroController",
|
||||
"eml_to_semap",
|
||||
]
|
||||
from .c_sort import custom_sort, sort_semesters_list
|
||||
from .constants import APP_NRS, PROF_TITLES, SEMAP_MEDIA_ACCOUNTS
|
||||
from .csvparser import csv_to_list
|
||||
from .dataclass import ELSA, Apparat, ApparatData, BookData, Prof
|
||||
from .semester import Semester
|
||||
from .wordparser import (
|
||||
SemapDocument,
|
||||
elsa_word_to_csv,
|
||||
pdf_to_semap,
|
||||
word_docx_to_csv,
|
||||
word_to_semap,
|
||||
)
|
||||
from .xmlparser import eml_to_semap
|
||||
from .zotero import ZoteroController
|
||||
|
||||
@@ -1,213 +0,0 @@
|
||||
APP_NRS = list(range(1, 181))
|
||||
|
||||
PROF_TITLES = [
|
||||
"Dr. mult.",
|
||||
"Dr. paed.",
|
||||
"Dr. rer. pol.",
|
||||
"Dr. sc. techn.",
|
||||
"Drs.",
|
||||
"Dr. agr.",
|
||||
"Dr. habil.",
|
||||
"Dr. oec.",
|
||||
"Dr. med.",
|
||||
"Dr. e. h.",
|
||||
"Dr. oec. publ.",
|
||||
"Dr. -Ing.",
|
||||
"Dr. theol.",
|
||||
"Dr. med. vet.",
|
||||
"Dr. ing.",
|
||||
"Dr. rer. nat.",
|
||||
"Dr. des.",
|
||||
"Dr. sc. mus.",
|
||||
"Dr. h. c.",
|
||||
"Dr. pharm.",
|
||||
"Dr. med. dent.",
|
||||
"Dr. phil. nat.",
|
||||
"Dr. phil.",
|
||||
"Dr. iur.",
|
||||
"Dr.",
|
||||
"Kein Titel",
|
||||
]
|
||||
|
||||
SEMAP_MEDIA_ACCOUNTS = {
|
||||
1: "1008000055",
|
||||
2: "1008000188",
|
||||
3: "1008000211",
|
||||
4: "1008000344",
|
||||
5: "1008000477",
|
||||
6: "1008000500",
|
||||
7: "1008000633",
|
||||
8: "1008000766",
|
||||
9: "1008000899",
|
||||
10: "1008000922",
|
||||
11: "1008001044",
|
||||
12: "1008001177",
|
||||
13: "1008001200",
|
||||
14: "1008001333",
|
||||
15: "1008001466",
|
||||
16: "1008001599",
|
||||
17: "1008001622",
|
||||
18: "1008001755",
|
||||
19: "1008001888",
|
||||
20: "1008001911",
|
||||
21: "1008002033",
|
||||
22: "1008002166",
|
||||
23: "1008002299",
|
||||
24: "1008002322",
|
||||
25: "1008002455",
|
||||
26: "1008002588",
|
||||
27: "1008002611",
|
||||
28: "1008002744",
|
||||
29: "1008002877",
|
||||
30: "1008002900",
|
||||
31: "1008003022",
|
||||
32: "1008003155",
|
||||
33: "1008003288",
|
||||
34: "1008003311",
|
||||
35: "1008003444",
|
||||
36: "1008003577",
|
||||
37: "1008003600",
|
||||
38: "1008003733",
|
||||
39: "1008003866",
|
||||
40: "1008003999",
|
||||
41: "1008004011",
|
||||
42: "1008004144",
|
||||
43: "1008004277",
|
||||
44: "1008004300",
|
||||
45: "1008004433",
|
||||
46: "1008004566",
|
||||
47: "1008004699",
|
||||
48: "1008004722",
|
||||
49: "1008004855",
|
||||
50: "1008004988",
|
||||
51: "1008005000",
|
||||
52: "1008005133",
|
||||
53: "1008005266",
|
||||
54: "1008005399",
|
||||
55: "1008005422",
|
||||
56: "1008005555",
|
||||
57: "1008005688",
|
||||
58: "1008005711",
|
||||
59: "1008005844",
|
||||
60: "1008005977",
|
||||
61: "1008006099",
|
||||
62: "1008006122",
|
||||
63: "1008006255",
|
||||
64: "1008006388",
|
||||
65: "1008006411",
|
||||
66: "1008006544",
|
||||
67: "1008006677",
|
||||
68: "1008006700",
|
||||
69: "1008006833",
|
||||
70: "1008006966",
|
||||
71: "1008007088",
|
||||
72: "1008007111",
|
||||
73: "1008007244",
|
||||
74: "1008007377",
|
||||
75: "1008007400",
|
||||
76: "1008007533",
|
||||
77: "1008007666",
|
||||
78: "1008007799",
|
||||
79: "1008007822",
|
||||
80: "1008007955",
|
||||
81: "1008008077",
|
||||
82: "1008008100",
|
||||
83: "1008008233",
|
||||
84: "1008008366",
|
||||
85: "1008008499",
|
||||
86: "1008008522",
|
||||
87: "1008008655",
|
||||
88: "1008008788",
|
||||
89: "1008008811",
|
||||
90: "1008008944",
|
||||
91: "1008009066",
|
||||
92: "1008009199",
|
||||
93: "1008009222",
|
||||
94: "1008009355",
|
||||
95: "1008009488",
|
||||
96: "1008009511",
|
||||
97: "1008009644",
|
||||
98: "1008009777",
|
||||
99: "1008009800",
|
||||
100: "1008009933",
|
||||
101: "1008010022",
|
||||
102: "1008010155",
|
||||
103: "1008010288",
|
||||
104: "1008010311",
|
||||
105: "1008010444",
|
||||
106: "1008010577",
|
||||
107: "1008010600",
|
||||
108: "1008010733",
|
||||
109: "1008010866",
|
||||
110: "1008010999",
|
||||
111: "1008011011",
|
||||
112: "1008011144",
|
||||
113: "1008011277",
|
||||
114: "1008011300",
|
||||
115: "1008011433",
|
||||
116: "1008011566",
|
||||
117: "1008011699",
|
||||
118: "1008011722",
|
||||
119: "1008011855",
|
||||
120: "1008011988",
|
||||
121: "1008012000",
|
||||
122: "1008012133",
|
||||
123: "1008012266",
|
||||
124: "1008012399",
|
||||
125: "1008012422",
|
||||
126: "1008012555",
|
||||
127: "1008012688",
|
||||
128: "1008012711",
|
||||
129: "1008012844",
|
||||
130: "1008012977",
|
||||
131: "1008013099",
|
||||
132: "1008013122",
|
||||
133: "1008013255",
|
||||
134: "1008013388",
|
||||
135: "1008013411",
|
||||
136: "1008013544",
|
||||
137: "1008013677",
|
||||
138: "1008013700",
|
||||
139: "1008013833",
|
||||
140: "1008013966",
|
||||
141: "1008014088",
|
||||
142: "1008014111",
|
||||
143: "1008014244",
|
||||
144: "1008014377",
|
||||
145: "1008014400",
|
||||
146: "1008014533",
|
||||
147: "1008014666",
|
||||
148: "1008014799",
|
||||
149: "1008014822",
|
||||
150: "1008014955",
|
||||
151: "1008015077",
|
||||
152: "1008015100",
|
||||
153: "1008015233",
|
||||
154: "1008015366",
|
||||
155: "1008015499",
|
||||
156: "1008015522",
|
||||
157: "1008015655",
|
||||
158: "1008015788",
|
||||
159: "1008015811",
|
||||
160: "1008015944",
|
||||
161: "1008016066",
|
||||
162: "1008016199",
|
||||
163: "1008016222",
|
||||
164: "1008016355",
|
||||
165: "1008016488",
|
||||
166: "1008016511",
|
||||
167: "1008016644",
|
||||
168: "1008016777",
|
||||
169: "1008016800",
|
||||
170: "1008016933",
|
||||
171: "1008017055",
|
||||
172: "1008017188",
|
||||
173: "1008017211",
|
||||
174: "1008017344",
|
||||
175: "1008017477",
|
||||
176: "1008017500",
|
||||
177: "1008017633",
|
||||
178: "1008017766",
|
||||
179: "1008017899",
|
||||
180: "1008017922",
|
||||
}
|
||||
@@ -1,23 +0,0 @@
|
||||
import csv
|
||||
|
||||
from charset_normalizer import detect
|
||||
|
||||
|
||||
def csv_to_list(path: str) -> list[str]:
|
||||
"""
|
||||
    Extracts the first column from a csv file and returns it as a list of strings
|
||||
"""
|
||||
    with open(path, "rb") as raw:
        encoding = detect(raw.read())["encoding"]
|
||||
with open(path, newline="", encoding=encoding) as csvfile:
|
||||
# if decoder fails to map, assign ""
|
||||
reader = csv.reader(csvfile, delimiter=";", quotechar="|")
|
||||
ret = []
|
||||
for row in reader:
|
||||
ret.append(row[0].replace('"', ""))
|
||||
return ret
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
text = csv_to_list("C:/Users/aky547/Desktop/semap/71.csv")
|
||||
|
||||
@@ -1,410 +0,0 @@
|
||||
import json
|
||||
from dataclasses import dataclass, field
|
||||
from enum import Enum
|
||||
from typing import Any, Optional, Union
|
||||
|
||||
import regex
|
||||
|
||||
from src.logic.openai import name_tester, run_shortener, semester_converter
|
||||
from src.logic.semester import Semester
|
||||
|
||||
|
||||
@dataclass
|
||||
class Prof:
|
||||
id: Optional[int] = None
|
||||
_title: Optional[str] = None
|
||||
firstname: Optional[str] = None
|
||||
lastname: Optional[str] = None
|
||||
fullname: Optional[str] = None
|
||||
mail: Optional[str] = None
|
||||
telnr: Optional[str] = None
|
||||
|
||||
# add function that sets the data based on a dict
|
||||
def from_dict(self, data: dict[str, Union[str, int]]):
|
||||
for key, value in data.items():
|
||||
if hasattr(self, key):
|
||||
setattr(self, key, value)
|
||||
return self
|
||||
|
||||
@property
|
||||
def title(self) -> str:
|
||||
if self._title is None or self._title == "None":
|
||||
return ""
|
||||
return self._title
|
||||
|
||||
@title.setter
|
||||
def title(self, value: str):
|
||||
self._title = value
|
||||
|
||||
# add function that sets the data from a tuple
|
||||
def from_tuple(self, data: tuple[Union[str, int], ...]) -> "Prof":
|
||||
setattr(self, "id", data[0])
|
||||
setattr(self, "_title", data[1])
|
||||
setattr(self, "firstname", data[2])
|
||||
setattr(self, "lastname", data[3])
|
||||
setattr(self, "fullname", data[4])
|
||||
setattr(self, "mail", data[5])
|
||||
setattr(self, "telnr", data[6])
|
||||
return self
|
||||
|
||||
def name(self, comma: bool = False) -> Optional[str]:
|
||||
if self.firstname is None and self.lastname is None:
|
||||
if "," in self.fullname:
|
||||
self.firstname = self.fullname.split(",")[1].strip()
|
||||
self.lastname = self.fullname.split(",")[0].strip()
|
||||
else:
|
||||
return self.fullname
|
||||
|
||||
if comma:
|
||||
return f"{self.lastname}, {self.firstname}"
|
||||
return f"{self.lastname} {self.firstname}"
|
||||
|
||||
|
||||
@dataclass
|
||||
class BookData:
|
||||
ppn: str | None = None
|
||||
title: str | None = None
|
||||
signature: str | None = None
|
||||
edition: str | None = None
|
||||
link: str | None = None
|
||||
isbn: Union[str, list[str], None] = field(default_factory=list)
|
||||
author: str | None = None
|
||||
language: Union[str, list[str], None] = field(default_factory=list)
|
||||
publisher: str | None = None
|
||||
place: str | None = None
|
||||
year: int | None = None
|
||||
pages: str | None = None
|
||||
library_location: str | None = None
|
||||
in_apparat: bool | None = False
|
||||
adis_idn: str | None = None
|
||||
old_book: Any | None = None
|
||||
media_type: str | None = None #
|
||||
in_library: bool | None = None # whether the book is in the library or not
|
||||
medianr: int | None = None # Media number in the library system
|
||||
|
||||
def __post_init__(self):
|
||||
self.library_location = (
|
||||
str(self.library_location) if self.library_location else None
|
||||
)
|
||||
if isinstance(self.language, list) and self.language:
|
||||
self.language = [lang.strip() for lang in self.language if lang.strip()]
|
||||
self.language = ",".join(self.language)
|
||||
self.year = regex.sub(r"[^\d]", "", str(self.year)) if self.year else None
|
||||
self.in_library = True if self.signature else False
|
||||
|
||||
def from_dict(self, data: dict) -> "BookData":
|
||||
for key, value in data.items():
|
||||
setattr(self, key, value)
|
||||
return self
|
||||
|
||||
def merge(self, other: "BookData") -> "BookData":
|
||||
for key, value in other.__dict__.items():
|
||||
# merge lists, if the attribute is a list, extend it
|
||||
if isinstance(value, list):
|
||||
current_value = getattr(self, key)
|
||||
if current_value is None:
|
||||
current_value = []
|
||||
elif not isinstance(current_value, list):
|
||||
current_value = [current_value]
|
||||
# extend the list with the new values, but only if they are not already in the list
|
||||
for v in value:
|
||||
if v not in current_value:
|
||||
current_value.append(v)
|
||||
setattr(self, key, current_value)
|
||||
if value is not None and (
|
||||
getattr(self, key) is None or getattr(self, key) == ""
|
||||
):
|
||||
setattr(self, key, value)
|
||||
        # in language, keep only short language codes (entries longer than 4 characters are dropped)
|
||||
if isinstance(self.language, list):
|
||||
self.language = [lang for lang in self.language if len(lang) <= 4]
|
||||
return self
|
||||
|
||||
@property
|
||||
def to_dict(self) -> str:
|
||||
"""Convert the dataclass to a dictionary."""
|
||||
data_dict = {
|
||||
key: value for key, value in self.__dict__.items() if value is not None
|
||||
}
|
||||
# remove old_book from data_dict
|
||||
if "old_book" in data_dict:
|
||||
del data_dict["old_book"]
|
||||
return json.dumps(data_dict, ensure_ascii=False)
|
||||
|
||||
def from_dataclass(self, dataclass: Optional[Any]) -> None:
|
||||
if dataclass is None:
|
||||
return
|
||||
for key, value in dataclass.__dict__.items():
|
||||
setattr(self, key, value)
|
||||
|
||||
def get_book_type(self) -> str:
|
||||
if "Online" in self.pages:
|
||||
return "eBook"
|
||||
else:
|
||||
return "Druckausgabe"
|
||||
|
||||
def from_string(self, data: str) -> "BookData":
|
||||
ndata = json.loads(data)
|
||||
|
||||
return BookData(**ndata)
|
||||
|
||||
def from_LehmannsSearchResult(self, result: Any) -> "BookData":
|
||||
self.title = result.title
|
||||
self.author = "; ".join(result.authors) if result.authors else None
|
||||
self.edition = str(result.edition) if result.edition else None
|
||||
self.link = result.url
|
||||
self.isbn = (
|
||||
result.isbn13
|
||||
if isinstance(result.isbn13, list)
|
||||
else [result.isbn13]
|
||||
if result.isbn13
|
||||
else []
|
||||
)
|
||||
self.pages = str(result.pages) if result.pages else None
|
||||
self.publisher = result.publisher
|
||||
self.year = str(result.year) if result.year else None
|
||||
# self.pages = str(result.pages) if result.pages else None
|
||||
return self
|
||||
|
||||
@property
|
||||
def edition_number(self) -> Optional[int]:
|
||||
if self.edition is None:
|
||||
return 0
|
||||
match = regex.search(r"(\d+)", self.edition)
|
||||
if match:
|
||||
return int(match.group(1))
|
||||
return 0
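
# Hedged sketch of the merge() behaviour above: list fields are unioned, empty or None
# scalars are filled in from `other`; the values shown are made up.
def _example_merge_bookdata() -> BookData:
    a = BookData(title="Statistik", isbn=["9783000000001"])
    b = BookData(title="Statistik", isbn=["9783000000002"], publisher="Example Verlag")
    return a.merge(b)  # isbn now holds both numbers, publisher is taken from b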
|
||||
|
||||
|
||||
@dataclass
|
||||
class MailData:
|
||||
subject: Optional[str] = None
|
||||
body: Optional[str] = None
|
||||
mailto: Optional[str] = None
|
||||
prof: Optional[str] = None
|
||||
|
||||
|
||||
class Subjects(Enum):
|
||||
BIOLOGY = (1, "Biologie")
|
||||
CHEMISTRY = (2, "Chemie")
|
||||
GERMAN = (3, "Deutsch")
|
||||
ENGLISH = (4, "Englisch")
|
||||
PEDAGOGY = (5, "Erziehungswissenschaft")
|
||||
FRENCH = (6, "Französisch")
|
||||
GEOGRAPHY = (7, "Geographie")
|
||||
HISTORY = (8, "Geschichte")
|
||||
HEALTH_EDUCATION = (9, "Gesundheitspädagogik")
|
||||
HTW = (10, "Haushalt / Textil")
|
||||
ART = (11, "Kunst")
|
||||
MATH_IT = (12, "Mathematik / Informatik")
|
||||
MEDIAPEDAGOGY = (13, "Medien in der Bildung")
|
||||
MUSIC = (14, "Musik")
|
||||
PHILOSOPHY = (15, "Philosophie")
|
||||
PHYSICS = (16, "Physik")
|
||||
POLITICS = (17, "Politikwissenschaft")
|
||||
PRORECTORATE = (18, "Prorektorat Lehre und Studium")
|
||||
PSYCHOLOGY = (19, "Psychologie")
|
||||
SOCIOLOGY = (20, "Soziologie")
|
||||
SPORT = (21, "Sport")
|
||||
TECHNIC = (22, "Technik")
|
||||
THEOLOGY = (23, "Theologie")
|
||||
ECONOMICS = (24, "Wirtschaftslehre")
|
||||
|
||||
@property
|
||||
def id(self) -> int:
|
||||
return self.value[0]
|
||||
|
||||
@property
|
||||
def name(self) -> str:
|
||||
return self.value[1]
|
||||
|
||||
@classmethod
|
||||
def get_index(cls, name: str) -> Optional[int]:
|
||||
for i in cls:
|
||||
if i.name == name:
|
||||
return i.id - 1
|
||||
return None
|
||||
|
||||
|
||||
@dataclass
|
||||
class Apparat:
|
||||
id: int | None = None
|
||||
name: str | None = None
|
||||
prof_id: int | None = None
|
||||
subject: str | None = None
|
||||
appnr: int | None = None
|
||||
created_semester: str | None = None
|
||||
extended_at: str | None = None
|
||||
eternal: bool = False
|
||||
extend_until: str | None = None
|
||||
deleted: int | None = None
|
||||
deleted_date: str | None = None
|
||||
apparat_id_adis: str | None = None
|
||||
prof_id_adis: str | None = None
|
||||
konto: int | None = None
|
||||
|
||||
def from_tuple(self, data: tuple[Any, ...]) -> "Apparat":
|
||||
self.id = data[0]
|
||||
self.name = data[1]
|
||||
self.prof_id = data[2]
|
||||
self.subject = data[3]
|
||||
self.appnr = data[4]
|
||||
self.created_semester = data[5]
|
||||
self.extended_at = data[6]
|
||||
self.eternal = data[7]
|
||||
self.extend_until = data[8]
|
||||
self.deleted = data[9]
|
||||
self.deleted_date = data[10]
|
||||
self.apparat_id_adis = data[11]
|
||||
self.prof_id_adis = data[12]
|
||||
self.konto = data[13]
|
||||
return self
|
||||
|
||||
@property
|
||||
def get_semester(self) -> Optional[str]:
|
||||
if self.extend_until is not None:
|
||||
return self.extend_until
|
||||
else:
|
||||
return self.created_semester
|
||||
|
||||
|
||||
@dataclass
|
||||
class ELSA:
|
||||
id: int | None = None
|
||||
date: str | None = None
|
||||
semester: str | None = None
|
||||
prof_id: int | None = None
|
||||
|
||||
def from_tuple(self, data: tuple[Any, ...]) -> "ELSA":
|
||||
self.id = data[0]
|
||||
self.date = data[1]
|
||||
self.semester = data[2]
|
||||
self.prof_id = data[3]
|
||||
return self
|
||||
|
||||
|
||||
@dataclass
|
||||
class ApparatData:
|
||||
prof: Prof = field(default_factory=Prof)
|
||||
apparat: Apparat = field(default_factory=Apparat)
|
||||
|
||||
|
||||
@dataclass
|
||||
class XMLMailSubmission:
|
||||
name: Optional[str] = None
|
||||
lastname: Optional[str] = None
|
||||
title: Optional[str] = None
|
||||
telno: Optional[int] = None
|
||||
email: Optional[str] = None
|
||||
app_name: Optional[str] = None
|
||||
subject: Optional[str] = None
|
||||
semester: Optional[Semester] = None
|
||||
books: Optional[list[BookData]] = None
|
||||
|
||||
|
||||
@dataclass
|
||||
class Book:
|
||||
    author: str | None = None
    year: str | None = None
    edition: str | None = None
    title: str | None = None
    location: str | None = None
    publisher: str | None = None
    signature: str | None = None
    internal_notes: str | None = None
|
||||
|
||||
@property
|
||||
def has_signature(self) -> bool:
|
||||
return self.signature is not None and self.signature != ""
|
||||
|
||||
@property
|
||||
def is_empty(self) -> bool:
|
||||
return all(
|
||||
[
|
||||
self.author == "",
|
||||
self.year == "",
|
||||
self.edition == "",
|
||||
self.title == "",
|
||||
self.location == "",
|
||||
self.publisher == "",
|
||||
self.signature == "",
|
||||
self.internal_notes == "",
|
||||
]
|
||||
)
|
||||
|
||||
def from_dict(self, data: dict[str, Any]):
|
||||
for key, value in data.items():
|
||||
value = value.strip()
|
||||
if value == "\u2002\u2002\u2002\u2002\u2002":
|
||||
value = ""
|
||||
|
||||
if key == "Autorenname(n):Nachname, Vorname":
|
||||
self.author = value
|
||||
elif key == "Jahr/Auflage":
|
||||
self.year = value.split("/")[0] if "/" in value else value
|
||||
self.edition = value.split("/")[1] if "/" in value else ""
|
||||
elif key == "Titel":
|
||||
self.title = value
|
||||
elif key == "Ort und Verlag":
|
||||
self.location = value.split(",")[0] if "," in value else value
|
||||
self.publisher = value.split(",")[1] if "," in value else ""
|
||||
elif key == "Standnummer":
|
||||
self.signature = value.strip()
|
||||
elif key == "Interne Vermerke":
|
||||
self.internal_notes = value
|
||||
|
||||
|
||||
@dataclass
|
||||
class SemapDocument:
|
||||
    subject: str | None = None
    phoneNumber: int | None = None
    mail: str | None = None
    title: str | None = None
    title_suggestions: list[str] | None = None
    semester: Union[str, Semester, None] = None
    books: list[Book] | None = None
    eternal: bool = False
    personName: str | None = None
    personTitle: str | None = None
|
||||
title_length = 0
|
||||
title_max_length = 0
|
||||
|
||||
def __post_init__(self):
|
||||
self.title_suggestions = []
|
||||
|
||||
@property
|
||||
def nameSetter(self):
|
||||
data = name_tester(self.personTitle)
|
||||
name = f"{data['last_name']}, {data['first_name']}"
|
||||
if data["title"] is not None:
|
||||
title = data["title"]
|
||||
self.personTitle = title
|
||||
self.personName = name
|
||||
self.title_length = len(self.title) + 3 + len(self.personName.split(",")[0])
|
||||
if self.title_length > 40:
|
||||
name_len = len(self.personName.split(",")[0])
|
||||
self.title_max_length = 38 - name_len
|
||||
suggestions = run_shortener(self.title, self.title_max_length)
|
||||
for suggestion in suggestions:
|
||||
self.title_suggestions.append(suggestion["shortened_string"])
|
||||
else:
|
||||
self.title_suggestions = []
|
||||
pass
|
||||
|
||||
@property
|
||||
def renameSemester(self) -> None:
|
||||
if self.semester:
|
||||
if ", Dauer" in self.semester:
|
||||
self.semester = self.semester.split(",")[0]
|
||||
self.eternal = True
|
||||
self.semester = Semester().from_string(self.semester)
|
||||
else:
|
||||
self.semester = Semester().from_string(
|
||||
semester_converter(self.semester)
|
||||
)
|
||||
|
||||
@property
|
||||
def signatures(self) -> list[str]:
|
||||
if self.books is not None:
|
||||
return [book.signature for book in self.books if book.has_signature]
|
||||
return []
|
||||
@@ -1,45 +0,0 @@
|
||||
import csv
|
||||
|
||||
import pandas as pd
|
||||
from docx import Document
|
||||
|
||||
|
||||
def csv_to_list(path: str) -> list[str]:
|
||||
"""
|
||||
    Extracts the first column of a csv file and returns it as a list of strings
|
||||
"""
|
||||
with open(path, newline="") as csvfile:
|
||||
reader = csv.reader(csvfile, delimiter=";", quotechar="|")
|
||||
data = []
|
||||
for row in reader:
|
||||
for i in range(len(row)):
|
||||
row[i] = row[i].replace('"', "")
|
||||
data.append(row)
|
||||
ret = []
|
||||
for i in data:
|
||||
ret.append(i[0])
|
||||
return ret
|
||||
|
||||
|
||||
def word_docx_to_csv(path) -> pd.DataFrame:
|
||||
doc = Document(path)
|
||||
tables = doc.tables
|
||||
|
||||
m_data = []
|
||||
for table in tables:
|
||||
data = []
|
||||
for row in table.rows:
|
||||
row_data = []
|
||||
for cell in row.cells:
|
||||
text = cell.text
|
||||
text = text.replace("\n", "")
|
||||
row_data.append(text)
|
||||
data.append(row_data)
|
||||
df = pd.DataFrame(data)
|
||||
df.columns = df.iloc[0]
|
||||
df = df.iloc[1:]
|
||||
|
||||
m_data.append(df)
|
||||
|
||||
df = m_data[2]
|
||||
return df
|
||||
@@ -1,312 +0,0 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import re
|
||||
from dataclasses import asdict, dataclass, field
|
||||
from typing import Iterable, List, Optional
|
||||
from urllib.parse import quote_plus, urljoin
|
||||
|
||||
import httpx
|
||||
from bs4 import BeautifulSoup
|
||||
|
||||
from src.logic.dataclass import BookData
|
||||
|
||||
BASE = "https://www.lehmanns.de"
|
||||
SEARCH_URL = "https://www.lehmanns.de/search/quick?mediatype_id=&q="
|
||||
|
||||
|
||||
@dataclass
|
||||
class LehmannsSearchResult:
|
||||
title: str
|
||||
url: str
|
||||
|
||||
# Core fields from the listing card
|
||||
year: Optional[int] = None
|
||||
edition: Optional[int] = None
|
||||
publisher: Optional[str] = None
|
||||
isbn13: Optional[str] = None
|
||||
|
||||
# Extras from the listing card
|
||||
description: Optional[str] = None
|
||||
authors: list[str] = field(default_factory=list)
|
||||
media_type: Optional[str] = None
|
||||
book_format: Optional[str] = None
|
||||
price_eur: Optional[float] = None
|
||||
currency: str = "EUR"
|
||||
image: Optional[str] = None
|
||||
|
||||
# From detail page:
|
||||
pages: Optional[str] = None # "<N> Seiten"
|
||||
buyable: bool = True # set in enrich_pages (detail page)
|
||||
unavailable_hint: Optional[str] = (
|
||||
None # e.g. "Titel ist leider vergriffen; keine Neuauflage"
|
||||
)
|
||||
|
||||
def to_dict(self) -> dict:
|
||||
return asdict(self)
|
||||
|
||||
|
||||
class LehmannsClient:
|
||||
"""Scrapes quick-search results, then enriches (and filters) via product pages."""
|
||||
|
||||
def __init__(self, timeout: float = 20.0):
|
||||
self.client = httpx.Client(
|
||||
headers={
|
||||
"User-Agent": (
|
||||
"Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 "
|
||||
"(KHTML, like Gecko) Chrome/124.0 Safari/537.36"
|
||||
),
|
||||
"Accept-Language": "de-DE,de;q=0.9,en;q=0.8",
|
||||
"Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8",
|
||||
},
|
||||
timeout=timeout,
|
||||
follow_redirects=True,
|
||||
)
|
||||
|
||||
def close(self):
|
||||
self.client.close()
|
||||
|
||||
def __enter__(self):
|
||||
return self
|
||||
|
||||
def __exit__(self, *exc):
|
||||
self.close()
|
||||
|
||||
# ------------------- Search (listing) -------------------
|
||||
|
||||
def build_search_url(self, title: str) -> str:
|
||||
# spaces -> '+'
|
||||
return SEARCH_URL + quote_plus(title)
|
||||
|
||||
def search_by_title(
|
||||
self,
|
||||
title: str,
|
||||
limit: Optional[int] = None,
|
||||
strict: bool = False,
|
||||
only_latest: bool = True,
|
||||
) -> List[BookData]:
|
||||
"""
|
||||
Parse the listing page only (no availability check here).
|
||||
Use enrich_pages(...) afterwards to fetch detail pages, add 'pages',
|
||||
and drop unbuyable items.
|
||||
"""
|
||||
url = self.build_search_url(title=title)
|
||||
html = self._get(url)
|
||||
if not html:
|
||||
return []
|
||||
results = self._parse_results(html)
|
||||
self.enrich_pages(results)
|
||||
|
||||
results = [BookData().from_LehmannsSearchResult(r) for r in results]
|
||||
if strict:
|
||||
# filter results to only those with exact title match (case-insensitive)
|
||||
title_lower = title.lower()
|
||||
results = [r for r in results if r.title and r.title.lower() == title_lower]
|
||||
# results = [r for r in results if r.buyable]
|
||||
return results
|
||||
if limit is not None:
|
||||
results = results[: max(0, limit)]
|
||||
if only_latest and len(results) > 1:
|
||||
# keep only the latest edition (highest edition number)
|
||||
results.sort(key=lambda r: (r.edition_number or 0), reverse=True)
|
||||
results = [results[0]]
|
||||
return results
|
||||
|
||||
# ------------------- Detail enrichment & filtering -------------------
|
||||
|
||||
def enrich_pages(
|
||||
self, results: Iterable[LehmannsSearchResult], drop_unbuyable: bool = True
|
||||
) -> List[LehmannsSearchResult]:
|
||||
"""
|
||||
Fetch each result.url, extract:
|
||||
- pages: from <span class="book-meta meta-seiten" itemprop="numberOfPages">...</span>
|
||||
- availability: from <li class="availability-3">...</li>
|
||||
* if it contains "Titel ist leider vergriffen", mark buyable=False
|
||||
* if it also contains "keine Neuauflage", set unavailable_hint accordingly
|
||||
If drop_unbuyable=True, exclude non-buyable results from the returned list.
|
||||
"""
|
||||
enriched: List[LehmannsSearchResult] = []
|
||||
for r in results:
|
||||
try:
|
||||
html = self._get(r.url)
|
||||
if not html:
|
||||
# Can't verify; keep as-is when not dropping, else skip
|
||||
if not drop_unbuyable:
|
||||
enriched.append(r)
|
||||
continue
|
||||
|
||||
soup = BeautifulSoup(html, "html.parser") # type: ignore
|
||||
|
||||
# Pages
|
||||
pages_node = soup.select_one( # type: ignore
|
||||
"span.book-meta.meta-seiten[itemprop='numberOfPages'], "
|
||||
"span.book-meta.meta-seiten[itemprop='numberofpages'], "
|
||||
".meta-seiten [itemprop='numberOfPages'], "
|
||||
".meta-seiten[itemprop='numberOfPages'], "
|
||||
".book-meta.meta-seiten"
|
||||
)
|
||||
if pages_node:
|
||||
text = pages_node.get_text(" ", strip=True)
|
||||
m = re.search(r"\d+", text)
|
||||
if m:
|
||||
r.pages = f"{m.group(0)} Seiten"
|
||||
|
||||
# Availability via li.availability-3
|
||||
avail_li = soup.select_one("li.availability-3") # type: ignore
|
||||
if avail_li:
|
||||
avail_text = " ".join(
|
||||
avail_li.get_text(" ", strip=True).split()
|
||||
).lower()
|
||||
if "titel ist leider vergriffen" in avail_text:
|
||||
r.buyable = False
|
||||
if "keine neuauflage" in avail_text:
|
||||
r.unavailable_hint = (
|
||||
"Titel ist leider vergriffen; keine Neuauflage"
|
||||
)
|
||||
else:
|
||||
r.unavailable_hint = "Titel ist leider vergriffen"
|
||||
|
||||
# Append or drop
|
||||
if (not drop_unbuyable) or r.buyable:
|
||||
enriched.append(r)
|
||||
|
||||
except Exception:
|
||||
# On any per-item error, keep the record if not dropping; else skip
|
||||
if not drop_unbuyable:
|
||||
enriched.append(r)
|
||||
continue
|
||||
|
||||
return enriched
|
||||
|
||||
# ------------------- Internals -------------------
|
||||
|
||||
def _get(self, url: str) -> Optional[str]:
|
||||
try:
|
||||
r = self.client.get(url)
|
||||
r.encoding = "utf-8"
|
||||
if r.status_code == 200 and "text/html" in (
|
||||
r.headers.get("content-type") or ""
|
||||
):
|
||||
return r.text
|
||||
except httpx.HTTPError:
|
||||
pass
|
||||
return None
|
||||
|
||||
def _parse_results(self, html: str) -> List[LehmannsSearchResult]:
|
||||
soup = BeautifulSoup(html, "html.parser")
|
||||
results: list[LehmannsSearchResult] = []
|
||||
|
||||
for block in soup.select("div.info-block"):
|
||||
a = block.select_one(".title a[href]")
|
||||
if not a:
|
||||
continue
|
||||
url = urljoin(BASE, a["href"].strip())
|
||||
base_title = (block.select_one(".title [itemprop='name']") or a).get_text( # type: ignore
|
||||
strip=True
|
||||
)
|
||||
|
||||
# Alternative headline => extend title
|
||||
alt_tag = block.select_one(".description[itemprop='alternativeHeadline']") # type: ignore
|
||||
alternative_headline = alt_tag.get_text(strip=True) if alt_tag else None
|
||||
title = (
|
||||
f"{base_title} : {alternative_headline}"
|
||||
if alternative_headline
|
||||
else base_title
|
||||
)
|
||||
description = alternative_headline
|
||||
|
||||
# Authors from .author
|
||||
authors: list[str] = []
|
||||
author_div = block.select_one("div.author") # type: ignore
|
||||
if author_div:
|
||||
t = author_div.get_text(" ", strip=True)
|
||||
t = re.sub(r"^\s*von\s+", "", t, flags=re.I)
|
||||
for part in re.split(r"\s*;\s*|\s*&\s*|\s+und\s+", t):
|
||||
name = " ".join(part.split())
|
||||
if name:
|
||||
authors.append(name)
|
||||
|
||||
# Media + format
|
||||
media_type = None
|
||||
book_format = None
|
||||
type_text = block.select_one(".type") # type: ignore
|
||||
if type_text:
|
||||
t = type_text.get_text(" ", strip=True)
|
||||
m = re.search(r"\b(Buch|eBook|Hörbuch)\b", t)
|
||||
if m:
|
||||
media_type = m.group(1)
|
||||
fm = re.search(r"\(([^)]+)\)", t)
|
||||
if fm:
|
||||
book_format = fm.group(1).strip().upper()
|
||||
|
||||
# Year
|
||||
year = None
|
||||
y = block.select_one("[itemprop='copyrightYear']") # type: ignore
|
||||
if y:
|
||||
try:
|
||||
year = int(y.get_text(strip=True))
|
||||
except ValueError:
|
||||
pass
|
||||
|
||||
# Edition
|
||||
edition = None
|
||||
ed = block.select_one("[itemprop='bookEdition']") # type: ignore
|
||||
if ed:
|
||||
m = re.search(r"\d+", ed.get_text(strip=True))
|
||||
if m:
|
||||
edition = int(m.group())
|
||||
|
||||
# Publisher
|
||||
publisher = None
|
||||
pub = block.select_one( # type: ignore
|
||||
".publisherprop [itemprop='name']"
|
||||
) or block.select_one(".publisher [itemprop='name']") # type: ignore
|
||||
if pub:
|
||||
publisher = pub.get_text(strip=True)
|
||||
|
||||
# ISBN-13
|
||||
isbn13 = None
|
||||
isbn_tag = block.select_one(".isbn [itemprop='isbn'], [itemprop='isbn']") # type: ignore
|
||||
if isbn_tag:
|
||||
digits = re.sub(r"[^0-9Xx]", "", isbn_tag.get_text(strip=True))
|
||||
m = re.search(r"(97[89]\d{10})", digits)
|
||||
if m:
|
||||
isbn13 = m.group(1)
|
||||
|
||||
# Price (best effort)
|
||||
price_eur = None
|
||||
txt = block.get_text(" ", strip=True)
|
||||
mprice = re.search(r"(\d{1,3}(?:\.\d{3})*,\d{2})\s*€", txt)
|
||||
if not mprice and block.parent:
|
||||
sib = block.parent.get_text(" ", strip=True)
|
||||
mprice = re.search(r"(\d{1,3}(?:\.\d{3})*,\d{2})\s*€", sib)
|
||||
if mprice:
|
||||
num = mprice.group(1).replace(".", "").replace(",", ".")
|
||||
try:
|
||||
price_eur = float(num)
|
||||
except ValueError:
|
||||
pass
|
||||
|
||||
# Image (best-effort)
|
||||
image = None
|
||||
left_img = block.find_previous("img") # type: ignore
|
||||
if left_img and left_img.get("src"):
|
||||
image = urljoin(BASE, left_img["src"])
|
||||
|
||||
results.append(
|
||||
LehmannsSearchResult(
|
||||
title=title,
|
||||
url=url,
|
||||
description=description,
|
||||
authors=authors,
|
||||
media_type=media_type,
|
||||
book_format=book_format,
|
||||
year=year,
|
||||
edition=edition,
|
||||
publisher=publisher,
|
||||
isbn13=isbn13,
|
||||
price_eur=price_eur,
|
||||
image=image,
|
||||
)
|
||||
)
|
||||
|
||||
return results
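
# Minimal usage sketch (illustrative; performs live HTTP requests when called):
def _example_lehmanns_lookup(title: str) -> List[BookData]:
    with LehmannsClient() as client:
        return client.search_by_title(title, strict=False, only_latest=True)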
|
||||
@@ -1,58 +0,0 @@
|
||||
import ast
import json
|
||||
from typing import Any
|
||||
|
||||
from openai import OpenAI
|
||||
|
||||
from src import settings
|
||||
|
||||
|
||||
def init_client() -> OpenAI:
|
||||
"""Initialize the OpenAI client with the API key and model from settings."""
|
||||
global client, model, api_key
|
||||
if not settings.openAI.api_key:
|
||||
raise ValueError("OpenAI API key is not set in the configuration.")
|
||||
if not settings.openAI.model:
|
||||
raise ValueError("OpenAI model is not set in the configuration.")
|
||||
|
||||
model = settings.openAI.model
|
||||
api_key = settings.openAI.api_key
|
||||
client = OpenAI(api_key=api_key)
|
||||
return client
|
||||
|
||||
|
||||
def run_shortener(title: str, length: int) -> list[dict[str, Any]]:
|
||||
client = init_client()
|
||||
response = client.responses.create( # type: ignore
|
||||
model=model,
|
||||
instructions="""you are a sentence shortener. The next message will contain the string to shorten and the length limit.
|
||||
You need to shorten the string to be under the length limit, while keeping as much detail as possible. The result may NOT be longer than the length limit.
|
||||
based on that, please reply only the shortened string. Give me 5 choices. if the length is too long, discard the string and try another one.Return the data as a python list containing the result as {"shortened_string": shortened_string, "length": lengthasInt}. Do not return the answer in a codeblock, use a pure string. Before answering, check the results and if ANY is longer than the needed_length, discard all and try again""",
|
||||
input=f'{{"string":"{title}", "needed_length":{length}}}',
|
||||
)
|
||||
answers = response.output_text
|
||||
    # answers come back as a literal list of dicts in string form; parse it safely
    # instead of eval-ing model output
    return ast.literal_eval(answers)  # type: ignore
|
||||
|
||||
|
||||
def name_tester(name: str) -> dict:
|
||||
client = init_client()
|
||||
response = client.responses.create( # type: ignore
|
||||
model=model,
|
||||
instructions="""you are a name tester, You are given a name and will have to split the name into first name, last name, and if present the title. Return the name in a json format with the keys "title", "first_name", "last_name". If no title is present, set title to none. Do NOt return the answer in a codeblock, use a pure json string. Assume the names are in the usual german naming scheme""",
|
||||
input=f'{{"name":"{name}"}}',
|
||||
)
|
||||
answers = response.output_text
|
||||
|
||||
return json.loads(answers)
|
||||
|
||||
|
||||
def semester_converter(semester: str) -> str:
|
||||
client = init_client()
|
||||
response = client.responses.create( # type: ignore
|
||||
model=model,
|
||||
instructions="""you are a semester converter. You will be given a string. Convert this into a string like this: SoSe YY or WiSe YY/YY+1. Do not return the answer in a codeblock, use a pure string.""",
|
||||
input=semester,
|
||||
)
|
||||
answers = response.output_text
|
||||
|
||||
return answers
|
||||
@@ -1,23 +0,0 @@
|
||||
# add depend path to system path
|
||||
|
||||
from pdfquery import PDFQuery
|
||||
|
||||
|
||||
def pdf_to_csv(path: str) -> str:
|
||||
"""
|
||||
    Extracts the text content from a pdf file and returns it as a string
|
||||
"""
|
||||
file = PDFQuery(path)
|
||||
file.load()
|
||||
# get the text from the pdf file
|
||||
text_elems = file.extract([("with_formatter", "text"), ("all_text", "*")])
|
||||
extracted_text = text_elems["all_text"]
|
||||
|
||||
return extracted_text
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
text = pdf_to_csv("54_pdf.pdf")
|
||||
# remove linebreaks
|
||||
text = text.replace("\n", "")
|
||||
# print(text)
|
||||
@@ -1,248 +0,0 @@
|
||||
"""Semester helper class
|
||||
|
||||
A small utility around the *German* academic calendar that distinguishes
|
||||
between *Wintersemester* (WiSe) and *Sommersemester* (SoSe).
|
||||
|
||||
Key points
|
||||
----------
|
||||
* A **`Semester`** is identified by a *term* ("SoSe" or "WiSe") and the last two
|
||||
digits of the calendar year in which the term *starts*.
|
||||
* Formatting **never** pads the year with a leading zero – so ``6`` stays ``6``.
|
||||
* ``offset(n)`` and the static ``generate_missing`` reliably walk the timeline
|
||||
one semester at a time with correct year transitions:
|
||||
|
||||
SoSe 6 → **WiSe 6/7** → SoSe 7 → WiSe 7/8 → …
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import datetime
|
||||
import re
|
||||
|
||||
from src.shared.logging import log
|
||||
|
||||
|
||||
class Semester:
|
||||
"""Represents a German university semester (WiSe or SoSe)."""
|
||||
|
||||
# ------------------------------------------------------------------
|
||||
# Class‑level defaults – will be *copied* to each instance and then
|
||||
# potentially overwritten in ``__init__``.
|
||||
# ------------------------------------------------------------------
|
||||
    _year: int | None = int(str(datetime.datetime.now().year)[2:])  # 2024 → 24
|
||||
_semester: str | None = None # "WiSe" or "SoSe" – set later
|
||||
_month: int | None = datetime.datetime.now().month
|
||||
value: str | None = None # Human‑readable label, e.g. "WiSe 23/24"
|
||||
|
||||
# ------------------------------------------------------------------
|
||||
# Construction helpers
|
||||
# ------------------------------------------------------------------
|
||||
def __init__(
|
||||
self,
|
||||
year: int | None = None,
|
||||
semester: str | None = None,
|
||||
month: int | None = None,
|
||||
) -> None:
|
||||
if year is not None:
|
||||
self._year = int(year)
|
||||
if semester is not None:
|
||||
if semester not in ("WiSe", "SoSe"):
|
||||
raise ValueError("semester must be 'WiSe' or 'SoSe'")
|
||||
self._semester = semester
|
||||
if month is not None:
|
||||
self._month = month
|
||||
|
||||
self.__post_init__()
|
||||
|
||||
def __post_init__(self) -> None: # noqa: D401 – keep original name
|
||||
if self._year is None:
|
||||
self._year = int(str(datetime.datetime.now().year)[2:])
|
||||
if self._month is None:
|
||||
self._month = datetime.datetime.now().month
|
||||
if self._semester is None:
|
||||
self._generate_semester_from_month()
|
||||
self._compute_value()
|
||||
|
||||
# ------------------------------------------------------------------
|
||||
# Dunder helpers
|
||||
# ------------------------------------------------------------------
|
||||
def __str__(self) -> str: # noqa: D401 – keep original name
|
||||
return self.value or "<invalid Semester>"
|
||||
|
||||
def __repr__(self) -> str: # Helpful for debugging lists
|
||||
return f"Semester({self._year!r}, {self._semester!r})"
|
||||
|
||||
# ------------------------------------------------------------------
|
||||
# Internal helpers
|
||||
# ------------------------------------------------------------------
|
||||
def _generate_semester_from_month(self) -> None:
|
||||
"""Infer *WiSe* / *SoSe* from the month attribute."""
|
||||
self._semester = "WiSe" if (self._month <= 3 or self._month > 9) else "SoSe"
|
||||
|
||||
def _compute_value(self) -> None:
|
||||
"""Human‑readable semester label – e.g. ``WiSe 23/24`` or ``SoSe 24``."""
|
||||
year = self._year
|
||||
if self._semester == "WiSe":
|
||||
next_year = (year + 1) % 100 # wrap 99 → 0
|
||||
self.value = f"WiSe {year}/{next_year}"
|
||||
else: # SoSe
|
||||
self.value = f"SoSe {year}"
|
||||
|
||||
# ------------------------------------------------------------------
|
||||
# Public API
|
||||
# ------------------------------------------------------------------
|
||||
def offset(self, value: int) -> "Semester":
|
||||
"""Return a new :class:`Semester` *value* steps away.
|
||||
|
||||
The algorithm maps every semester to a monotonically increasing
|
||||
*linear index* so that simple addition suffices:
|
||||
|
||||
``index = year * 2 + (0 if SoSe else 1)``.
|
||||
"""
|
||||
if not isinstance(value, int):
|
||||
raise TypeError("value must be an int (number of semesters to jump)")
|
||||
if value == 0:
|
||||
return Semester(self._year, self._semester)
|
||||
|
||||
current_idx = self._year * 2 + (0 if self._semester == "SoSe" else 1)
|
||||
target_idx = current_idx + value
|
||||
if target_idx < 0:
|
||||
raise ValueError("offset would result in a negative year – not supported")
|
||||
|
||||
new_year, semester_bit = divmod(target_idx, 2)
|
||||
new_semester = "SoSe" if semester_bit == 0 else "WiSe"
|
||||
return Semester(new_year, new_semester)
|
||||
|
||||
# ------------------------------------------------------------------
|
||||
# Comparison helpers
|
||||
# ------------------------------------------------------------------
|
||||
def isPastSemester(self, current: "Semester") -> bool:
|
||||
log.debug(f"Comparing {self} < {current}")
|
||||
if self.year < current.year:
|
||||
return True
|
||||
if self.year == current.year:
|
||||
            return (
                self.semester == "SoSe" and current.semester == "WiSe"
            )  # same year: SoSe YY starts before WiSe YY/YY+1
|
||||
return False
|
||||
|
||||
def isFutureSemester(self, current: "Semester") -> bool:
|
||||
if self.year > current.year:
|
||||
return True
|
||||
if self.year == current.year:
|
||||
            return (
                self.semester == "WiSe" and current.semester == "SoSe"
            )  # same year: WiSe YY/YY+1 starts after SoSe YY
|
||||
return False
|
||||
|
||||
def isMatch(self, other: "Semester") -> bool:
|
||||
return self.year == other.year and self.semester == other.semester
|
||||
|
||||
# ------------------------------------------------------------------
|
||||
# Convenience properties
|
||||
# ------------------------------------------------------------------
|
||||
@property
|
||||
def next(self) -> "Semester":
|
||||
return self.offset(1)
|
||||
|
||||
@property
|
||||
def previous(self) -> "Semester":
|
||||
return self.offset(-1)
|
||||
|
||||
@property
|
||||
def year(self) -> int:
|
||||
return self._year
|
||||
|
||||
@property
|
||||
def semester(self) -> str:
|
||||
return self._semester
|
||||
|
||||
# ------------------------------------------------------------------
|
||||
# Static helpers
|
||||
# ------------------------------------------------------------------
|
||||
@staticmethod
|
||||
def generate_missing(start: "Semester", end: "Semester") -> list[str]:
|
||||
"""Return all consecutive semesters from *start* to *end* (inclusive)."""
|
||||
if not isinstance(start, Semester) or not isinstance(end, Semester):
|
||||
raise TypeError("start and end must be Semester instances")
|
||||
if start.isFutureSemester(end) and not start.isMatch(end):
|
||||
raise ValueError("'start' must not be after 'end'")
|
||||
|
||||
        chain: list[str] = [start.value]
|
||||
current = start
|
||||
while not current.isMatch(end):
|
||||
current = current.next
|
||||
chain.append(current.value)
|
||||
if len(chain) > 1000: # sanity guard
|
||||
raise RuntimeError("generate_missing exceeded sane iteration limit")
|
||||
return chain
|
||||
|
||||
# ------------------------------------------------------------------
|
||||
# Parsing helper
|
||||
# ------------------------------------------------------------------
|
||||
@classmethod
|
||||
def from_string(cls, s: str) -> "Semester":
|
||||
"""Parse a human‑readable semester label and return a :class:`Semester`.
|
||||
|
||||
Accepted formats (case‑insensitive)::
|
||||
|
||||
"SoSe <YY>" → SoSe of year YY
|
||||
"WiSe <YY>/<YY+1>" → Winter term starting in YY
|
||||
"WiSe <YY>" → Shorthand for the above (next year implied)
|
||||
|
||||
``YY`` may contain a leading zero ("06" → 6).
|
||||
"""
|
||||
if not isinstance(s, str):
|
||||
raise TypeError("s must be a string")
|
||||
|
||||
pattern = r"\s*(WiSe|SoSe)\s+(\d{1,2})(?:\s*/\s*(\d{1,2}))?\s*"
|
||||
m = re.fullmatch(pattern, s, flags=re.IGNORECASE)
|
||||
if not m:
|
||||
raise ValueError(
|
||||
"invalid semester string format – expected 'SoSe YY' or 'WiSe YY/YY' (spacing flexible)"
|
||||
)
|
||||
|
||||
term_raw, y1_str, y2_str = m.groups()
|
||||
term = term_raw.capitalize() # normalize case → "WiSe" or "SoSe"
|
||||
year = int(y1_str.lstrip("0") or "0") # "06" → 6, "0" stays 0
|
||||
|
||||
if term == "SoSe":
|
||||
if y2_str is not None:
|
||||
raise ValueError(
|
||||
"SoSe string should not contain '/' followed by a second year"
|
||||
)
|
||||
return cls(year, "SoSe")
|
||||
|
||||
# term == "WiSe"
|
||||
if y2_str is not None:
|
||||
next_year = int(y2_str.lstrip("0") or "0")
|
||||
expected_next = (year + 1) % 100
|
||||
if next_year != expected_next:
|
||||
raise ValueError("WiSe second year must equal first year + 1 (mod 100)")
|
||||
# Accept both explicit "WiSe 6/7" and shorthand "WiSe 6"
|
||||
return cls(year, "WiSe")
|
||||
|
||||
|
||||
# ------------------------- quick self‑test -------------------------
|
||||
if __name__ == "__main__":
|
||||
# Chain generation demo ------------------------------------------------
|
||||
s_start = Semester(6, "SoSe") # SoSe 6
|
||||
s_end = Semester(25, "WiSe") # WiSe 25/26
|
||||
chain = Semester.generate_missing(s_start, s_end)
|
||||
# print("generate_missing:", [str(s) for s in chain])
|
||||
|
||||
# Parsing demo ---------------------------------------------------------
|
||||
examples = [
|
||||
"SoSe 6",
|
||||
"WiSe 6/7",
|
||||
"WiSe 6",
|
||||
"SoSe 23",
|
||||
"WiSe 23/24",
|
||||
"WiSe 24",
|
||||
"WiSe 99/00",
|
||||
"SoSe 00",
|
||||
"WiSe 100/101", # test large year
|
||||
]
|
||||
    for ex in examples:
        try:
            parsed = Semester.from_string(ex)
            print(f"'{ex}' → {parsed} ({parsed.year=}, {parsed.semester=})")
        except ValueError as err:
            # "WiSe 100/101" uses a three-digit year and is rejected by the parser
            print(f"'{ex}' rejected: {err}")
|
||||
@@ -1,24 +0,0 @@
|
||||
from dataclasses import dataclass, field
|
||||
|
||||
import yaml
|
||||
|
||||
|
||||
@dataclass
|
||||
class Settings:
|
||||
"""Settings for the app."""
|
||||
|
||||
save_path: str
|
||||
database_name: str
|
||||
database_path: str
|
||||
default_apps: bool = True
|
||||
custom_applications: list[dict] = field(default_factory=list)
|
||||
|
||||
def save_settings(self) -> None:
|
||||
"""Save the settings to the config file."""
|
||||
with open("config.yaml", "w") as f:
|
||||
yaml.dump(self.__dict__, f)
|
||||
|
||||
|
||||
# open the config file and load the settings
|
||||
with open("config.yaml", "r") as f:
|
||||
data = yaml.safe_load(f)
|
||||
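# A minimal sketch of turning the loaded mapping back into a Settings object
# (assumption: config.yaml was written by save_settings, so its keys match the
# dataclass fields one-to-one):
#
#     settings = Settings(**data)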
@@ -1,314 +0,0 @@
|
||||
from enum import Enum
|
||||
from typing import Any, Optional, Union
|
||||
|
||||
import requests
|
||||
from bs4 import BeautifulSoup
|
||||
|
||||
# import sleep_and_retry decorator to retry requests
|
||||
from ratelimit import limits, sleep_and_retry
|
||||
|
||||
from src.logic.dataclass import BookData
|
||||
from src.shared.logging import log
|
||||
from src.transformers import ARRAYData, BibTeXData, COinSData, RDSData, RISData
|
||||
from src.transformers.transformers import RDS_AVAIL_DATA, RDS_GENERIC_DATA
|
||||
|
||||
# logger.add(sys.stderr, format="{time} {level} {message}", level="INFO")
|
||||
|
||||
|
||||
API_URL = "https://rds.ibs-bw.de/phfreiburg/opac/RDSIndexrecord/{}/"
|
||||
PPN_URL = "https://rds.ibs-bw.de/phfreiburg/opac/RDSIndex/Search?type0%5B%5D=allfields&lookfor0%5B%5D=&join=AND&bool0%5B%5D=AND&type0%5B%5D=au&lookfor0%5B%5D=&join=AND&bool0%5B%5D=AND&type0%5B%5D=ti&lookfor0%5B%5D=&join=AND&bool0%5B%5D=AND&type0%5B%5D=ct&lookfor0%5B%5D=&join=AND&bool0%5B%5D=AND&type0%5B%5D=isn&lookfor0%5B%5D=&join=AND&bool0%5B%5D=AND&type0%5B%5D=ta&lookfor0%5B%5D=&join=AND&bool0%5B%5D=AND&type0%5B%5D=co&lookfor0%5B%5D=&join=AND&bool0%5B%5D=AND&type0%5B%5D=py&lookfor0%5B%5D=&join=AND&bool0%5B%5D=AND&type0%5B%5D=pp&lookfor0%5B%5D=&join=AND&bool0%5B%5D=AND&type0%5B%5D=pu&lookfor0%5B%5D=&join=AND&bool0%5B%5D=AND&type0%5B%5D=si&lookfor0%5B%5D={}&join=AND&bool0%5B%5D=AND&type0%5B%5D=zr&lookfor0%5B%5D=&join=AND&bool0%5B%5D=AND&type0%5B%5D=cc&lookfor0%5B%5D=&join=AND&bool0%5B%5D=AND"
|
||||
BASE = "https://rds.ibs-bw.de"
|
||||
#
|
||||
TITLE = "RDS_TITLE"
|
||||
SIGNATURE = "RDS_SIGNATURE"
|
||||
EDITION = "RDS_EDITION"
|
||||
ISBN = "RDS_ISBN"
|
||||
AUTHOR = "RDS_PERSON"
|
||||
|
||||
HEADERS = {
|
||||
"User-Agent": "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 \
|
||||
(HTML, like Gecko) Chrome/44.0.2403.157 Safari/537.36",
|
||||
"Accept-Language": "en-US, en;q=0.5",
|
||||
}
|
||||
RATE_LIMIT = 20
|
||||
RATE_PERIOD = 30
|
||||
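# Together with the ratelimit decorators below, this caps requests at
# RATE_LIMIT calls per RATE_PERIOD seconds; @sleep_and_retry makes the caller
# sleep until the window frees up instead of raising RateLimitException.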
|
||||
|
||||
class TransformerType(Enum):
|
||||
ARRAY = "ARRAY"
|
||||
COinS = "COinS"
|
||||
BibTeX = "BibTeX"
|
||||
RIS = "RIS"
|
||||
RDS = "RDS"
|
||||
|
||||
|
||||
class WebRequest:
|
||||
def __init__(self) -> None:
|
||||
"""Request data from the web, and format it depending on the mode."""
|
||||
self.apparat = None
|
||||
self.use_any = False # use any book that matches the search term
|
||||
self.signature = None
|
||||
self.ppn = None
|
||||
self.data = None
|
||||
self.timeout = 5
|
||||
log.info("Initialized WebRequest")
|
||||
|
||||
@property
|
||||
def use_any_book(self):
|
||||
"""use any book that matches the search term"""
|
||||
self.use_any = True
|
||||
log.info("Using any book")
|
||||
return self
|
||||
|
||||
def set_apparat(self, apparat: int) -> "WebRequest":
|
||||
self.apparat = apparat
|
||||
if int(self.apparat) < 10:
|
||||
self.apparat = f"0{self.apparat}"
|
||||
log.info(f"Set apparat to {self.apparat}")
|
||||
return self
|
||||
|
||||
def get_ppn(self, signature: str) -> "WebRequest":
|
||||
self.signature = signature
|
||||
if "+" in signature:
|
||||
signature = signature.replace("+", "%2B")
|
||||
if "doi.org" in signature:
|
||||
signature = signature.split("/")[-1]
|
||||
self.ppn = signature
|
||||
return self
|
||||
|
||||
@sleep_and_retry
|
||||
@limits(calls=RATE_LIMIT, period=RATE_PERIOD)
|
||||
def search_book(self, searchterm: str) -> str:
|
||||
response = requests.get(PPN_URL.format(searchterm), timeout=self.timeout)
|
||||
return response.text
|
||||
|
||||
@sleep_and_retry
|
||||
@limits(calls=RATE_LIMIT, period=RATE_PERIOD)
|
||||
def search_ppn(self, ppn: str) -> str:
|
||||
response = requests.get(API_URL.format(ppn), timeout=self.timeout)
|
||||
return response.text
|
||||
|
||||
def get_book_links(self, searchterm: str) -> list[str]:
|
||||
response: str = self.search_book(searchterm) # type:ignore
|
||||
soup = BeautifulSoup(response, "html.parser")
|
||||
links = soup.find_all("a", class_="title getFull")
|
||||
res: list[str] = []
|
||||
for link in links:
|
||||
res.append(BASE + link["href"])
|
||||
return res
|
||||
|
||||
@sleep_and_retry
|
||||
@limits(calls=RATE_LIMIT, period=RATE_PERIOD)
|
||||
def search(self, link: str) -> Optional[str]:
|
||||
try:
|
||||
response = requests.get(link, timeout=self.timeout)
|
||||
return response.text
|
||||
except requests.exceptions.RequestException as e:
|
||||
log.error(f"Request failed: {e}")
|
||||
return None
|
||||
|
||||
def get_data(self) -> Optional[list[str]]:
|
||||
links = self.get_book_links(self.ppn)
|
||||
log.debug(f"Links: {links}")
|
||||
return_data: list[str] = []
|
||||
for link in links:
|
||||
result: str = self.search(link) # type:ignore
|
||||
# in result search for class col-xs-12 rds-dl RDS_LOCATION
|
||||
# if found, return text of href
|
||||
soup = BeautifulSoup(result, "html.parser")
|
||||
locations = soup.find_all("div", class_="col-xs-12 rds-dl RDS_LOCATION")
|
||||
if locations:
|
||||
for location in locations:
|
||||
if "1. OG Semesterapparat" in location.text:
|
||||
log.success("Found Semesterapparat, adding entry")
|
||||
pre_tag = soup.find_all("pre")
|
||||
return_data = []
|
||||
if pre_tag:
|
||||
for tag in pre_tag:
|
||||
data = tag.text.strip()
|
||||
return_data.append(data)
|
||||
return return_data
|
||||
else:
|
||||
log.error("No <pre> tag found")
|
||||
return return_data
|
||||
else:
|
||||
item_location = location.find(
|
||||
"div", class_="col-xs-12 col-md-7 col-lg-8 rds-dl-panel"
|
||||
).text.strip()
|
||||
log.debug(f"Item location: {item_location}")
|
||||
if self.use_any:
|
||||
pre_tag = soup.find_all("pre")
|
||||
if pre_tag:
|
||||
for tag in pre_tag:
|
||||
data = tag.text.strip()
|
||||
return_data.append(data)
|
||||
return return_data
|
||||
else:
|
||||
log.error("No <pre> tag found")
|
||||
raise ValueError("No <pre> tag found")
|
||||
elif f"Semesterapparat-{self.apparat}" in item_location:
|
||||
pre_tag = soup.find_all("pre")
|
||||
return_data = []
|
||||
if pre_tag:
|
||||
for tag in pre_tag:
|
||||
data = tag.text.strip()
|
||||
return_data.append(data)
|
||||
return return_data
|
||||
else:
|
||||
log.error("No <pre> tag found")
|
||||
return return_data
|
||||
else:
|
||||
log.error(
|
||||
f"Signature {self.signature} not found in {item_location}"
|
||||
)
|
||||
# return_data = []
|
||||
|
||||
return return_data
|
||||
|
||||
def get_data_elsa(self) -> Optional[list[str]]:
|
||||
links = self.get_book_links(self.ppn)
|
||||
for link in links:
|
||||
result = self.search(link)
|
||||
# in result search for class col-xs-12 rds-dl RDS_LOCATION
|
||||
# if found, return text of href
|
||||
soup = BeautifulSoup(result, "html.parser")
|
||||
locations = soup.find_all("div", class_="col-xs-12 rds-dl RDS_LOCATION")
|
||||
if locations:
|
||||
for _ in locations:
|
||||
pre_tag = soup.find_all("pre")
|
||||
return_data = []
|
||||
if pre_tag:
|
||||
for tag in pre_tag:
|
||||
data = tag.text.strip()
|
||||
return_data.append(data)
|
||||
return return_data
|
||||
else:
|
||||
log.error("No <pre> tag found")
|
||||
return return_data
|
||||
|
||||
|
||||
class BibTextTransformer:
|
||||
"""Transforms data from the web into a BibText format.
|
||||
Valid Modes are ARRAY, COinS, BibTeX, RIS, RDS
|
||||
Raises:
|
||||
ValueError: Raised if mode is not in valid_modes
|
||||
"""
|
||||
|
||||
valid_modes = [
|
||||
TransformerType.ARRAY,
|
||||
TransformerType.COinS,
|
||||
TransformerType.BibTeX,
|
||||
TransformerType.RIS,
|
||||
TransformerType.RDS,
|
||||
]
|
||||
|
||||
def __init__(self, mode: TransformerType = TransformerType.ARRAY) -> None:
|
||||
self.mode = mode.value
|
||||
self.field = None
|
||||
self.signature = None
|
||||
if mode not in self.valid_modes:
|
||||
log.error(f"Mode {mode} not valid")
|
||||
raise ValueError(f"Mode {mode} not valid")
|
||||
self.data = None
|
||||
# self.bookdata = BookData(**self.data)
|
||||
|
||||
def use_signature(self, signature: str) -> "BibTextTransformer":
|
||||
"""use the exact signature to search for the book"""
|
||||
self.signature = signature
|
||||
return self
|
||||
|
||||
def get_data(self, data: Optional[list[str]] = None) -> "BibTextTransformer":
|
||||
RIS_IDENT = "TY -"
|
||||
ARRAY_IDENT = "[kid]"
|
||||
COinS_IDENT = "ctx_ver"
|
||||
BIBTEX_IDENT = "@book"
|
||||
RDS_IDENT = "RDS ---------------------------------- "
|
||||
|
||||
if data is None:
|
||||
self.data = None
|
||||
return self
|
||||
|
||||
if self.mode == "RIS":
|
||||
for line in data:
|
||||
if RIS_IDENT in line:
|
||||
self.data = line
|
||||
elif self.mode == "ARRAY":
|
||||
for line in data:
|
||||
if ARRAY_IDENT in line:
|
||||
self.data = line
|
||||
elif self.mode == "COinS":
|
||||
for line in data:
|
||||
if COinS_IDENT in line:
|
||||
self.data = line
|
||||
elif self.mode == "BibTeX":
|
||||
for line in data:
|
||||
if BIBTEX_IDENT in line:
|
||||
self.data = line
|
||||
elif self.mode == "RDS":
|
||||
for line in data:
|
||||
if RDS_IDENT in line:
|
||||
self.data = line
|
||||
return self
|
||||
|
||||
def return_data(
|
||||
self, option: Any = None
|
||||
) -> Union[
|
||||
Optional[BookData],
|
||||
Optional[RDS_GENERIC_DATA],
|
||||
Optional[RDS_AVAIL_DATA],
|
||||
None,
|
||||
dict[str, Union[RDS_AVAIL_DATA, RDS_GENERIC_DATA]],
|
||||
]:
|
||||
"""Return Data to caller.
|
||||
|
||||
Args:
|
||||
option (string, optional): Option for RDS as there are two filetypes. Use rds_availability or rds_data. Anything else gives a dict of both responses. Defaults to None.
|
||||
|
||||
Returns:
|
||||
BookData: a dataclass containing data about the book
|
||||
"""
|
||||
if self.data is None:
|
||||
return None
|
||||
match self.mode:
|
||||
case "ARRAY":
|
||||
return ARRAYData(self.signature).transform(self.data)
|
||||
case "COinS":
|
||||
return COinSData().transform(self.data)
|
||||
case "BibTeX":
|
||||
return BibTeXData().transform(self.data)
|
||||
case "RIS":
|
||||
return RISData().transform(self.data)
|
||||
case "RDS":
|
||||
return RDSData().transform(self.data).return_data(option)
|
||||
case _:
|
||||
return None
|
||||
|
||||
# if self.mode == "ARRAY":
|
||||
# return ARRAYData().transform(self.data)
|
||||
# elif self.mode == "COinS":
|
||||
# return COinSData().transform(self.data)
|
||||
# elif self.mode == "BibTeX":
|
||||
# return BibTeXData().transform(self.data)
|
||||
# elif self.mode == "RIS":
|
||||
# return RISData().transform(self.data)
|
||||
# elif self.mode == "RDS":
|
||||
# return RDSData().transform(self.data).return_data(option)
|
||||
|
||||
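# Usage sketch for the RDS option handling documented in return_data() above
# ("raw_lines" is a placeholder for the list returned by WebRequest.get_data):
#
#     transformer = BibTextTransformer(TransformerType.RDS).get_data(raw_lines)
#     availability = transformer.return_data("rds_availability")
#     record = transformer.return_data("rds_data")
#     both = transformer.return_data()  # dict with both parts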
|
||||
def cover(isbn):
|
||||
test_url = f"https://www.buchhandel.de/cover/{isbn}/{isbn}-cover-m.jpg"
|
||||
# log.debug(test_url)
|
||||
data = requests.get(test_url, stream=True)
|
||||
return data.content
|
||||
|
||||
|
||||
def get_content(soup, css_class):
|
||||
return soup.find("div", class_=css_class).text.strip()
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
# log.debug("main")
|
||||
link = "CU 8500 K64"
|
||||
data = WebRequest(71).get_ppn(link).get_data()
|
||||
bib = BibTextTransformer("ARRAY").get_data().return_data()
|
||||
log.debug(bib)
|
||||
@@ -1,373 +0,0 @@
|
||||
import zipfile
|
||||
from typing import Any, Optional
|
||||
|
||||
import fitz # PyMuPDF
|
||||
import pandas as pd
|
||||
from bs4 import BeautifulSoup
|
||||
from docx import Document
|
||||
|
||||
from src.logic.dataclass import Book, SemapDocument
|
||||
from src.shared.logging import log
|
||||
|
||||
|
||||
def word_docx_to_csv(path: str) -> list[pd.DataFrame]:
|
||||
doc = Document(path)
|
||||
tables = doc.tables
|
||||
m_data = []
|
||||
for table in tables:
|
||||
data = []
|
||||
for row in table.rows:
|
||||
row_data: list[Any] = []
|
||||
for cell in row.cells:
|
||||
text = cell.text
|
||||
|
||||
text = text.replace("\n", "")
|
||||
row_data.append(text)
|
||||
# if text == "Ihr Fach:":
|
||||
# row_data.append(get_fach(path))
|
||||
data.append(row_data)
|
||||
df = pd.DataFrame(data)
|
||||
df.columns = df.iloc[0]
|
||||
df = df.iloc[1:]
|
||||
|
||||
m_data.append(df)
|
||||
|
||||
return m_data
|
||||
|
||||
|
||||
def get_fach(path: str) -> Optional[str]:
|
||||
document = zipfile.ZipFile(path)
|
||||
xml_data = document.read("word/document.xml")
|
||||
document.close()
|
||||
|
||||
soup = BeautifulSoup(xml_data, "xml")
|
||||
# text we need is in <w:p w14:paraId="12456A32" ... > -> w:r -> w:t
|
||||
paragraphs = soup.find_all("w:p")
|
||||
for para in paragraphs:
|
||||
para_id = para.get("w14:paraId")
|
||||
if para_id == "12456A32":
|
||||
# get the data in the w:t
|
||||
for run in para.find_all("w:r"):
|
||||
data = run.find("w:t")
|
||||
if data and data.contents:
|
||||
return data.contents[0]
|
||||
return None
|
||||
|
||||
|
||||
def makeDict() -> dict[str, Optional[str]]:
|
||||
return {
|
||||
"work_author": None,
|
||||
"section_author": None,
|
||||
"year": None,
|
||||
"edition": None,
|
||||
"work_title": None,
|
||||
"chapter_title": None,
|
||||
"location": None,
|
||||
"publisher": None,
|
||||
"signature": None,
|
||||
"issue": None,
|
||||
"pages": None,
|
||||
"isbn": None,
|
||||
"type": None,
|
||||
}
|
||||
|
||||
|
||||
def tuple_to_dict(tlist: tuple, type: str) -> list[dict[str, Optional[str]]]:
|
||||
ret: list[dict[str, Optional[str]]] = []
|
||||
for line in tlist:
|
||||
data = makeDict()
|
||||
if type == "Monografien":
|
||||
data["type"] = type
|
||||
data["work_author"] = line[0]
|
||||
data["year"] = line[1]
|
||||
data["edition"] = line[2]
|
||||
data["work_title"] = line[3]
|
||||
data["location"] = line[4]
|
||||
data["publisher"] = line[5]
|
||||
data["signature"] = line[6]
|
||||
data["pages"] = line[7]
|
||||
elif type == "Herausgeberwerke":
|
||||
data["type"] = type
|
||||
data["section_author"] = line[0]
|
||||
data["year"] = line[1]
|
||||
data["edition"] = line[2]
|
||||
data["chapter_title"] = line[3]
|
||||
data["work_author"] = line[4]
|
||||
data["work_title"] = line[5]
|
||||
data["location"] = line[6]
|
||||
data["publisher"] = line[7]
|
||||
data["signature"] = line[9]
|
||||
data["pages"] = line[8]
|
||||
elif type == "Zeitschriftenaufsätze":
|
||||
data["type"] = type
|
||||
data["section_author"] = line[0]
|
||||
data["year"] = line[1]
|
||||
data["issue"] = line[2]
|
||||
data["chapter_title"] = line[3]
|
||||
data["work_title"] = line[4]
|
||||
data["location"] = line[5]
|
||||
data["publisher"] = line[6]
|
||||
data["signature"] = line[8]
|
||||
data["pages"] = line[7]
|
||||
ret.append(data)
|
||||
return ret
|
||||
|
||||
|
||||
def elsa_word_to_csv(path: str) -> tuple[list[dict[str, Optional[str]]], str]:
|
||||
doc = Document(path)
|
||||
# # print all lines in doc
|
||||
doctype = [para.text for para in doc.paragraphs if para.text != ""][-1]
|
||||
tuples = {
|
||||
"Monografien": ("", "", "", "", "", "", "", "", ""),
|
||||
"Herausgeberwerke": ("", "", "", "", "", "", "", "", "", "", ""),
|
||||
"Zeitschriftenaufsätze": ("", "", "", "", "", "", "", "", "", ""),
|
||||
}
|
||||
tables = doc.tables
|
||||
|
||||
m_data: list[pd.DataFrame] = []
|
||||
for table in tables:
|
||||
data: list[list[str]] = []
|
||||
for row in table.rows:
|
||||
row_data: list[str] = []
|
||||
for cell in row.cells:
|
||||
text = cell.text
|
||||
text = text.replace("\n", "")
|
||||
text = text.replace("\u2002", "")
|
||||
row_data.append(text)
|
||||
data.append(row_data)
|
||||
df = pd.DataFrame(data)
|
||||
df.columns = df.iloc[0]
|
||||
df = df.iloc[1:]
|
||||
m_data.append(df)
|
||||
df = m_data[0]
|
||||
# split df to rows
|
||||
data = [
|
||||
row for row in df.itertuples(index=False, name=None) if row != tuples[doctype]
|
||||
]
|
||||
# log.debug(data)
|
||||
return tuple_to_dict(data, doctype), doctype
|
||||
|
||||
|
||||
def word_to_semap(word_path: str, ai: bool = True) -> SemapDocument:
|
||||
log.info("Parsing Word Document {}", word_path)
|
||||
semap = SemapDocument()
|
||||
df = word_docx_to_csv(word_path)
|
||||
apparatdata = df[0]
|
||||
apparatdata = apparatdata.to_dict()
|
||||
keys = list(apparatdata.keys())
|
||||
# print(apparatdata, keys)
|
||||
|
||||
appdata = {keys[i]: keys[i + 1] for i in range(0, len(keys) - 1, 2)}
|
||||
semap.phoneNumber = appdata["Telefon:"]
|
||||
semap.subject = appdata["Ihr Fach:"]
|
||||
semap.mail = appdata["Mailadresse:"]
|
||||
semap.personName = ",".join(appdata["Ihr Name und Titel:"].split(",")[:-1])
|
||||
semap.personTitle = ",".join(appdata["Ihr Name und Titel:"].split(",")[-1:]).strip()
|
||||
apparatdata = df[1]
|
||||
apparatdata = apparatdata.to_dict()
|
||||
keys = list(apparatdata.keys())
|
||||
appdata = {keys[i]: keys[i + 1] for i in range(0, len(keys), 2)}
|
||||
semap.title = appdata["Veranstaltung:"]
|
||||
semap.semester = appdata["Semester:"]
|
||||
if ai:
|
||||
        _ = semap.renameSemester
        _ = semap.nameSetter
|
||||
|
||||
books = df[2]
|
||||
booklist = []
|
||||
for i in range(len(books)):
|
||||
if books.iloc[i].isnull().all():
|
||||
continue
|
||||
data = books.iloc[i].to_dict()
|
||||
book = Book()
|
||||
book.from_dict(data)
|
||||
if book.is_empty:
|
||||
continue
|
||||
elif not book.has_signature:
|
||||
continue
|
||||
else:
|
||||
booklist.append(book)
|
||||
log.info("Found {} books", len(booklist))
|
||||
semap.books = booklist
|
||||
return semap
|
||||
|
||||
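# Typical call (sketch; the path is illustrative):
#
#     semap = word_to_semap("apparat_form.docx")
#     print(semap.title, semap.semester, len(semap.books))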
|
||||
def pdf_to_semap(pdf_path: str, ai: bool = True) -> SemapDocument:
|
||||
"""
|
||||
    Parse a Semesterapparat PDF form and return a SemapDocument.
|
||||
- No external programs, only PyMuPDF.
|
||||
- Robust to multi-line field values (e.g., hyphenated emails) and multi-line table cells.
|
||||
- Works across multiple pages; headers only need to exist on the first page.
|
||||
"""
|
||||
doc = fitz.open(pdf_path)
|
||||
semap = SemapDocument()
|
||||
|
||||
# ---------- helpers ----------
|
||||
def _join_tokens(tokens: list[str]) -> str:
|
||||
"""Join tokens, preserving hyphen/URL joins across line wraps."""
|
||||
parts = []
|
||||
for tok in tokens:
|
||||
if parts and (
|
||||
parts[-1].endswith("-")
|
||||
or parts[-1].endswith("/")
|
||||
or parts[-1].endswith(":")
|
||||
):
|
||||
parts[-1] = parts[-1] + tok # no space after '-', '/' or ':'
|
||||
else:
|
||||
parts.append(tok)
|
||||
return " ".join(parts).strip()
|
||||
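    # e.g. _join_tokens(["some-", "address@example.org", "123"])
    #   -> "some-address@example.org 123"   (no space after a trailing '-')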
|
||||
def _extract_row_values_multiline(
|
||||
page, labels: list[str], y_window: float = 24
|
||||
) -> dict[str, str]:
|
||||
"""For a row of inline labels (e.g., Name/Fach/Telefon/Mail), grab text to the right of each label."""
|
||||
rects = []
|
||||
for lab in labels:
|
||||
hits = page.search_for(lab)
|
||||
if hits:
|
||||
rects.append((lab, hits[0]))
|
||||
if not rects:
|
||||
return {}
|
||||
|
||||
rects.sort(key=lambda t: t[1].x0)
|
||||
words = page.get_text("words")
|
||||
out = {}
|
||||
for i, (lab, r) in enumerate(rects):
|
||||
x0 = r.x1 + 1
|
||||
x1 = rects[i + 1][1].x0 - 1 if i + 1 < len(rects) else page.rect.width - 5
|
||||
y0 = r.y0 - 3
|
||||
y1 = r.y0 + y_window
|
||||
toks = [w for w in words if x0 <= w[0] <= x1 and y0 <= w[1] <= y1]
|
||||
toks.sort(key=lambda w: (w[1], w[0])) # line, then x
|
||||
out[lab] = _join_tokens([w[4] for w in toks])
|
||||
return out
|
||||
|
||||
def _compute_columns_from_headers(page0):
|
||||
"""Find column headers (once) and derive column centers + header baseline."""
|
||||
headers = [
|
||||
("Autorenname(n):", "Autorenname(n):Nachname, Vorname"),
|
||||
("Jahr/Auflage", "Jahr/Auflage"),
|
||||
("Titel", "Titel"),
|
||||
("Ort und Verlag", "Ort und Verlag"),
|
||||
("Standnummer", "Standnummer"),
|
||||
("Interne Vermerke", "Interne Vermerke"),
|
||||
]
|
||||
found = []
|
||||
for label, canon in headers:
|
||||
rects = [
|
||||
r for r in page0.search_for(label) if r.y0 > 200
|
||||
] # skip top-of-form duplicates
|
||||
if rects:
|
||||
found.append((canon, rects[0]))
|
||||
found.sort(key=lambda t: t[1].x0)
|
||||
cols = [(canon, r.x0, r.x1, (r.x0 + r.x1) / 2.0) for canon, r in found]
|
||||
header_y = min(r.y0 for _, r in found) if found else 0
|
||||
return cols, header_y
|
||||
|
||||
def _extract_table_rows_from_page(
|
||||
page, cols, header_y, y_top_margin=5, y_bottom_margin=40, y_tol=26.0
|
||||
):
|
||||
"""
|
||||
Group words into logical rows (tolerant to wrapped lines), then map each word
|
||||
to the nearest column by x-center and join tokens per column.
|
||||
"""
|
||||
words = [
|
||||
w
|
||||
for w in page.get_text("words")
|
||||
if w[1] > header_y + y_top_margin
|
||||
and w[3] < page.rect.height - y_bottom_margin
|
||||
]
|
||||
|
||||
# group into row bands by y (tolerance big enough to capture wrapped lines, but below next row gap)
|
||||
rows = []
|
||||
for w in sorted(words, key=lambda w: w[1]):
|
||||
y = w[1]
|
||||
for row in rows:
|
||||
if abs(row["y_mean"] - y) <= y_tol:
|
||||
row["ys"].append(y)
|
||||
row["y_mean"] = sum(row["ys"]) / len(row["ys"])
|
||||
row["words"].append(w)
|
||||
break
|
||||
else:
|
||||
rows.append({"y_mean": y, "ys": [y], "words": [w]})
|
||||
|
||||
# map to columns + join
|
||||
joined_rows = []
|
||||
for row in rows:
|
||||
rowdict = {canon: "" for canon, *_ in cols}
|
||||
words_by_col = {canon: [] for canon, *_ in cols}
|
||||
for w in sorted(row["words"], key=lambda w: (w[1], w[0])):
|
||||
xmid = (w[0] + w[2]) / 2.0
|
||||
canon = min(cols, key=lambda c: abs(xmid - c[3]))[0]
|
||||
words_by_col[canon].append(w[4])
|
||||
for canon, toks in words_by_col.items():
|
||||
rowdict[canon] = _join_tokens(toks)
|
||||
if any(v for v in rowdict.values()):
|
||||
joined_rows.append(rowdict)
|
||||
return joined_rows
|
||||
|
||||
# ---------- top-of-form fields ----------
|
||||
p0 = doc[0]
|
||||
row1 = _extract_row_values_multiline(
|
||||
p0,
|
||||
["Ihr Name und Titel:", "Ihr Fach:", "Telefon:", "Mailadresse:"],
|
||||
y_window=22,
|
||||
)
|
||||
row2 = _extract_row_values_multiline(
|
||||
p0, ["Veranstaltung:", "Semester:"], y_window=20
|
||||
)
|
||||
|
||||
name_title = row1.get("Ihr Name und Titel:", "") or ""
|
||||
semap.subject = row1.get("Ihr Fach:", None)
|
||||
semap.phoneNumber = row1.get("Telefon:", None) # keep as-is (string like "682-308")
|
||||
semap.mail = row1.get("Mailadresse:", None)
|
||||
semap.personName = ",".join(name_title.split(",")[:-1]) if name_title else None
|
||||
semap.personTitle = (
|
||||
",".join(name_title.split(",")[-1:]).strip() if name_title else None
|
||||
)
|
||||
|
||||
semap.title = row2.get("Veranstaltung:", None)
|
||||
semap.semester = row2.get("Semester:", None)
|
||||
|
||||
# ---------- table extraction (all pages) ----------
|
||||
cols, header_y = _compute_columns_from_headers(p0)
|
||||
all_rows: list[dict[str, Any]] = []
|
||||
for pn in range(len(doc)):
|
||||
all_rows.extend(_extract_table_rows_from_page(doc[pn], cols, header_y))
|
||||
|
||||
# drop the sub-header line "Nachname, Vorname" etc.
|
||||
filtered = []
|
||||
for r in all_rows:
|
||||
if r.get("Autorenname(n):Nachname, Vorname", "").strip() in (
|
||||
"",
|
||||
"Nachname, Vorname",
|
||||
):
|
||||
# skip if it's just the sub-header line
|
||||
if all(not r[c] for c in r if c != "Autorenname(n):Nachname, Vorname"):
|
||||
continue
|
||||
filtered.append(r)
|
||||
|
||||
# build Book objects (same filters as your word parser)
|
||||
booklist: list[Book] = []
|
||||
for row in filtered:
|
||||
b = Book()
|
||||
b.from_dict(row)
|
||||
if b.is_empty:
|
||||
continue
|
||||
if not b.has_signature:
|
||||
continue
|
||||
booklist.append(b)
|
||||
|
||||
semap.books = booklist
|
||||
|
||||
# keep parity with your post-processing
|
||||
if ai:
|
||||
_ = semap.renameSemester
|
||||
_ = semap.nameSetter
|
||||
|
||||
return semap
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
else_df = pdf_to_semap("C:/Users/aky547/Dokumente/testsemap.pdf")
|
||||
# print(else_df)
|
||||
@@ -1,67 +0,0 @@
|
||||
import xml.etree.ElementTree as ET
|
||||
|
||||
from src.logic.dataclass import Apparat, BookData, SemapDocument, XMLMailSubmission
|
||||
from src.logic.semester import Semester
|
||||
|
||||
|
||||
def parse_xml_submission(xml_string: str) -> XMLMailSubmission:
|
||||
"""
|
||||
Parse an XML string representing a mail submission and return an XMLMailSubmission object.
|
||||
"""
|
||||
submission = XMLMailSubmission()
|
||||
root = ET.fromstring(xml_string)
|
||||
static_data = root.find("static")
|
||||
static_info = {child.tag: child.text for child in static_data}
|
||||
books = root.find("books")
|
||||
books_info = []
|
||||
for book in books:
|
||||
book_details = {detail.tag: detail.text for detail in book}
|
||||
book = BookData(
|
||||
author=book_details.get("authorname"),
|
||||
year=book_details.get("year").split("/")[0]
|
||||
if "/" in book_details.get("year")
|
||||
else book_details.get("year"),
|
||||
edition=book_details.get("year").split("/")[1]
|
||||
if "/" in book_details.get("year")
|
||||
else None,
|
||||
title=book_details.get("title"),
|
||||
signature=book_details.get("signature"),
|
||||
)
|
||||
books_info.append(book)
|
||||
# Extract static data
|
||||
submission.name = static_info.get("name")
|
||||
submission.lastname = static_info.get("lastname")
|
||||
submission.title = static_info.get("title")
|
||||
submission.telno = int(static_info.get("telno"))
|
||||
submission.email = static_info.get("mail")
|
||||
submission.app_name = static_info.get("apparatsname")
|
||||
submission.subject = static_info.get("subject")
|
||||
sem_year = static_info.get("semester").split()[1]
|
||||
sem_term = static_info.get("semester").split()[0]
|
||||
submission.semester = Semester(semester=sem_term, year=int(sem_year))
|
||||
submission.books = books_info
|
||||
# Extract book information
|
||||
# book_info = []
|
||||
# for book in books:
|
||||
# book_details = {detail.tag: detail.text for detail in book}
|
||||
# book_info.append(book_details)
|
||||
return submission
|
||||
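# For reference, a sketch of the XML layout this parser assumes (tag names are
# taken from the .get() calls above; the root tag and values are illustrative):
#
# <submission>
#   <static>
#     <name>...</name> <lastname>...</lastname> <title>...</title>
#     <telno>123</telno> <mail>...</mail> <apparatsname>...</apparatsname>
#     <subject>...</subject> <semester>WiSe 24</semester>
#   </static>
#   <books>
#     <book>
#       <authorname>...</authorname> <year>2020/2</year>
#       <title>...</title> <signature>...</signature>
#     </book>
#   </books>
# </submission>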
|
||||
|
||||
def eml_parser(path: str) -> XMLMailSubmission:
|
||||
with open(path, "r", encoding="utf-8") as file:
|
||||
xml_content = file.read().split("\n\n", 1)[1] # Skip headers
|
||||
print("EML content loaded, parsing XML...")
|
||||
print(xml_content)
|
||||
return parse_xml_submission(xml_content)
|
||||
|
||||
|
||||
def eml_to_semap(eml_path: str) -> SemapDocument:
    submission = eml_parser(eml_path)
|
||||
semap_doc = SemapDocument(
|
||||
# prof=Prof(name=submission.name, lastname=submission.lastname, email=submission.email),
|
||||
apparat=Apparat(name=submission.app_name, subject=submission.subject),
|
||||
semester=submission.semester,
|
||||
books=submission.books,
|
||||
)
|
||||
return semap_doc
|
||||
@@ -1,340 +0,0 @@
|
||||
from dataclasses import dataclass
|
||||
from typing import Optional
|
||||
|
||||
from pyzotero import zotero
|
||||
|
||||
from src import settings
|
||||
from src.logic.webrequest import BibTextTransformer, WebRequest
|
||||
from src.shared.logging import log
|
||||
|
||||
|
||||
@dataclass
|
||||
class Creator:
|
||||
firstName: str = None
|
||||
lastName: str = None
|
||||
creatorType: str = "author"
|
||||
|
||||
def from_dict(self, data: dict) -> None:
|
||||
for key, value in data.items():
|
||||
setattr(self, key, value)
|
||||
|
||||
def from_string(self, data: str) -> "Creator":
|
||||
if "," in data:
|
||||
self.firstName = data.split(",")[1]
|
||||
self.lastName = data.split(",")[0]
|
||||
|
||||
return self
|
||||
|
||||
# set __dict__ object to be used in json
|
||||
|
||||
|
||||
@dataclass
|
||||
class Book:
|
||||
itemType: str = "book"
|
||||
creators: list[Creator] = None
|
||||
tags: list = None
|
||||
collections: list = None
|
||||
relations: dict = None
|
||||
title: str = None
|
||||
abstractNote: str = None
|
||||
series: str = None
|
||||
seriesNumber: str = None
|
||||
volume: str = None
|
||||
numberOfVolumes: str = None
|
||||
edition: str = None
|
||||
place: str = None
|
||||
publisher: str = None
|
||||
date: str = None
|
||||
numPages: str = None
|
||||
language: str = None
|
||||
ISBN: str = None
|
||||
shortTitle: str = None
|
||||
url: str = None
|
||||
accessDate: str = None
|
||||
archive: str = None
|
||||
archiveLocation: str = None
|
||||
libraryCatalog: str = None
|
||||
callNumber: str = None
|
||||
rights: str = None
|
||||
extra: str = None
|
||||
|
||||
def to_dict(self) -> dict:
|
||||
ret = {}
|
||||
for key, value in self.__dict__.items():
|
||||
if value:
|
||||
ret[key] = value
|
||||
return ret
|
||||
|
||||
|
||||
@dataclass
|
||||
class BookSection:
|
||||
itemType: str = "bookSection"
|
||||
title: str = None
|
||||
creators: list[Creator] = None
|
||||
abstractNote: str = None
|
||||
bookTitle: str = None
|
||||
series: str = None
|
||||
seriesNumber: str = None
|
||||
volume: str = None
|
||||
numberOfVolumes: str = None
|
||||
edition: str = None
|
||||
place: str = None
|
||||
publisher: str = None
|
||||
date: str = None
|
||||
pages: str = None
|
||||
language: str = None
|
||||
ISBN: str = None
|
||||
shortTitle: str = None
|
||||
url: str = None
|
||||
accessDate: str = None
|
||||
archive: str = None
|
||||
archiveLocation: str = None
|
||||
libraryCatalog: str = None
|
||||
callNumber: str = None
|
||||
rights: str = None
|
||||
extra: str = None
|
||||
    tags: list = None
    collections: list = None
    relations: dict = None
|
||||
|
||||
def to_dict(self) -> dict:
|
||||
ret = {}
|
||||
for key, value in self.__dict__.items():
|
||||
if value:
|
||||
ret[key] = value
|
||||
return ret
|
||||
|
||||
def assign(self, book) -> None:
|
||||
for key, value in book.__dict__.items():
|
||||
if key in self.__dict__.keys():
|
||||
try:
|
||||
setattr(self, key, value)
|
||||
except AttributeError:
|
||||
pass
|
||||
|
||||
|
||||
@dataclass
|
||||
class JournalArticle:
|
||||
itemType = "journalArticle"
|
||||
title: str = None
|
||||
creators: list[Creator] = None
|
||||
abstractNote: str = None
|
||||
publicationTitle: str = None
|
||||
volume: str = None
|
||||
issue: str = None
|
||||
pages: str = None
|
||||
date: str = None
|
||||
series: str = None
|
||||
seriesTitle: str = None
|
||||
seriesText: str = None
|
||||
journalAbbreviation: str = None
|
||||
language: str = None
|
||||
DOI: str = None
|
||||
ISSN: str = None
|
||||
shortTitle: str = None
|
||||
url: str = None
|
||||
accessDate: str = None
|
||||
archive: str = None
|
||||
archiveLocation: str = None
|
||||
libraryCatalog: str = None
|
||||
callNumber: str = None
|
||||
rights: str = None
|
||||
extra: str = None
|
||||
    tags: list = None
    collections: list = None
    relations: dict = None
|
||||
|
||||
def to_dict(self) -> dict:
|
||||
ret = {}
|
||||
for key, value in self.__dict__.items():
|
||||
if value:
|
||||
ret[key] = value
|
||||
return ret
|
||||
|
||||
def assign(self, book: dict) -> None:
|
||||
for key, value in book.__dict__.items():
|
||||
if key in self.__dict__.keys():
|
||||
try:
|
||||
setattr(self, key, value)
|
||||
except AttributeError:
|
||||
pass
|
||||
|
||||
|
||||
class ZoteroController:
|
||||
zoterocfg = settings.zotero
|
||||
|
||||
def __init__(self):
|
||||
if self.zoterocfg.library_id is None:
|
||||
return
|
||||
self.zot = zotero.Zotero( # type: ignore
|
||||
self.zoterocfg.library_id,
|
||||
self.zoterocfg.library_type,
|
||||
self.zoterocfg.api_key,
|
||||
)
|
||||
|
||||
def get_books(self) -> list:
|
||||
ret = []
|
||||
items = self.zot.top() # type: ignore
|
||||
for item in items:
|
||||
if item["data"]["itemType"] == "book":
|
||||
ret.append(item)
|
||||
return ret
|
||||
|
||||
# create item in zotero
|
||||
# item is a part of a book
|
||||
def __get_data(self, isbn) -> dict:
|
||||
web = WebRequest()
|
||||
web.get_ppn(isbn)
|
||||
data = web.get_data_elsa()
|
||||
bib = BibTextTransformer()
|
||||
bib.get_data(data)
|
||||
book = bib.return_data()
|
||||
return book
|
||||
|
||||
# # #print(zot.item_template("bookSection"))
|
||||
def createBook(self, isbn) -> Book:
|
||||
book = self.__get_data(isbn)
|
||||
|
||||
bookdata = Book()
|
||||
bookdata.title = book.title.split(":")[0]
|
||||
bookdata.ISBN = book.isbn
|
||||
bookdata.language = book.language
|
||||
bookdata.date = book.year
|
||||
bookdata.publisher = book.publisher
|
||||
bookdata.url = book.link
|
||||
bookdata.edition = book.edition
|
||||
bookdata.place = book.place
|
||||
bookdata.numPages = book.pages
|
||||
authors = [
|
||||
Creator().from_string(author).__dict__ for author in book.author.split(";")
|
||||
]
|
||||
authors = [author for author in authors if author["lastName"] is not None]
|
||||
bookdata.creators = authors
|
||||
return bookdata
|
||||
|
||||
def createItem(self, item) -> Optional[str]:
|
||||
resp = self.zot.create_items([item]) # type: ignore
|
||||
if "successful" in resp.keys():
|
||||
log.debug(resp)
|
||||
return resp["successful"]["0"]["key"]
|
||||
else:
|
||||
return None
|
||||
|
||||
def deleteItem(self, key) -> None:
|
||||
items = self.zot.items()
|
||||
for item in items:
|
||||
if item["key"] == key:
|
||||
self.zot.delete_item(item) # type: ignore
|
||||
# #print(item)
|
||||
break
|
||||
|
||||
def createHGSection(self, book: Book, data: dict) -> Optional[str]:
|
||||
log.debug(book)
|
||||
chapter = BookSection()
|
||||
chapter.assign(book)
|
||||
chapter.pages = data["pages"]
|
||||
chapter.itemType = "bookSection"
|
||||
chapter.ISBN = ""
|
||||
chapter.url = ""
|
||||
chapter.title = data["chapter_title"]
|
||||
creators = chapter.creators
|
||||
for creator in creators:
|
||||
creator["creatorType"] = "editor"
|
||||
chapter.creators = creators
|
||||
authors = [
|
||||
Creator().from_string(author).__dict__
|
||||
for author in data["section_author"].split(";")
|
||||
]
|
||||
chapter.creators += authors
|
||||
|
||||
log.debug(chapter.to_dict())
|
||||
return self.createItem(chapter.to_dict())
|
||||
|
||||
|
||||
def createBookSection(self, book: Book, data: dict) -> Optional[str]:
|
||||
chapter = BookSection()
|
||||
chapter.assign(book)
|
||||
chapter.pages = data["pages"]
|
||||
chapter.itemType = "bookSection"
|
||||
chapter.ISBN = ""
|
||||
chapter.url = ""
|
||||
chapter.title = ""
|
||||
return self.createItem(chapter.to_dict())
|
||||
# chapter.creators
|
||||
|
||||
def createJournalArticle(self, journal, article) -> Optional[str]:
|
||||
# #print(type(article))
|
||||
journalarticle = JournalArticle()
|
||||
journalarticle.assign(journal)
|
||||
journalarticle.itemType = "journalArticle"
|
||||
journalarticle.creators = [
|
||||
Creator().from_string(author).__dict__
|
||||
for author in article["section_author"].split(";")
|
||||
]
|
||||
journalarticle.date = article["year"]
|
||||
journalarticle.title = article["chapter_title"]
|
||||
journalarticle.publicationTitle = article["work_title"].split(":")[0].strip()
|
||||
journalarticle.pages = article["pages"]
|
||||
journalarticle.ISSN = article["isbn"]
|
||||
journalarticle.issue = article["issue"]
|
||||
journalarticle.url = article["isbn"]
|
||||
|
||||
# #print(journalarticle.to_dict())
|
||||
|
||||
return self.createItem(journalarticle.to_dict())
|
||||
|
||||
def get_citation(self, item) -> str:
|
||||
title = self.zot.item( # type: ignore
|
||||
item,
|
||||
content="bib",
|
||||
style="deutsche-gesellschaft-fur-psychologie",
|
||||
)[0]
|
||||
# title = title[0]
|
||||
title = (
|
||||
title.replace("<i>", "")
|
||||
.replace("</i>", "")
|
||||
.replace('<div class="csl-entry">', "")
|
||||
.replace("</div>", "")
|
||||
.replace("&", "&")
|
||||
)
|
||||
return title
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
zot = ZoteroController()
|
||||
book = zot.createBook("DV 3000 D649 (4)")
|
||||
row = "Döbert, Hans & Hörner, Wolfgang & Kopp, Bortho von & Reuter, Lutz R."
|
||||
zot.createBookSection()
|
||||
|
||||
# book = Book()
|
||||
# # # book.
|
||||
# ISBN = "9783801718718"
|
||||
# book = createBook(isbn=ISBN)
|
||||
# chapter = BookSection()
|
||||
# chapter.title = "Geistige Behinderung"
|
||||
# chapter.bookTitle = book.title
|
||||
# chapter.pages = "511 - 538"
|
||||
# chapter.publisher = book.publisher
|
||||
# authors = [
|
||||
# Creator("Jennifer M.", "Phillips").__dict__,
|
||||
# Creator("Hower", "Kwon").__dict__,
|
||||
# Creator("Carl", "Feinstein").__dict__,
|
||||
# Creator("Inco", "Spintczok von Brisinski").__dict__,
|
||||
# ]
|
||||
# publishers = book.author
|
||||
# if isinstance(publishers, str):
|
||||
# publishers = [publishers]
|
||||
# for publisher in publishers:
|
||||
# # #print(publisher)
|
||||
# creator = Creator().from_string(publisher)
|
||||
# creator.creatorType = "editor"
|
||||
# authors.append(creator.__dict__)
|
||||
|
||||
# chapter.creators = authors
|
||||
# chapter.publisher = book.publisher
|
||||
# # #print(chapter.to_dict())
|
||||
# createBookSection(chapter.to_dict())
|
||||
# get_citation("9ZXH8DDE")
|
||||
# # # #print()
|
||||
# # #print(get_books())
|
||||
# # #print(zot.item_creator_types("bookSection"))
|
||||
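# End-to-end sketch using only the methods defined above (assumes the zotero
# section of the settings provides library_id, library_type and api_key):
#
#     zot = ZoteroController()
#     book = zot.createBook("DV 3000 D649 (4)")
#     key = zot.createItem(book.to_dict())
#     if key:
#         print(zot.get_citation(key))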
@@ -1,13 +1,15 @@
|
||||
__all__ = [
|
||||
"csv_to_list",
|
||||
"pdf_to_csv",
|
||||
"word_to_semap",
|
||||
"elsa_word_to_csv",
|
||||
"eml_parser",
|
||||
"eml_to_semap",
|
||||
"pdf_to_csv",
|
||||
"pdf_to_semap",
|
||||
"word_to_semap",
|
||||
]
|
||||
|
||||
|
||||
from .csv_parser import csv_to_list
|
||||
from .pdf_parser import pdf_to_csv
|
||||
from .word_parser import word_to_semap
|
||||
from .word_parser import elsa_word_to_csv, pdf_to_semap, word_to_semap
|
||||
from .xml_parser import eml_parser, eml_to_semap
|
||||
|
||||
@@ -1,23 +1,27 @@
|
||||
import csv
|
||||
from pathlib import Path
|
||||
|
||||
from charset_normalizer import detect
|
||||
|
||||
from src.core.models import Book, SemapDocument
|
||||
|
||||
def csv_to_list(path: str) -> list[str]:
|
||||
"""
|
||||
Extracts the data from a csv file and returns it as a pandas dataframe
|
||||
"""
|
||||
|
||||
def csv_to_list(path: str) -> SemapDocument:
|
||||
"""Extract the data from a csv file and return it as a minimal SemapDocument."""
|
||||
    encoding = detect(Path(path).read_bytes())["encoding"]
|
||||
with open(path, newline="", encoding=encoding) as csvfile:
|
||||
with Path(path).open(newline="", encoding=encoding) as csvfile:
|
||||
# if decoder fails to map, assign ""
|
||||
reader = csv.reader(csvfile, delimiter=";", quotechar="|")
|
||||
ret = []
|
||||
for row in reader:
|
||||
ret.append(row[0].replace('"', ""))
|
||||
return ret
|
||||
|
||||
books = [Book(signature=row) for row in ret]
|
||||
|
||||
return SemapDocument(books=books)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
text = csv_to_list("C:/Users/aky547/Desktop/semap/71.csv")
|
||||
text = csv_to_list("C:/Users/aky547/Desktop/semap/sap71.csv")
|
||||
# remove linebreaks
|
||||
# #print(text)
|
||||
print(text)
|
||||
|
||||
@@ -10,7 +10,7 @@ from typing import Any, List
|
||||
import loguru
|
||||
|
||||
from src import LOG_DIR
|
||||
from src.logic.dataclass import BookData
|
||||
from src.core.models import BookData
|
||||
|
||||
log = loguru.logger
|
||||
log.remove()
|
||||
@@ -124,7 +124,6 @@ class BaseStruct:
|
||||
class ARRAYData:
|
||||
def __init__(self, signature=None) -> None:
|
||||
        self.signature = signature
|
||||
pass
|
||||
|
||||
def transform(self, data: str) -> BookData:
|
||||
def _get_line(source: str, search: str) -> str:
|
||||
@@ -223,7 +222,9 @@ class ARRAYData:
|
||||
|
||||
def _get_adis_idn(data, signature):
|
||||
loksatz_match = re.search(
|
||||
r"\[loksatz\] => Array\s*\((.*?)\)", data, re.DOTALL
|
||||
r"\[loksatz\] => Array\s*\((.*?)\)",
|
||||
data,
|
||||
re.DOTALL,
|
||||
)
|
||||
if loksatz_match:
|
||||
loksatz_content = loksatz_match.group(1)
|
||||
@@ -238,7 +239,9 @@ class ARRAYData:
|
||||
|
||||
def _get_in_apparat(data):
|
||||
loksatz_match = re.search(
|
||||
r"\[loksatz\] => Array\s*\((.*?)\)", data, re.DOTALL
|
||||
r"\[loksatz\] => Array\s*\((.*?)\)",
|
||||
data,
|
||||
re.DOTALL,
|
||||
)
|
||||
if loksatz_match:
|
||||
loksatz_content = loksatz_match.group(1)
|
||||
@@ -250,8 +253,7 @@ class ARRAYData:
|
||||
data = eval(obj)
|
||||
if data["ausleihcode"] == "R" and data["standort"] == "40":
|
||||
return True
|
||||
else:
|
||||
return False
|
||||
return False
|
||||
|
||||
ppn = _get_line(data, "[kid]")
|
||||
title = _get_title(data).strip()
|
||||
@@ -412,10 +414,9 @@ class RDSData:
|
||||
def return_data(self, option=None):
|
||||
if option == "rds_availability":
|
||||
return self.retlist[0]
|
||||
elif option == "rds_data":
|
||||
if option == "rds_data":
|
||||
return self.retlist[1]
|
||||
else:
|
||||
return {"rds_availability": self.retlist[0], "rds_data": self.retlist[1]}
|
||||
return {"rds_availability": self.retlist[0], "rds_data": self.retlist[1]}
|
||||
|
||||
|
||||
class DictToTable:
|
||||
@@ -462,12 +463,11 @@ class DictToTable:
|
||||
self.reset()
|
||||
if mode == "book":
|
||||
return self.book_assign(data)
|
||||
elif mode == "hg":
|
||||
if mode == "hg":
|
||||
return self.hg_assign(data)
|
||||
elif mode == "zs":
|
||||
if mode == "zs":
|
||||
return self.zs_assign(data)
|
||||
else:
|
||||
return None
|
||||
return None
|
||||
|
||||
def book_assign(self, data):
|
||||
self.type = "book"
|
||||
@@ -514,7 +514,7 @@ class DictToTable:
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
with open("daiadata", "r") as f:
|
||||
with open("daiadata") as f:
|
||||
data = f.read()
|
||||
|
||||
ret = RDSData().transform(data)
|
||||
|
||||
@@ -1,7 +1,8 @@
|
||||
import PySide6
|
||||
from PySide6 import QtWidgets
|
||||
|
||||
from src import Icon, __author__, __version__
|
||||
from src import __author__, __version__
|
||||
from src.utils.icon import Icon
|
||||
|
||||
from .dialog_sources.about_ui import Ui_about
|
||||
|
||||
@@ -33,8 +34,6 @@ svgrepo (https://www.svgrepo.com)""",
|
||||
description += f"{key}: {value}\n"
|
||||
self.description.setText(description)
|
||||
|
||||
pass
|
||||
|
||||
|
||||
def launch_about():
|
||||
app = QtWidgets.QApplication([])
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
from PySide6 import QtWidgets
|
||||
|
||||
from src import Icon
|
||||
from src.utils.icon import Icon
|
||||
|
||||
from .dialog_sources.apparat_extend_ui import Ui_Dialog
|
||||
|
||||
|
||||
@@ -2,8 +2,8 @@ from typing import Any
|
||||
|
||||
from PySide6 import QtCore, QtWidgets
|
||||
|
||||
from src import Icon
|
||||
from src.database import Database
|
||||
from src.utils.icon import Icon
|
||||
|
||||
from .dialog_sources.deletedialog_ui import Ui_Dialog
|
||||
|
||||
@@ -57,7 +57,7 @@ class DeleteDialog(QtWidgets.QDialog, Ui_Dialog):
|
||||
"title": title,
|
||||
"signature": signature,
|
||||
"edition": edition,
|
||||
}
|
||||
},
|
||||
)
|
||||
return result
|
||||
|
||||
@@ -74,7 +74,9 @@ class DeleteDialog(QtWidgets.QDialog, Ui_Dialog):
|
||||
if searchterm in title.lower() or searchterm in signature.lower():
|
||||
self.tableWidget.insertRow(self.tableWidget.rowCount())
|
||||
self.tableWidget.setCellWidget(
|
||||
self.tableWidget.rowCount() - 1, 0, checkbox
|
||||
self.tableWidget.rowCount() - 1,
|
||||
0,
|
||||
checkbox,
|
||||
)
|
||||
self.tableWidget.setItem(
|
||||
self.tableWidget.rowCount() - 1,
|
||||
@@ -117,7 +119,7 @@ class DeleteDialog(QtWidgets.QDialog, Ui_Dialog):
|
||||
self.tableWidget.setColumnWidth(0, 50)
|
||||
# horizontal header 0 should be centered
|
||||
self.tableWidget.horizontalHeader().setDefaultAlignment(
|
||||
QtCore.Qt.AlignmentFlag.AlignCenter
|
||||
QtCore.Qt.AlignmentFlag.AlignCenter,
|
||||
)
|
||||
|
||||
|
||||
|
||||
@@ -8,298 +8,313 @@
|
||||
## WARNING! All changes made in this file will be lost when recompiling UI file!
|
||||
################################################################################
|
||||
|
||||
from PySide6.QtCore import (QCoreApplication, QMetaObject, Qt)
|
||||
from PySide6.QtWidgets import (QDialogButtonBox,
|
||||
QGridLayout, QGroupBox, QHBoxLayout, QLabel,
|
||||
QLineEdit, QPushButton, QRadioButton, QSizePolicy,
|
||||
QSpacerItem, QStackedWidget, QTextEdit, QToolButton,
|
||||
QVBoxLayout, QWidget)
|
||||
from PySide6.QtCore import QCoreApplication, QMetaObject, Qt
|
||||
from PySide6.QtWidgets import (
|
||||
QDialogButtonBox,
|
||||
QGridLayout,
|
||||
QGroupBox,
|
||||
QHBoxLayout,
|
||||
QLabel,
|
||||
QLineEdit,
|
||||
QPushButton,
|
||||
QRadioButton,
|
||||
QSizePolicy,
|
||||
QSpacerItem,
|
||||
QStackedWidget,
|
||||
QTextEdit,
|
||||
QToolButton,
|
||||
QVBoxLayout,
|
||||
QWidget,
|
||||
)
|
||||
|
||||
|
||||
class Ui_Dialog(object):
|
||||
def setupUi(self, Dialog):
|
||||
if not Dialog.objectName():
|
||||
Dialog.setObjectName(u"Dialog")
|
||||
Dialog.setObjectName("Dialog")
|
||||
Dialog.resize(529, 484)
|
||||
self.verticalLayout = QVBoxLayout(Dialog)
|
||||
self.verticalLayout.setObjectName(u"verticalLayout")
|
||||
self.verticalLayout.setObjectName("verticalLayout")
|
||||
self.groupBox = QGroupBox(Dialog)
|
||||
self.groupBox.setObjectName(u"groupBox")
|
||||
self.groupBox.setObjectName("groupBox")
|
||||
self.groupBox.setFlat(True)
|
||||
self.groupBox.setCheckable(False)
|
||||
self.gridLayout_4 = QGridLayout(self.groupBox)
|
||||
self.gridLayout_4.setObjectName(u"gridLayout_4")
|
||||
self.horizontalSpacer = QSpacerItem(40, 20, QSizePolicy.Policy.Expanding, QSizePolicy.Policy.Minimum)
|
||||
self.gridLayout_4.setObjectName("gridLayout_4")
|
||||
self.horizontalSpacer = QSpacerItem(
|
||||
40, 20, QSizePolicy.Policy.Expanding, QSizePolicy.Policy.Minimum
|
||||
)
|
||||
|
||||
self.gridLayout_4.addItem(self.horizontalSpacer, 0, 3, 1, 1)
|
||||
|
||||
self.btn_mono = QRadioButton(self.groupBox)
|
||||
self.btn_mono.setObjectName(u"btn_mono")
|
||||
self.btn_mono.setObjectName("btn_mono")
|
||||
self.btn_mono.setChecked(False)
|
||||
|
||||
self.gridLayout_4.addWidget(self.btn_mono, 0, 0, 1, 1)
|
||||
|
||||
self.btn_zs = QRadioButton(self.groupBox)
|
||||
self.btn_zs.setObjectName(u"btn_zs")
|
||||
self.btn_zs.setObjectName("btn_zs")
|
||||
|
||||
self.gridLayout_4.addWidget(self.btn_zs, 0, 2, 1, 1)
|
||||
|
||||
self.btn_hg = QRadioButton(self.groupBox)
|
||||
self.btn_hg.setObjectName(u"btn_hg")
|
||||
self.btn_hg.setObjectName("btn_hg")
|
||||
|
||||
self.gridLayout_4.addWidget(self.btn_hg, 0, 1, 1, 1)
|
||||
|
||||
|
||||
self.verticalLayout.addWidget(self.groupBox)
|
||||
|
||||
self.horizontalLayout_2 = QHBoxLayout()
|
||||
self.horizontalLayout_2.setObjectName(u"horizontalLayout_2")
|
||||
self.horizontalLayout_2.setObjectName("horizontalLayout_2")
|
||||
self.label_2 = QLabel(Dialog)
|
||||
self.label_2.setObjectName(u"label_2")
|
||||
self.label_2.setObjectName("label_2")
|
||||
|
||||
self.horizontalLayout_2.addWidget(self.label_2)
|
||||
|
||||
self.searchIdent = QLineEdit(Dialog)
|
||||
self.searchIdent.setObjectName(u"searchIdent")
|
||||
self.searchIdent.setObjectName("searchIdent")
|
||||
|
||||
self.horizontalLayout_2.addWidget(self.searchIdent)
|
||||
|
||||
self.btn_search = QPushButton(Dialog)
|
||||
self.btn_search.setObjectName(u"btn_search")
|
||||
self.btn_search.setObjectName("btn_search")
|
||||
|
||||
self.horizontalLayout_2.addWidget(self.btn_search)
|
||||
|
||||
self.horizontalSpacer_2 = QSpacerItem(40, 20, QSizePolicy.Policy.Expanding, QSizePolicy.Policy.Minimum)
|
||||
self.horizontalSpacer_2 = QSpacerItem(
|
||||
40, 20, QSizePolicy.Policy.Expanding, QSizePolicy.Policy.Minimum
|
||||
)
|
||||
|
||||
self.horizontalLayout_2.addItem(self.horizontalSpacer_2)
|
||||
|
||||
self.make_quote = QPushButton(Dialog)
|
||||
self.make_quote.setObjectName(u"make_quote")
|
||||
self.make_quote.setObjectName("make_quote")
|
||||
|
||||
self.horizontalLayout_2.addWidget(self.make_quote)
|
||||
|
||||
|
||||
self.verticalLayout.addLayout(self.horizontalLayout_2)
|
||||
|
||||
self.stackedWidget = QStackedWidget(Dialog)
|
||||
self.stackedWidget.setObjectName(u"stackedWidget")
|
||||
self.stackedWidget.setObjectName("stackedWidget")
|
||||
self.mono = QWidget()
|
||||
self.mono.setObjectName(u"mono")
|
||||
self.mono.setObjectName("mono")
|
||||
self.gridLayout_2 = QGridLayout(self.mono)
|
||||
self.gridLayout_2.setObjectName(u"gridLayout_2")
|
||||
self.gridLayout_2.setObjectName("gridLayout_2")
|
||||
self.label = QLabel(self.mono)
|
||||
self.label.setObjectName(u"label")
|
||||
self.label.setObjectName("label")
|
||||
|
||||
self.gridLayout_2.addWidget(self.label, 0, 0, 1, 1)
|
||||
|
||||
self.book_author = QLineEdit(self.mono)
|
||||
self.book_author.setObjectName(u"book_author")
|
||||
self.book_author.setObjectName("book_author")
|
||||
|
||||
self.gridLayout_2.addWidget(self.book_author, 0, 1, 1, 1)
|
||||
|
||||
self.label_3 = QLabel(self.mono)
|
||||
self.label_3.setObjectName(u"label_3")
|
||||
self.label_3.setObjectName("label_3")
|
||||
|
||||
self.gridLayout_2.addWidget(self.label_3, 1, 0, 1, 1)
|
||||
|
||||
self.book_year = QLineEdit(self.mono)
|
||||
self.book_year.setObjectName(u"book_year")
|
||||
self.book_year.setObjectName("book_year")
|
||||
|
||||
self.gridLayout_2.addWidget(self.book_year, 1, 1, 1, 1)
|
||||
|
||||
self.label_4 = QLabel(self.mono)
|
||||
self.label_4.setObjectName(u"label_4")
|
||||
self.label_4.setObjectName("label_4")
|
||||
|
||||
self.gridLayout_2.addWidget(self.label_4, 2, 0, 1, 1)
|
||||
|
||||
self.book_edition = QLineEdit(self.mono)
|
||||
self.book_edition.setObjectName(u"book_edition")
|
||||
self.book_edition.setObjectName("book_edition")
|
||||
|
||||
self.gridLayout_2.addWidget(self.book_edition, 2, 1, 1, 1)
|
||||
|
||||
self.label_5 = QLabel(self.mono)
|
||||
self.label_5.setObjectName(u"label_5")
|
||||
self.label_5.setObjectName("label_5")
|
||||
|
||||
self.gridLayout_2.addWidget(self.label_5, 3, 0, 1, 1)
|
||||
|
||||
self.book_title = QLineEdit(self.mono)
|
||||
self.book_title.setObjectName(u"book_title")
|
||||
self.book_title.setObjectName("book_title")
|
||||
|
||||
self.gridLayout_2.addWidget(self.book_title, 3, 1, 1, 1)
|
||||
|
||||
self.label_6 = QLabel(self.mono)
|
||||
self.label_6.setObjectName(u"label_6")
|
||||
self.label_6.setObjectName("label_6")
|
||||
|
||||
self.gridLayout_2.addWidget(self.label_6, 4, 0, 1, 1)
self.book_place = QLineEdit(self.mono)
self.book_place.setObjectName(u"book_place")
self.book_place.setObjectName("book_place")
self.gridLayout_2.addWidget(self.book_place, 4, 1, 1, 1)
self.label_7 = QLabel(self.mono)
self.label_7.setObjectName(u"label_7")
self.label_7.setObjectName("label_7")
self.gridLayout_2.addWidget(self.label_7, 5, 0, 1, 1)
self.book_publisher = QLineEdit(self.mono)
self.book_publisher.setObjectName(u"book_publisher")
self.book_publisher.setObjectName("book_publisher")
self.gridLayout_2.addWidget(self.book_publisher, 5, 1, 1, 1)
self.label_8 = QLabel(self.mono)
self.label_8.setObjectName(u"label_8")
self.label_8.setObjectName("label_8")
self.gridLayout_2.addWidget(self.label_8, 6, 0, 1, 1)
self.book_signature = QLineEdit(self.mono)
self.book_signature.setObjectName(u"book_signature")
self.book_signature.setObjectName("book_signature")
self.gridLayout_2.addWidget(self.book_signature, 6, 1, 1, 1)
self.label_9 = QLabel(self.mono)
self.label_9.setObjectName(u"label_9")
self.label_9.setObjectName("label_9")
self.gridLayout_2.addWidget(self.label_9, 7, 0, 1, 1)
self.book_pages = QLineEdit(self.mono)
self.book_pages.setObjectName(u"book_pages")
self.book_pages.setObjectName("book_pages")
self.gridLayout_2.addWidget(self.book_pages, 7, 1, 1, 1)
self.page_warn_2 = QToolButton(self.mono)
self.page_warn_2.setObjectName(u"page_warn_2")
self.page_warn_2.setObjectName("page_warn_2")
self.page_warn_2.setFocusPolicy(Qt.NoFocus)
self.page_warn_2.setAutoRaise(True)
self.gridLayout_2.addWidget(self.page_warn_2, 7, 2, 1, 1)
self.label_29 = QLabel(self.mono)
self.label_29.setObjectName(u"label_29")
self.label_29.setObjectName("label_29")
self.gridLayout_2.addWidget(self.label_29, 8, 0, 1, 1)
self.book_isbn = QLineEdit(self.mono)
self.book_isbn.setObjectName(u"book_isbn")
self.book_isbn.setObjectName("book_isbn")
self.gridLayout_2.addWidget(self.book_isbn, 8, 1, 1, 1)
self.stackedWidget.addWidget(self.mono)
self.hg = QWidget()
self.hg.setObjectName(u"hg")
self.hg.setObjectName("hg")
self.gridLayout_3 = QGridLayout(self.hg)
self.gridLayout_3.setObjectName(u"gridLayout_3")
self.gridLayout_3.setObjectName("gridLayout_3")
self.hg_editor = QLineEdit(self.hg)
self.hg_editor.setObjectName(u"hg_editor")
self.hg_editor.setObjectName("hg_editor")
self.gridLayout_3.addWidget(self.hg_editor, 4, 1, 1, 1)
self.label_26 = QLabel(self.hg)
self.label_26.setObjectName(u"label_26")
self.label_26.setObjectName("label_26")
self.gridLayout_3.addWidget(self.label_26, 7, 0, 1, 1)
self.hg_edition = QLineEdit(self.hg)
self.hg_edition.setObjectName(u"hg_edition")
self.hg_edition.setObjectName("hg_edition")
self.gridLayout_3.addWidget(self.hg_edition, 2, 1, 1, 1)
self.label_20 = QLabel(self.hg)
self.label_20.setObjectName(u"label_20")
self.label_20.setObjectName("label_20")
self.gridLayout_3.addWidget(self.label_20, 1, 0, 1, 1)
self.label_24 = QLabel(self.hg)
self.label_24.setObjectName(u"label_24")
self.label_24.setObjectName("label_24")
self.gridLayout_3.addWidget(self.label_24, 3, 0, 1, 1)
self.label_27 = QLabel(self.hg)
self.label_27.setObjectName(u"label_27")
self.label_27.setObjectName("label_27")
self.gridLayout_3.addWidget(self.label_27, 8, 0, 1, 1)
self.label_28 = QLabel(self.hg)
self.label_28.setObjectName(u"label_28")
self.label_28.setObjectName("label_28")
self.gridLayout_3.addWidget(self.label_28, 9, 0, 1, 1)
self.label_23 = QLabel(self.hg)
self.label_23.setObjectName(u"label_23")
self.label_23.setObjectName("label_23")
self.gridLayout_3.addWidget(self.label_23, 5, 0, 1, 1)
self.label_21 = QLabel(self.hg)
self.label_21.setObjectName(u"label_21")
self.label_21.setObjectName("label_21")
self.gridLayout_3.addWidget(self.label_21, 2, 0, 1, 1)
self.hg_pages = QLineEdit(self.hg)
self.hg_pages.setObjectName(u"hg_pages")
self.hg_pages.setObjectName("hg_pages")
self.gridLayout_3.addWidget(self.hg_pages, 8, 1, 1, 1)
self.label_19 = QLabel(self.hg)
self.label_19.setObjectName(u"label_19")
self.label_19.setObjectName("label_19")
self.gridLayout_3.addWidget(self.label_19, 0, 0, 1, 1)
self.hg_signature = QLineEdit(self.hg)
self.hg_signature.setObjectName(u"hg_signature")
self.hg_signature.setObjectName("hg_signature")
self.gridLayout_3.addWidget(self.hg_signature, 9, 1, 1, 1)
self.label_30 = QLabel(self.hg)
self.label_30.setObjectName(u"label_30")
self.label_30.setObjectName("label_30")
self.gridLayout_3.addWidget(self.label_30, 10, 0, 1, 1)
self.label_25 = QLabel(self.hg)
self.label_25.setObjectName(u"label_25")
self.label_25.setObjectName("label_25")
self.gridLayout_3.addWidget(self.label_25, 6, 0, 1, 1)
self.hg_year = QLineEdit(self.hg)
self.hg_year.setObjectName(u"hg_year")
self.hg_year.setObjectName("hg_year")
self.gridLayout_3.addWidget(self.hg_year, 1, 1, 1, 1)
self.label_22 = QLabel(self.hg)
self.label_22.setObjectName(u"label_22")
self.label_22.setObjectName("label_22")
self.gridLayout_3.addWidget(self.label_22, 4, 0, 1, 1)
self.hg_title = QLineEdit(self.hg)
self.hg_title.setObjectName(u"hg_title")
self.hg_title.setObjectName("hg_title")
self.gridLayout_3.addWidget(self.hg_title, 5, 1, 1, 1)
self.hg_chaptertitle = QLineEdit(self.hg)
self.hg_chaptertitle.setObjectName(u"hg_chaptertitle")
self.hg_chaptertitle.setObjectName("hg_chaptertitle")
self.gridLayout_3.addWidget(self.hg_chaptertitle, 3, 1, 1, 1)
self.hg_author = QLineEdit(self.hg)
self.hg_author.setObjectName(u"hg_author")
self.hg_author.setObjectName("hg_author")
self.gridLayout_3.addWidget(self.hg_author, 0, 1, 1, 1)
self.hg_isbn = QLineEdit(self.hg)
self.hg_isbn.setObjectName(u"hg_isbn")
self.hg_isbn.setObjectName("hg_isbn")
self.gridLayout_3.addWidget(self.hg_isbn, 10, 1, 1, 1)
self.hg_publisher = QLineEdit(self.hg)
self.hg_publisher.setObjectName(u"hg_publisher")
self.hg_publisher.setObjectName("hg_publisher")
self.gridLayout_3.addWidget(self.hg_publisher, 7, 1, 1, 1)
self.hg_place = QLineEdit(self.hg)
self.hg_place.setObjectName(u"hg_place")
self.hg_place.setObjectName("hg_place")
self.gridLayout_3.addWidget(self.hg_place, 6, 1, 1, 1)
self.page_warn_3 = QToolButton(self.hg)
self.page_warn_3.setObjectName(u"page_warn_3")
self.page_warn_3.setObjectName("page_warn_3")
self.page_warn_3.setFocusPolicy(Qt.NoFocus)
self.page_warn_3.setAutoRaise(True)
@@ -307,111 +322,111 @@ class Ui_Dialog(object):
self.stackedWidget.addWidget(self.hg)
self.zs = QWidget()
self.zs.setObjectName(u"zs")
self.zs.setObjectName("zs")
self.gridLayout = QGridLayout(self.zs)
self.gridLayout.setObjectName(u"gridLayout")
self.gridLayout.setObjectName("gridLayout")
self.label_10 = QLabel(self.zs)
self.label_10.setObjectName(u"label_10")
self.label_10.setObjectName("label_10")
self.gridLayout.addWidget(self.label_10, 0, 0, 1, 1)
self.zs_publisher = QLineEdit(self.zs)
self.zs_publisher.setObjectName(u"zs_publisher")
self.zs_publisher.setObjectName("zs_publisher")
self.gridLayout.addWidget(self.zs_publisher, 6, 1, 1, 1)
self.zs_place = QLineEdit(self.zs)
self.zs_place.setObjectName(u"zs_place")
self.zs_place.setObjectName("zs_place")
self.gridLayout.addWidget(self.zs_place, 5, 1, 1, 1)
self.label_14 = QLabel(self.zs)
self.label_14.setObjectName(u"label_14")
self.label_14.setObjectName("label_14")
self.gridLayout.addWidget(self.label_14, 4, 0, 1, 1)
self.label_11 = QLabel(self.zs)
self.label_11.setObjectName(u"label_11")
self.label_11.setObjectName("label_11")
self.gridLayout.addWidget(self.label_11, 1, 0, 1, 1)
self.zs_year = QLineEdit(self.zs)
self.zs_year.setObjectName(u"zs_year")
self.zs_year.setObjectName("zs_year")
self.gridLayout.addWidget(self.zs_year, 1, 1, 1, 1)
self.label_17 = QLabel(self.zs)
self.label_17.setObjectName(u"label_17")
self.label_17.setObjectName("label_17")
self.gridLayout.addWidget(self.label_17, 7, 0, 1, 1)
self.label_16 = QLabel(self.zs)
self.label_16.setObjectName(u"label_16")
self.label_16.setObjectName("label_16")
self.gridLayout.addWidget(self.label_16, 6, 0, 1, 1)
self.zs_issue = QLineEdit(self.zs)
self.zs_issue.setObjectName(u"zs_issue")
self.zs_issue.setObjectName("zs_issue")
self.gridLayout.addWidget(self.zs_issue, 2, 1, 1, 1)
self.zs_chapter_title = QLineEdit(self.zs)
self.zs_chapter_title.setObjectName(u"zs_chapter_title")
self.zs_chapter_title.setObjectName("zs_chapter_title")
self.gridLayout.addWidget(self.zs_chapter_title, 3, 1, 1, 1)
self.zs_isbn = QLineEdit(self.zs)
self.zs_isbn.setObjectName(u"zs_isbn")
self.zs_isbn.setObjectName("zs_isbn")
self.gridLayout.addWidget(self.zs_isbn, 9, 1, 1, 1)
self.label_12 = QLabel(self.zs)
self.label_12.setObjectName(u"label_12")
self.label_12.setObjectName("label_12")
self.gridLayout.addWidget(self.label_12, 2, 0, 1, 1)
self.label_31 = QLabel(self.zs)
self.label_31.setObjectName(u"label_31")
self.label_31.setObjectName("label_31")
self.gridLayout.addWidget(self.label_31, 9, 0, 1, 1)
self.label_15 = QLabel(self.zs)
self.label_15.setObjectName(u"label_15")
self.label_15.setObjectName("label_15")
self.gridLayout.addWidget(self.label_15, 5, 0, 1, 1)
self.zs_signature = QLineEdit(self.zs)
self.zs_signature.setObjectName(u"zs_signature")
self.zs_signature.setObjectName("zs_signature")
self.gridLayout.addWidget(self.zs_signature, 8, 1, 1, 1)
self.zs_pages = QLineEdit(self.zs)
self.zs_pages.setObjectName(u"zs_pages")
self.zs_pages.setObjectName("zs_pages")
self.gridLayout.addWidget(self.zs_pages, 7, 1, 1, 1)
self.label_13 = QLabel(self.zs)
self.label_13.setObjectName(u"label_13")
self.label_13.setObjectName("label_13")
self.gridLayout.addWidget(self.label_13, 3, 0, 1, 1)
self.label_18 = QLabel(self.zs)
self.label_18.setObjectName(u"label_18")
self.label_18.setObjectName("label_18")
self.gridLayout.addWidget(self.label_18, 8, 0, 1, 1)
self.zs_author = QLineEdit(self.zs)
self.zs_author.setObjectName(u"zs_author")
self.zs_author.setObjectName("zs_author")
self.gridLayout.addWidget(self.zs_author, 0, 1, 1, 1)
self.zs_title = QLineEdit(self.zs)
self.zs_title.setObjectName(u"zs_title")
self.zs_title.setObjectName("zs_title")
self.gridLayout.addWidget(self.zs_title, 4, 1, 1, 1)
self.page_warn = QToolButton(self.zs)
self.page_warn.setObjectName(u"page_warn")
self.page_warn.setObjectName("page_warn")
self.page_warn.setFocusPolicy(Qt.NoFocus)
self.page_warn.setAutoRaise(True)
@@ -419,119 +434,130 @@ class Ui_Dialog(object):
self.stackedWidget.addWidget(self.zs)
self.page = QWidget()
self.page.setObjectName(u"page")
self.page.setObjectName("page")
self.page.setLayoutDirection(Qt.LeftToRight)
self.gridLayout_5 = QGridLayout(self.page)
self.gridLayout_5.setObjectName(u"gridLayout_5")
self.gridLayout_5.setObjectName("gridLayout_5")
self.label_32 = QLabel(self.page)
self.label_32.setObjectName(u"label_32")
self.label_32.setObjectName("label_32")
self.gridLayout_5.addWidget(self.label_32, 0, 0, 1, 1)
self.verticalSpacer = QSpacerItem(20, 40, QSizePolicy.Policy.Minimum, QSizePolicy.Policy.Expanding)
self.verticalSpacer = QSpacerItem(
20, 40, QSizePolicy.Policy.Minimum, QSizePolicy.Policy.Expanding
)
self.gridLayout_5.addItem(self.verticalSpacer, 7, 0, 1, 1)
self.file_desc_edit = QTextEdit(self.page)
self.file_desc_edit.setObjectName(u"file_desc_edit")
self.file_desc_edit.setObjectName("file_desc_edit")
self.file_desc_edit.setFocusPolicy(Qt.NoFocus)
self.file_desc_edit.setReadOnly(True)
self.gridLayout_5.addWidget(self.file_desc_edit, 6, 0, 1, 1)
self.label_34 = QLabel(self.page)
self.label_34.setObjectName(u"label_34")
self.label_34.setObjectName("label_34")
self.gridLayout_5.addWidget(self.label_34, 3, 0, 1, 1)
self.filename_edit = QTextEdit(self.page)
self.filename_edit.setObjectName(u"filename_edit")
self.filename_edit.setObjectName("filename_edit")
self.filename_edit.setFocusPolicy(Qt.NoFocus)
self.filename_edit.setReadOnly(True)
self.gridLayout_5.addWidget(self.filename_edit, 1, 0, 1, 1)
self.label_33 = QLabel(self.page)
self.label_33.setObjectName(u"label_33")
self.label_33.setObjectName("label_33")
self.gridLayout_5.addWidget(self.label_33, 5, 0, 1, 1)
self.ilias_filename = QTextEdit(self.page)
self.ilias_filename.setObjectName(u"ilias_filename")
self.ilias_filename.setObjectName("ilias_filename")
self.ilias_filename.setFocusPolicy(Qt.NoFocus)
self.ilias_filename.setReadOnly(True)
self.gridLayout_5.addWidget(self.ilias_filename, 4, 0, 1, 1)
self.verticalLayout_2 = QVBoxLayout()
self.verticalLayout_2.setObjectName(u"verticalLayout_2")
self.verticalSpacer_3 = QSpacerItem(20, 40, QSizePolicy.Policy.Minimum, QSizePolicy.Policy.Expanding)
self.verticalLayout_2.setObjectName("verticalLayout_2")
self.verticalSpacer_3 = QSpacerItem(
20, 40, QSizePolicy.Policy.Minimum, QSizePolicy.Policy.Expanding
)
self.verticalLayout_2.addItem(self.verticalSpacer_3)
self.copy_filename = QToolButton(self.page)
self.copy_filename.setObjectName(u"copy_filename")
self.copy_filename.setObjectName("copy_filename")
self.copy_filename.setLayoutDirection(Qt.LeftToRight)
self.copy_filename.setAutoFillBackground(False)
self.verticalLayout_2.addWidget(self.copy_filename)
self.filename_edit_label = QLabel(self.page)
self.filename_edit_label.setObjectName(u"filename_edit_label")
self.filename_edit_label.setObjectName("filename_edit_label")
self.verticalLayout_2.addWidget(self.filename_edit_label)
self.verticalSpacer_2 = QSpacerItem(20, 40, QSizePolicy.Policy.Minimum, QSizePolicy.Policy.Expanding)
self.verticalSpacer_2 = QSpacerItem(
20, 40, QSizePolicy.Policy.Minimum, QSizePolicy.Policy.Expanding
)
self.verticalLayout_2.addItem(self.verticalSpacer_2)
self.gridLayout_5.addLayout(self.verticalLayout_2, 1, 1, 1, 1)
self.verticalLayout_3 = QVBoxLayout()
self.verticalLayout_3.setObjectName(u"verticalLayout_3")
self.verticalSpacer_5 = QSpacerItem(20, 40, QSizePolicy.Policy.Minimum, QSizePolicy.Policy.Expanding)
self.verticalLayout_3.setObjectName("verticalLayout_3")
self.verticalSpacer_5 = QSpacerItem(
20, 40, QSizePolicy.Policy.Minimum, QSizePolicy.Policy.Expanding
)
self.verticalLayout_3.addItem(self.verticalSpacer_5)
self.copy_ilias_filename = QToolButton(self.page)
self.copy_ilias_filename.setObjectName(u"copy_ilias_filename")
self.copy_ilias_filename.setObjectName("copy_ilias_filename")
self.verticalLayout_3.addWidget(self.copy_ilias_filename)
self.ilias_filename_label = QLabel(self.page)
self.ilias_filename_label.setObjectName(u"ilias_filename_label")
self.ilias_filename_label.setObjectName("ilias_filename_label")
self.verticalLayout_3.addWidget(self.ilias_filename_label)
self.verticalSpacer_4 = QSpacerItem(20, 40, QSizePolicy.Policy.Minimum, QSizePolicy.Policy.Expanding)
self.verticalSpacer_4 = QSpacerItem(
20, 40, QSizePolicy.Policy.Minimum, QSizePolicy.Policy.Expanding
)
self.verticalLayout_3.addItem(self.verticalSpacer_4)
self.gridLayout_5.addLayout(self.verticalLayout_3, 4, 1, 1, 1)
self.verticalLayout_4 = QVBoxLayout()
self.verticalLayout_4.setObjectName(u"verticalLayout_4")
self.verticalSpacer_7 = QSpacerItem(20, 40, QSizePolicy.Policy.Minimum, QSizePolicy.Policy.Expanding)
self.verticalLayout_4.setObjectName("verticalLayout_4")
self.verticalSpacer_7 = QSpacerItem(
20, 40, QSizePolicy.Policy.Minimum, QSizePolicy.Policy.Expanding
)
self.verticalLayout_4.addItem(self.verticalSpacer_7)
self.copy_qoute = QToolButton(self.page)
self.copy_qoute.setObjectName(u"copy_qoute")
self.copy_quote = QToolButton(self.page)
self.copy_quote.setObjectName("copy_quote")
self.verticalLayout_4.addWidget(self.copy_qoute)
self.verticalLayout_4.addWidget(self.copy_quote)
self.file_desc_edit_label = QLabel(self.page)
self.file_desc_edit_label.setObjectName(u"file_desc_edit_label")
self.file_desc_edit_label.setObjectName("file_desc_edit_label")
self.verticalLayout_4.addWidget(self.file_desc_edit_label)
self.verticalSpacer_6 = QSpacerItem(20, 40, QSizePolicy.Policy.Minimum, QSizePolicy.Policy.Expanding)
self.verticalSpacer_6 = QSpacerItem(
20, 40, QSizePolicy.Policy.Minimum, QSizePolicy.Policy.Expanding
)
self.verticalLayout_4.addItem(self.verticalSpacer_6)
self.gridLayout_5.addLayout(self.verticalLayout_4, 6, 1, 1, 1)
self.stackedWidget.addWidget(self.page)
@@ -539,19 +565,20 @@ class Ui_Dialog(object):
self.verticalLayout.addWidget(self.stackedWidget)
self.horizontalLayout = QHBoxLayout()
self.horizontalLayout.setObjectName(u"horizontalLayout")
self.horizontalLayout.setObjectName("horizontalLayout")
self.buttonBox = QDialogButtonBox(Dialog)
self.buttonBox.setObjectName(u"buttonBox")
self.buttonBox.setStandardButtons(QDialogButtonBox.Cancel|QDialogButtonBox.Discard|QDialogButtonBox.Ok)
self.buttonBox.setObjectName("buttonBox")
self.buttonBox.setStandardButtons(
QDialogButtonBox.Cancel | QDialogButtonBox.Discard | QDialogButtonBox.Ok
)
self.horizontalLayout.addWidget(self.buttonBox)
self.retryButton = QPushButton(Dialog)
self.retryButton.setObjectName(u"retryButton")
self.retryButton.setObjectName("retryButton")
self.horizontalLayout.addWidget(self.retryButton)
self.verticalLayout.addLayout(self.horizontalLayout)
QWidget.setTabOrder(self.btn_mono, self.btn_hg)
@@ -594,88 +621,144 @@ class Ui_Dialog(object):
QWidget.setTabOrder(self.filename_edit, self.ilias_filename)
QWidget.setTabOrder(self.ilias_filename, self.copy_filename)
QWidget.setTabOrder(self.copy_filename, self.copy_ilias_filename)
QWidget.setTabOrder(self.copy_ilias_filename, self.copy_qoute)
QWidget.setTabOrder(self.copy_qoute, self.retryButton)
QWidget.setTabOrder(self.copy_ilias_filename, self.copy_quote)
QWidget.setTabOrder(self.copy_quote, self.retryButton)
self.retranslateUi(Dialog)
self.stackedWidget.setCurrentIndex(3)
QMetaObject.connectSlotsByName(Dialog)
# setupUi
def retranslateUi(self, Dialog):
Dialog.setWindowTitle(QCoreApplication.translate("Dialog", u"Dialog", None))
self.groupBox.setTitle(QCoreApplication.translate("Dialog", u"Medientyp?", None))
self.btn_mono.setText(QCoreApplication.translate("Dialog", u"Monografie", None))
self.btn_zs.setText(QCoreApplication.translate("Dialog", u"Zeitschrift", None))
self.btn_hg.setText(QCoreApplication.translate("Dialog", u"Herausgeberwerk", None))
self.label_2.setText(QCoreApplication.translate("Dialog", u"Identifikator", None))
self.btn_search.setText(QCoreApplication.translate("Dialog", u"Suchen", None))
#if QT_CONFIG(tooltip)
self.make_quote.setToolTip(QCoreApplication.translate("Dialog", u"Zuerst die Seitenzahl anpassen", None))
#endif // QT_CONFIG(tooltip)
self.make_quote.setText(QCoreApplication.translate("Dialog", u"Zitat erstellen", None))
self.label.setText(QCoreApplication.translate("Dialog", u"Autor(en)\n"
" Nachname, Vorname", None))
#if QT_CONFIG(tooltip)
self.book_author.setToolTip(QCoreApplication.translate("Dialog", u"Bei mehreren Autoren mit ; trennen", None))
#endif // QT_CONFIG(tooltip)
self.label_3.setText(QCoreApplication.translate("Dialog", u"Jahr", None))
self.label_4.setText(QCoreApplication.translate("Dialog", u"Auflage", None))
self.label_5.setText(QCoreApplication.translate("Dialog", u"Titel", None))
self.label_6.setText(QCoreApplication.translate("Dialog", u"Ort", None))
self.label_7.setText(QCoreApplication.translate("Dialog", u"Verlag", None))
self.label_8.setText(QCoreApplication.translate("Dialog", u"Signatur", None))
self.label_9.setText(QCoreApplication.translate("Dialog", u"Seiten", None))
self.book_pages.setPlaceholderText(QCoreApplication.translate("Dialog", u"Seitenanzahl des Mediums, zum zitieren \u00e4ndern!", None))
Dialog.setWindowTitle(QCoreApplication.translate("Dialog", "Dialog", None))
self.groupBox.setTitle(QCoreApplication.translate("Dialog", "Medientyp?", None))
self.btn_mono.setText(QCoreApplication.translate("Dialog", "Monografie", None))
self.btn_zs.setText(QCoreApplication.translate("Dialog", "Zeitschrift", None))
self.btn_hg.setText(
QCoreApplication.translate("Dialog", "Herausgeberwerk", None)
)
self.label_2.setText(
QCoreApplication.translate("Dialog", "Identifikator", None)
)
self.btn_search.setText(QCoreApplication.translate("Dialog", "Suchen", None))
# if QT_CONFIG(tooltip)
self.make_quote.setToolTip(
QCoreApplication.translate("Dialog", "Zuerst die Seitenzahl anpassen", None)
)
# endif // QT_CONFIG(tooltip)
self.make_quote.setText(
QCoreApplication.translate("Dialog", "Zitat erstellen", None)
)
self.label.setText(
QCoreApplication.translate(
"Dialog", "Autor(en)\n" " Nachname, Vorname", None
)
)
# if QT_CONFIG(tooltip)
self.book_author.setToolTip(
QCoreApplication.translate(
"Dialog", "Bei mehreren Autoren mit ; trennen", None
)
)
# endif // QT_CONFIG(tooltip)
self.label_3.setText(QCoreApplication.translate("Dialog", "Jahr", None))
self.label_4.setText(QCoreApplication.translate("Dialog", "Auflage", None))
self.label_5.setText(QCoreApplication.translate("Dialog", "Titel", None))
self.label_6.setText(QCoreApplication.translate("Dialog", "Ort", None))
self.label_7.setText(QCoreApplication.translate("Dialog", "Verlag", None))
self.label_8.setText(QCoreApplication.translate("Dialog", "Signatur", None))
self.label_9.setText(QCoreApplication.translate("Dialog", "Seiten", None))
self.book_pages.setPlaceholderText(
QCoreApplication.translate(
"Dialog", "Seitenanzahl des Mediums, zum zitieren \u00e4ndern!", None
)
)
self.page_warn_2.setText("")
self.label_29.setText(QCoreApplication.translate("Dialog", u"ISBN", None))
#if QT_CONFIG(tooltip)
self.hg_editor.setToolTip(QCoreApplication.translate("Dialog", u"Bei mehreren Autoren mit ; trennen", None))
#endif // QT_CONFIG(tooltip)
self.label_26.setText(QCoreApplication.translate("Dialog", u"Verlag", None))
self.label_20.setText(QCoreApplication.translate("Dialog", u"Jahr", None))
self.label_24.setText(QCoreApplication.translate("Dialog", u"Beitragstitel", None))
self.label_27.setText(QCoreApplication.translate("Dialog", u"Seiten", None))
self.label_28.setText(QCoreApplication.translate("Dialog", u"Signatur", None))
self.label_23.setText(QCoreApplication.translate("Dialog", u"Titel des Werkes", None))
self.label_21.setText(QCoreApplication.translate("Dialog", u"Auflage", None))
self.label_19.setText(QCoreApplication.translate("Dialog", u"Autor(en)\n"
"Nachname, Vorname", None))
self.label_30.setText(QCoreApplication.translate("Dialog", u"ISBN", None))
self.label_25.setText(QCoreApplication.translate("Dialog", u"Ort", None))
self.label_22.setText(QCoreApplication.translate("Dialog", u"Herausgebername(n)\n"
"Nachname, Vorname", None))
#if QT_CONFIG(tooltip)
self.hg_author.setToolTip(QCoreApplication.translate("Dialog", u"Bei mehreren Autoren mit ; trennen", None))
#endif // QT_CONFIG(tooltip)
self.label_29.setText(QCoreApplication.translate("Dialog", "ISBN", None))
# if QT_CONFIG(tooltip)
self.hg_editor.setToolTip(
QCoreApplication.translate(
"Dialog", "Bei mehreren Autoren mit ; trennen", None
)
)
# endif // QT_CONFIG(tooltip)
self.label_26.setText(QCoreApplication.translate("Dialog", "Verlag", None))
self.label_20.setText(QCoreApplication.translate("Dialog", "Jahr", None))
self.label_24.setText(
QCoreApplication.translate("Dialog", "Beitragstitel", None)
)
self.label_27.setText(QCoreApplication.translate("Dialog", "Seiten", None))
self.label_28.setText(QCoreApplication.translate("Dialog", "Signatur", None))
self.label_23.setText(
QCoreApplication.translate("Dialog", "Titel des Werkes", None)
)
self.label_21.setText(QCoreApplication.translate("Dialog", "Auflage", None))
self.label_19.setText(
QCoreApplication.translate(
"Dialog", "Autor(en)\n" "Nachname, Vorname", None
)
)
self.label_30.setText(QCoreApplication.translate("Dialog", "ISBN", None))
self.label_25.setText(QCoreApplication.translate("Dialog", "Ort", None))
self.label_22.setText(
QCoreApplication.translate(
"Dialog", "Herausgebername(n)\n" "Nachname, Vorname", None
)
)
# if QT_CONFIG(tooltip)
self.hg_author.setToolTip(
QCoreApplication.translate(
"Dialog", "Bei mehreren Autoren mit ; trennen", None
)
)
# endif // QT_CONFIG(tooltip)
self.page_warn_3.setText("")
self.label_10.setText(QCoreApplication.translate("Dialog", u"Autor(en)\n"
"Nachname, Vorname", None))
self.label_14.setText(QCoreApplication.translate("Dialog", u"Name der Zeitschrift", None))
self.label_11.setText(QCoreApplication.translate("Dialog", u"Jahr", None))
self.label_17.setText(QCoreApplication.translate("Dialog", u"Seiten", None))
self.label_16.setText(QCoreApplication.translate("Dialog", u"Verlag", None))
self.label_12.setText(QCoreApplication.translate("Dialog", u"Heft", None))
self.label_31.setText(QCoreApplication.translate("Dialog", u"ISSN", None))
self.label_15.setText(QCoreApplication.translate("Dialog", u"Ort", None))
self.label_13.setText(QCoreApplication.translate("Dialog", u"Artikeltitel", None))
self.label_18.setText(QCoreApplication.translate("Dialog", u"Signatur", None))
#if QT_CONFIG(tooltip)
self.zs_author.setToolTip(QCoreApplication.translate("Dialog", u"Bei mehreren Autoren mit ; trennen", None))
#endif // QT_CONFIG(tooltip)
self.label_10.setText(
QCoreApplication.translate(
"Dialog", "Autor(en)\n" "Nachname, Vorname", None
)
)
self.label_14.setText(
QCoreApplication.translate("Dialog", "Name der Zeitschrift", None)
)
self.label_11.setText(QCoreApplication.translate("Dialog", "Jahr", None))
self.label_17.setText(QCoreApplication.translate("Dialog", "Seiten", None))
self.label_16.setText(QCoreApplication.translate("Dialog", "Verlag", None))
self.label_12.setText(QCoreApplication.translate("Dialog", "Heft", None))
self.label_31.setText(QCoreApplication.translate("Dialog", "ISSN", None))
self.label_15.setText(QCoreApplication.translate("Dialog", "Ort", None))
self.label_13.setText(
QCoreApplication.translate("Dialog", "Artikeltitel", None)
)
self.label_18.setText(QCoreApplication.translate("Dialog", "Signatur", None))
# if QT_CONFIG(tooltip)
self.zs_author.setToolTip(
QCoreApplication.translate(
"Dialog", "Bei mehreren Autoren mit ; trennen", None
)
)
# endif // QT_CONFIG(tooltip)
self.page_warn.setText("")
self.label_32.setText(QCoreApplication.translate("Dialog", u"Dateiname", None))
self.label_34.setText(QCoreApplication.translate("Dialog", u"ILIAS Name", None))
self.label_33.setText(QCoreApplication.translate("Dialog", u"ILIAS Dateibeschreibung", None))
self.copy_filename.setText(QCoreApplication.translate("Dialog", u"Kopieren", None))
self.label_32.setText(QCoreApplication.translate("Dialog", "Dateiname", None))
self.label_34.setText(QCoreApplication.translate("Dialog", "ILIAS Name", None))
self.label_33.setText(
QCoreApplication.translate("Dialog", "ILIAS Dateibeschreibung", None)
)
self.copy_filename.setText(
QCoreApplication.translate("Dialog", "Kopieren", None)
)
self.filename_edit_label.setText("")
self.copy_ilias_filename.setText(QCoreApplication.translate("Dialog", u"Kopieren", None))
self.copy_ilias_filename.setText(
QCoreApplication.translate("Dialog", "Kopieren", None)
)
self.ilias_filename_label.setText("")
self.copy_qoute.setText(QCoreApplication.translate("Dialog", u"Kopieren", None))
self.copy_quote.setText(QCoreApplication.translate("Dialog", "Kopieren", None))
self.file_desc_edit_label.setText("")
self.retryButton.setText(QCoreApplication.translate("Dialog", u"Wiederholen", None))
# retranslateUi
self.retryButton.setText(
QCoreApplication.translate("Dialog", "Wiederholen", None)
)
# retranslateUi
@@ -1,9 +1,9 @@
from natsort import natsorted
from PySide6 import QtWidgets
from src import Icon
from src.database import Database
from src.core.models import Semester
from src.database import Database
from src.utils.icon import Icon
from src.utils.richtext import SemapSchilder, SemesterDocument
from .dialog_sources.documentprint_ui import Ui_Dialog
@@ -1,10 +1,10 @@
from PySide6 import QtWidgets
from src import Icon
from src.services.webrequest import BibTextTransformer, WebRequest
from src.services.zotero import ZoteroController
from src.shared.logging import log
from src.transformers.transformers import DictToTable
from src.utils.icon import Icon
from .dialog_sources.elsa_add_table_entry_ui import Ui_Dialog
@@ -4,8 +4,9 @@ import sys
import loguru
from PySide6 import QtCore, QtWidgets
from src import LOG_DIR, Icon
from src import LOG_DIR
from src.database import Database
from src.utils.icon import Icon
from .dialog_sources.login_ui import Ui_Dialog
@@ -87,24 +88,23 @@ class LoginDialog(Ui_Dialog):
log.success(f"User {username} logged in.")
self.dialog.accept()
# Credentials are invalid, display a warning
elif username == "" or password == "":
log.warning("Invalid username or password. Login failed.")
warning_dialog = QtWidgets.QMessageBox()
warning_dialog.setIcon(QtWidgets.QMessageBox.Icon.Warning)
warning_dialog.setText("Please enter a username and password.")
warning_dialog.setWindowTitle("Login Failed")
warning_dialog.exec()
else:
# Credentials are invalid, display a warning
if username == "" or password == "":
log.warning("Invalid username or password. Login failed.")
warning_dialog = QtWidgets.QMessageBox()
warning_dialog.setIcon(QtWidgets.QMessageBox.Icon.Warning)
warning_dialog.setText("Please enter a username and password.")
warning_dialog.setWindowTitle("Login Failed")
warning_dialog.exec()
else:
log.warning("Invalid username or password. Login failed.")
warning_dialog = QtWidgets.QMessageBox()
warning_dialog.setIcon(QtWidgets.QMessageBox.Icon.Warning)
warning_dialog.setText(
"Invalid username or password. Please try again."
)
warning_dialog.setWindowTitle("Login Failed")
warning_dialog.exec()
log.warning("Invalid username or password. Login failed.")
warning_dialog = QtWidgets.QMessageBox()
warning_dialog.setIcon(QtWidgets.QMessageBox.Icon.Warning)
warning_dialog.setText(
"Invalid username or password. Please try again.",
)
warning_dialog.setWindowTitle("Login Failed")
warning_dialog.exec()
def cancel_buttonfn(self):
self.dialog.reject()
@@ -5,9 +5,9 @@ import sys
from PySide6 import QtWidgets
from src import Icon
from src import settings as config
from src.shared.logging import log
from src.utils.icon import Icon
from .dialog_sources.mail_preview_ui import Ui_eMailPreview as MailPreviewDialog
from .mailTemplate import MailTemplateDialog
@@ -18,8 +18,7 @@ empty_signature = """"""
def _escape_braces_in_style(html: str) -> str:
"""
Double curly braces ONLY inside <style>...</style> blocks so that
"""Double curly braces ONLY inside <style>...</style> blocks so that
str.format(...) won't treat CSS as placeholders. The doubled braces
will automatically render back to single braces after formatting.
"""
@@ -38,11 +37,9 @@ def _escape_braces_in_style(html: str) -> str:
def _split_eml_headers_body(eml_text: str) -> tuple[str, str]:
"""
Return (headers, body_html). Robustly split on first blank line.
"""Return (headers, body_html). Robustly split on first blank line.
Accepts lines that contain only spaces/tabs as the separator.
"""
parts = re.split(r"\r?\n[ \t]*\r?\n", eml_text, maxsplit=1)
if len(parts) == 2:
return parts[0], parts[1]
@@ -50,7 +47,7 @@ def _split_eml_headers_body(eml_text: str) -> tuple[str, str]:
m = re.search(
r"(?:^|\r?\n)Content-Transfer-Encoding:.*?(?:\r?\n)",
eml_text,
flags=re.I | re.S,
flags=re.IGNORECASE | re.DOTALL,
)
if m:
return eml_text[: m.end()], eml_text[m.end() :]
@@ -115,10 +112,10 @@ class Mail_Dialog(QtWidgets.QDialog, MailPreviewDialog):
if self.gender_male.isChecked():
self.btn_okay.setEnabled(True)
return f"Sehr geehrter Herr {prof},"
elif self.gender_female.isChecked():
if self.gender_female.isChecked():
self.btn_okay.setEnabled(True)
return f"Sehr geehrte Frau {prof},"
elif self.gender_non.isChecked():
if self.gender_non.isChecked():
self.btn_okay.setEnabled(True)
name = f"{self.profname.split(' ')[1]} {self.profname.split(' ')[0]}"
return f"Guten Tag {name},"
@@ -152,8 +149,7 @@ class Mail_Dialog(QtWidgets.QDialog, MailPreviewDialog):
Ihr Semesterapparatsteam
Mail: semesterapparate@ph-freiburg.de
Tel.: 0761/682-778 | 0761/682-545"""
else:
return config.mail.signature
return config.mail.signature
def load_mail_templates(self):
log.info("Loading mail templates")
@@ -174,7 +170,7 @@ Tel.: 0761/682-778 | 0761/682-545"""
log.debug("No mail template selected")
return
with open(f"mail_vorlagen/{email_template}", "r", encoding="utf-8") as f:
with open(f"mail_vorlagen/{email_template}", encoding="utf-8") as f:
eml_text = f.read()
# header label for UI (unchanged)
@@ -203,7 +199,7 @@ Tel.: 0761/682-778 | 0761/682-545"""
[
f"- {book.title} (ISBN: {','.join(book.isbn)}, Auflage: {book.edition if book.edition else 'nicht bekannt'}, In Bibliothek: {'ja' if getattr(book, 'signature', None) is not None and 'Handbibliothek' not in str(book.library_location) else 'nein'}, Typ: {book.get_book_type()}) Aktuelle Auflage: {book.old_book.edition if book.old_book and book.old_book.edition else 'nicht bekannt'}"
for book in (self.books or [])
]
],
)
if self.books
else "keine neuen Auflagen gefunden",
@@ -211,7 +207,7 @@ Tel.: 0761/682-778 | 0761/682-545"""
[
f" - {book.title}, ISBN: {','.join(book.isbn)}, Bibliotheksstandort : {book.library_location if book.library_location else 'N/A'}, Link: {book.link}"
for book in (self.ordered_books or [])
]
],
),
)
except Exception as e:
@@ -255,7 +251,6 @@ Tel.: 0761/682-778 | 0761/682-545"""
server.login(sender_email, password)
server.sendmail(sender_email, tolist, mail)
server.quit()
pass
log.info("Mail sent, closing connection to server and dialog")
self.accept()
@@ -5,7 +5,7 @@ import sys
from loguru import logger as log
from PySide6 import QtCore, QtWidgets
from src import Icon
from src.utils.icon import Icon
from .dialog_sources import NewMailTemplateDesignerDialog
@@ -35,13 +35,13 @@ class MailTemplateDialog(QtWidgets.QDialog, NewMailTemplateDesignerDialog):
# buttons
self.buttonBox.button(
QtWidgets.QDialogButtonBox.StandardButton.Save
QtWidgets.QDialogButtonBox.StandardButton.Save,
).clicked.connect(self.save_template)
self.buttonBox.button(
QtWidgets.QDialogButtonBox.StandardButton.Discard
QtWidgets.QDialogButtonBox.StandardButton.Discard,
).clicked.connect(self.discard_changes)
self.buttonBox.button(
QtWidgets.QDialogButtonBox.StandardButton.Cancel
QtWidgets.QDialogButtonBox.StandardButton.Cancel,
).clicked.connect(self.closeNow)
log.info("Mail template dialog (plaintext) setup complete")
@@ -69,12 +69,12 @@ class MailTemplateDialog(QtWidgets.QDialog, NewMailTemplateDesignerDialog):
dialog.setIcon(QtWidgets.QMessageBox.Icon.Warning)
Icon("warning", dialog)
dialog.setText(
"Ein Template mit diesem Namen existiert bereits. Möchten Sie es überschreiben?"
"Ein Template mit diesem Namen existiert bereits. Möchten Sie es überschreiben?",
)
dialog.setWindowTitle("Template überschreiben")
dialog.setStandardButtons(
QtWidgets.QMessageBox.StandardButton.Yes
| QtWidgets.QMessageBox.StandardButton.No
| QtWidgets.QMessageBox.StandardButton.No,
)
dialog.setDefaultButton(QtWidgets.QMessageBox.StandardButton.No)
ret = dialog.exec()
@@ -93,11 +93,14 @@ class MailTemplateDialog(QtWidgets.QDialog, NewMailTemplateDesignerDialog):
eml = mail_headers + "\n\n" + mail_body
eml = re.sub(r" +", " ", eml) # remove multiple spaces
eml = re.sub(r"\n +", "\n", eml) #
eml = re.sub(r"\n +", "\n", eml)
print(eml)
with open(
f"mail_vorlagen/{template}", "w", encoding="utf-8", newline=""
f"mail_vorlagen/{template}",
"w",
encoding="utf-8",
newline="",
) as f:
f.write(eml)
@@ -128,7 +131,7 @@ class MailTemplateDialog(QtWidgets.QDialog, NewMailTemplateDesignerDialog):
dialog.setWindowTitle("Änderungen verwerfen")
dialog.setStandardButtons(
QtWidgets.QMessageBox.StandardButton.Yes
| QtWidgets.QMessageBox.StandardButton.No
| QtWidgets.QMessageBox.StandardButton.No,
)
dialog.setDefaultButton(QtWidgets.QMessageBox.StandardButton.No)
ret = dialog.exec()
@@ -1,6 +1,6 @@
from PySide6 import QtCore, QtGui, QtWidgets
from src import Icon
from src.utils.icon import Icon
from .dialog_sources.medianadder_ui import Ui_Dialog
@@ -8,7 +8,7 @@
from PySide6 import QtWidgets
from src import Icon
from src.utils.icon import Icon
from .dialog_sources.confirm_extend_ui import Ui_extend_confirm
@@ -1,10 +1,13 @@
from PySide6 import QtWidgets
from .dialog_sources.reminder_ui import Ui_Erinnerung as Ui_Dialog
from src import Icon
import datetime as date
from typing import Any
from PySide6 import QtWidgets
from src.utils.icon import Icon
from .dialog_sources.reminder_ui import Ui_Erinnerung as Ui_Dialog
class ReminderDialog(QtWidgets.QDialog, Ui_Dialog):
def __init__(self, parent=None):
super().__init__(parent)
@@ -18,5 +21,5 @@ class ReminderDialog(QtWidgets.QDialog, Ui_Dialog):
{
"message": self.message_box.toPlainText(),
"remind_at": self.dateEdit.date().toString("yyyy-MM-dd"),
}
},
]
@@ -3,8 +3,9 @@ import sys
import loguru
from PySide6 import QtCore, QtGui, QtWidgets
from src import LOG_DIR, Icon, settings
from src import LOG_DIR, settings
from src.ui.widgets.iconLine import IconWidget
from src.utils.icon import Icon
from .dialog_sources.settings_ui import Ui_Dialog as _settings
@@ -92,7 +93,7 @@ class Settings(QtWidgets.QDialog, _settings):
self.printermail.setText(settings.mail.printer_mail)
self.printermail.setPlaceholderText("E-Mail-Adresse des Druckers")
self.use_username_smtp_login.setChecked(
settings.mail.use_user_name if settings.mail.use_user_name else False
settings.mail.use_user_name if settings.mail.use_user_name else False,
)
self.editSignature.setHtml(settings.mail.signature)
self.zotero_api_key.setText(settings.zotero.api_key)
@@ -177,7 +178,7 @@ class Settings(QtWidgets.QDialog, _settings):
if file_dialog.exec():
self.db_name.setText(file_dialog.selectedFiles()[0].split("/")[-1])
self.db_path.setText(
file_dialog.selectedFiles()[0].split(self.db_name.text())[0]
file_dialog.selectedFiles()[0].split(self.db_name.text())[0],
)
def set_save_path(self):
@@ -211,7 +212,8 @@ class Settings(QtWidgets.QDialog, _settings):
settings.set_mail_attr("user_name", self.mail_username.text())
settings.set_mail_attr("password", self.password.text())
settings.set_mail_attr(
"use_user_name", self.use_username_smtp_login.isChecked()
"use_user_name",
self.use_username_smtp_login.isChecked(),
)
settings.set_mail_attr("printer_mail", self.printermail.text())
settings.set_mail_attr("signature", signature)
@@ -3,7 +3,6 @@ from __future__ import annotations
import atexit
import os
import sys
import tempfile
import time
import webbrowser
from pathlib import Path
@@ -15,7 +14,6 @@ from PySide6.QtCore import QThread
from PySide6.QtGui import QRegularExpressionValidator
from PySide6.QtMultimedia import QAudioOutput, QMediaPlayer
from src import Icon
from src.background import (
AvailChecker,
BookGrabber,
@@ -32,14 +30,12 @@ from src.core.models import (
Semester,
)
from src.database import Database
from src.logic import (
from src.parsers import (
csv_to_list,
eml_to_semap,
pdf_to_semap,
word_to_semap,
)
from src.parsers import (
csv_to_list,
)
from src.shared.logging import log
from src.ui import Ui_Semesterapparat
from src.ui.dialogs import (
@@ -73,6 +69,7 @@ from src.ui.widgets import (
)
from src.utils.files import delete_temp_contents as tempdelete
from src.utils.files import recreateFile
from src.utils.icon import Icon
log.success("UI started")
valid_input = (0, 0, 0, 0, 0, 0)
@@ -1165,63 +1162,65 @@ class Ui(QtWidgets.QMainWindow, Ui_Semesterapparat):
self.document_list.currentRow(),
0,
).text()
if file_location == "Database":
# create a temporaty file to use, delete it after use
temp_file = tempfile.NamedTemporaryFile(
delete=False,
suffix="." + file_type,
)
temp_file.write(self.db.getBlob(file_name, int(app_id)))
temp_file.close()
file = temp_file.name
if file_type == "pdf":
data = pdf_to_semap(file)
signatures = data.signatures
data = __open_dialog(signatures)
# if no data was returned, return
if data == []:
return
for book in data:
if not isinstance(book, BookData):
continue
self.db.addBookToDatabase(
bookdata=book,
app_id=app_id,
prof_id=prof_id,
)
if file_type == "csv":
signatures = csv_to_list(file)
data = __open_dialog(signatures)
# add the data to the database
for book in data:
if not isinstance(book, BookData):
continue
self.db.addBookToDatabase(
bookdata=book,
app_id=app_id,
prof_id=prof_id,
)
if file_type == "docx":
data = word_to_semap(file)
signatures = data.signatures
data = __open_dialog(signatures)
# if no data was returned, return
if data == []:
return
for book in data:
if not isinstance(book, BookData):
continue
self.db.addBookToDatabase(
bookdata=book,
app_id=app_id,
prof_id=prof_id,
)
if file_type == "eml":
data = eml_to_semap(file)
data = self.extract_document_data()
# if file_location == "Database":
# # create a temporaty file to use, delete it after use
# temp_file = tempfile.NamedTemporaryFile(
# delete=False,
# suffix="." + file_type,
# )
# temp_file.write(self.db.getBlob(file_name, int(app_id)))
# temp_file.close()
# file = temp_file.name
# if file_type == "pdf":
# data = pdf_to_semap(file)
# signatures = data.signatures
# data = __open_dialog(signatures)
# # if no data was returned, return
# if data == []:
# return
# for book in data:
# if not isinstance(book, BookData):
# continue
# self.db.addBookToDatabase(
# bookdata=book,
# app_id=app_id,
# prof_id=prof_id,
# )
# if file_type == "csv":
# signatures = csv_to_list(file)
# data = __open_dialog(signatures)
# # add the data to the database
# for book in data:
# if not isinstance(book, BookData):
# continue
# self.db.addBookToDatabase(
# bookdata=book,
# app_id=app_id,
# prof_id=prof_id,
# )
# if file_type == "docx":
# data = word_to_semap(file)
# signatures = data.signatures
# data = __open_dialog(signatures)
# # if no data was returned, return
# if data == []:
# return
# for book in data:
# if not isinstance(book, BookData):
# continue
# self.db.addBookToDatabase(
# bookdata=book,
# app_id=app_id,
# prof_id=prof_id,
# )
# if file_type == "eml":
# data = eml_to_semap(file)
self.update_app_media_list()
# #log.debug(len(signatures))
def extract_document_data(self) -> list[str] | SemapDocument:
def extract_document_data(self) -> SemapDocument:
file_type = self.document_list.item(self.document_list.currentRow(), 1).text()
file_location = self.document_list.item(
self.document_list.currentRow(),
@@ -1239,12 +1238,12 @@ class Ui(QtWidgets.QMainWindow, Ui_Semesterapparat):
)
if file_type == "pdf":
# TODO: implement parser here
self.confirm_popup("PDF Dateien werden nicht unterstützt!", title="Fehler")
return [""]
if file_type == "csv":
signatures = csv_to_list(file)
# add the data to the database
data = pdf_to_semap(file)
signatures = data.signatures
# self.confirm_popup("PDF Dateien werden nicht unterstützt!", title="Fehler")
return signatures
if file_type == "csv":
return csv_to_list(file)
if file_type in ("docx", "doc"):
data = word_to_semap(file)
log.info("Converted data from semap file")
@@ -1369,7 +1368,7 @@ class Ui(QtWidgets.QMainWindow, Ui_Semesterapparat):
# log.debug("Prof ID is None", prof_id)
document = None
if c_document is None or not isinstance(c_document, SemapDocument):
if c_document is None:
document = self.extract_document_data()
if document is None:
log.error("Document is None")
@@ -1,6 +1,6 @@
from PySide6 import QtCore, QtWidgets
from src import Icon
from src.utils.icon import Icon
from src.database import Database
from .widget_sources. import Ui_Form
@@ -1,8 +1,8 @@
from PySide6 import QtWidgets
from PySide6.QtCore import Signal
from src import Icon
from src.database import Database
from src.utils.icon import Icon
from .widget_sources.calendar_entry_ui import Ui_Dialog
@@ -55,7 +55,6 @@ class CalendarEntry(QtWidgets.QDialog, Ui_Dialog):
self.repaintSignal.emit(self.date)
self.close()
else:
#
self.repaintSignal.emit()
self.close()
@@ -4,15 +4,15 @@ from PySide6 import QtCore, QtGui, QtWidgets
from PySide6.QtCore import QDate
from PySide6.QtGui import QRegularExpressionValidator
from src import Icon
from src.core.models import Prof, Semester
from src.database import Database
from src.logic import elsa_word_to_csv
from src.parsers import elsa_word_to_csv
from src.shared.logging import log
from src.ui.dialogs import ElsaAddEntry, popus_confirm
from src.ui.widgets.filepicker import FilePicker
from src.ui.widgets.graph import DataQtGraph
from src.utils.files import recreateElsaFile
from src.utils.icon import Icon
from .widget_sources.elsa_maindialog_ui import Ui_Dialog
@@ -3,9 +3,9 @@ from typing import List
from PySide6 import QtWidgets
from PySide6.QtCore import Qt
from src import Icon
from src.services.catalogue import Catalogue
from src.core.models import BookData
from src.services.catalogue import Catalogue
from src.utils.icon import Icon
from .widget_sources.new_edition_check_book_ui import (
Ui_Dialog as Ui_NewEditionCheckBook,
@@ -64,7 +64,7 @@ class NewEditionCheckFoundResult(QtWidgets.QDialog, Ui_NewEditionCheckFoundResul
self.line_source.setOpenExternalLinks(True)
self.line_source.setTextFormat(Qt.TextFormat.RichText)
self.line_source.setTextInteractionFlags(
Qt.TextInteractionFlag.TextBrowserInteraction
Qt.TextInteractionFlag.TextBrowserInteraction,
)
self.line_isbn.textChanged.connect(self.update_book)
self.line_author.textChanged.connect(self.update_book)
@@ -79,11 +79,11 @@ class NewEditionCheckFoundResult(QtWidgets.QDialog, Ui_NewEditionCheckFoundResul
self.line_isbn.setText(
", ".join(self.book.isbn)
if isinstance(self.book.isbn, list)
else self.book.isbn
else self.book.isbn,
)
if self.book.signature is not None and self.book.signature != "":
self.in_library.setText(
f"Diese Neuauflage ist bereits in der Bibliothek vorhanden.\nStandort: {self.book.library_location}"
f"Diese Neuauflage ist bereits in der Bibliothek vorhanden.\nStandort: {self.book.library_location}",
)
isbn = (
self.book.isbn[0]
@@ -98,7 +98,7 @@ class NewEditionCheckFoundResult(QtWidgets.QDialog, Ui_NewEditionCheckFoundResul
and self.book.library_location not in (0, "0", None)
):
self.in_library.setText(
f"Diese Neuauflage ist bereits in der Bibliothek vorhanden, und an diesem Standort: {self.book.library_location}."
f"Diese Neuauflage ist bereits in der Bibliothek vorhanden, und an diesem Standort: {self.book.library_location}.",
)
isbn = (
str(self.book.isbn[0])
@@ -107,8 +107,6 @@ class NewEditionCheckFoundResult(QtWidgets.QDialog, Ui_NewEditionCheckFoundResul
)
self.book.link = LEHMANNS_LINK.format(isbn)
pass
def update_book(self):
print("update book")
# for each line edit, get the value and assign it to the book on the corresponding attribute
@@ -130,7 +128,7 @@ class NewEditionCheckFoundResult(QtWidgets.QDialog, Ui_NewEditionCheckFoundResul
print("set", attr, "to", value)
if attr == "isbn" and value is not None:
self.line_source.setText(
f"<a href='{LEHMANNS_LINK.format(self.line_isbn.text())}'>Lehmanns</a>"
f"<a href='{LEHMANNS_LINK.format(self.line_isbn.text())}'>Lehmanns</a>",
)
@@ -155,7 +153,7 @@ class NewEditionCheckBook(QtWidgets.QDialog, Ui_NewEditionCheckBook):
self.line_isbn.setText(
", ".join(self.book.isbn)
if isinstance(self.book.isbn, list)
else self.book.isbn
else self.book.isbn,
)
for _ in range(self.stackedWidget.count()):
@@ -164,7 +162,7 @@ class NewEditionCheckBook(QtWidgets.QDialog, Ui_NewEditionCheckBook):
widget.deleteLater()
for response in self.responses:
self.stackedWidget.addWidget(
NewEditionCheckFoundResult(parent=self, book=response)
NewEditionCheckFoundResult(parent=self, book=response),
)
self.label_book_index.setText(f"1 / {self.stackedWidget.count()}")
link = f"<a href='{self.book.link}'>Katalog</a>"
@@ -172,7 +170,7 @@ class NewEditionCheckBook(QtWidgets.QDialog, Ui_NewEditionCheckBook):
self.label_source_local.setOpenExternalLinks(True)
self.label_source_local.setTextFormat(Qt.TextFormat.RichText)
self.label_source_local.setTextInteractionFlags(
Qt.TextInteractionFlag.TextBrowserInteraction
Qt.TextInteractionFlag.TextBrowserInteraction,
)
isbn = (
@@ -181,12 +179,12 @@ class NewEditionCheckBook(QtWidgets.QDialog, Ui_NewEditionCheckBook):
else f"{self.book.title}+{self.book.author}"
)
self.label_source_external.setText(
f"<a href='{LEHMANNS_LINK.format(isbn)}'>Lehmanns</a>"
f"<a href='{LEHMANNS_LINK.format(isbn)}'>Lehmanns</a>",
)
self.label_source_external.setOpenExternalLinks(True)
self.label_source_external.setTextFormat(Qt.TextFormat.RichText)
self.label_source_external.setTextInteractionFlags(
Qt.TextInteractionFlag.TextBrowserInteraction
Qt.TextInteractionFlag.TextBrowserInteraction,
)
self.btn_next.clicked.connect(self.next)
self.btn_prev.clicked.connect(self.previous)
@@ -216,8 +214,6 @@ class NewEditionCheckBook(QtWidgets.QDialog, Ui_NewEditionCheckBook):
if index < self.stackedWidget.count() - 1:
self.btn_next.show()
pass
class NewEditionChecker(QtWidgets.QDialog, Ui_NewEditionCheck):
def __init__(self, results, parent=None):
@@ -235,7 +231,7 @@ class NewEditionChecker(QtWidgets.QDialog, Ui_NewEditionCheck):
book, responses = resultset
# print(book, responses)
self.stackedWidget.addWidget(
NewEditionCheckBook(parent=self, book=book, responses=responses)
NewEditionCheckBook(parent=self, book=book, responses=responses),
)
self.accepted_books = []
self.stackedWidget.setCurrentIndex(0)