Merge pull request 'Implement optimizations' (#15) from dev-rebase into dev
Reviewed-on: #15
This commit was merged in pull request #15.
main.py (13 changed lines)
@@ -1,11 +1,14 @@
-from src import first_launch, settings
-from src.ui.widgets.welcome_wizard import launch_wizard as startup
-from PySide6 import QtWidgets
 import sys
-from src.ui.userInterface import launch_gui as UI
 
+from PySide6 import QtWidgets
 
+from src import first_launch, settings
+from src.shared.logging import configure
+from src.ui.userInterface import launch_gui as UI
+from src.ui.widgets.welcome_wizard import launch_wizard as startup
 
 if __name__ == "__main__":
+    configure("INFO")
     app = QtWidgets.QApplication(sys.argv)
     if not first_launch:
         setup = startup()
@@ -16,4 +19,4 @@ if __name__ == "__main__":
         else:
             sys.exit()
     else:
         UI()
@@ -1,28 +1,13 @@
-import sys
-from datetime import datetime
-
-import loguru
 import regex
 import requests
 from bs4 import BeautifulSoup
 
-from src import LOG_DIR
 from src.logic import BookData as Book
+from src.shared.logging import log
 
 URL = "https://rds.ibs-bw.de/phfreiburg/opac/RDSIndex/Search?type0%5B%5D=allfields&lookfor0%5B%5D={}&join=AND&bool0%5B%5D=AND&type0%5B%5D=au&lookfor0%5B%5D=&join=AND&bool0%5B%5D=AND&type0%5B%5D=ti&lookfor0%5B%5D=&join=AND&bool0%5B%5D=AND&type0%5B%5D=ct&lookfor0%5B%5D=&join=AND&bool0%5B%5D=AND&type0%5B%5D=isn&lookfor0%5B%5D=&join=AND&bool0%5B%5D=AND&type0%5B%5D=ta&lookfor0%5B%5D=&join=AND&bool0%5B%5D=AND&type0%5B%5D=co&lookfor0%5B%5D=&join=AND&bool0%5B%5D=AND&type0%5B%5D=py&lookfor0%5B%5D=&join=AND&bool0%5B%5D=AND&type0%5B%5D=pp&lookfor0%5B%5D=&join=AND&bool0%5B%5D=AND&type0%5B%5D=pu&lookfor0%5B%5D=&join=AND&bool0%5B%5D=AND&type0%5B%5D=si&lookfor0%5B%5D=&join=AND&bool0%5B%5D=AND&type0%5B%5D=zr&lookfor0%5B%5D=&join=AND&bool0%5B%5D=AND&type0%5B%5D=cc&lookfor0%5B%5D=&join=AND&bool0%5B%5D=AND"
 BASE = "https://rds.ibs-bw.de"
 
-log = loguru.logger
-log.remove()
-log.add(sys.stdout, level="INFO")
-log.add(f"{LOG_DIR}/application.log", rotation="1 MB", retention="10 days")
-
-log.add(
-    f"{LOG_DIR}/{datetime.now().strftime('%Y-%m-%d')}.log",
-    rotation="1 day",
-    retention="1 month",
-)
 
 
 class Catalogue:
     def __init__(self, timeout=15):
@@ -3,7 +3,6 @@ import json
 import os
 import re
 import sqlite3 as sql
-import sys
 import tempfile
 from dataclasses import asdict
 from pathlib import Path
@@ -13,7 +12,7 @@ from typing import Any, List, Optional, Tuple, Union
 
 import loguru
 
-from src import DATABASE_DIR, LOG_DIR, settings
+from src import DATABASE_DIR, settings
 from src.backend.db import (
     CREATE_ELSA_FILES_TABLE,
     CREATE_ELSA_MEDIA_TABLE,
@@ -34,9 +33,6 @@ from src.logic.semester import Semester
 from src.utils.blob import create_blob
 
 log = loguru.logger
-log.remove()
-log.add(sys.stdout, level="INFO")
-log.add(f"{LOG_DIR}/application.log", rotation="1 MB", retention="10 days")
 
 
 ascii_lowercase = lower + digits + punctuation
@@ -186,7 +182,13 @@ class Database:
         Returns:
             sql.Connection: The active connection to the database
         """
-        return sql.connect(self.db_path)
+        conn = sql.connect(self.db_path)
+        # Fast pragmas suitable for a desktop app DB
+        conn.execute("PRAGMA journal_mode=WAL;")
+        conn.execute("PRAGMA synchronous=NORMAL;")
+        conn.execute("PRAGMA temp_store=MEMORY;")
+        conn.execute("PRAGMA mmap_size=134217728;")  # 128MB
+        return conn
 
     def close_connection(self, conn: sql.Connection):
         """
@@ -214,6 +216,25 @@ class Database:
         cursor.execute(CREATE_ELSA_TABLE)
         cursor.execute(CREATE_ELSA_FILES_TABLE)
         cursor.execute(CREATE_ELSA_MEDIA_TABLE)
+        # Helpful indices to speed up frequent lookups and joins
+        cursor.execute(
+            "CREATE INDEX IF NOT EXISTS idx_media_app_prof ON media(app_id, prof_id);"
+        )
+        cursor.execute(
+            "CREATE INDEX IF NOT EXISTS idx_media_deleted ON media(deleted);"
+        )
+        cursor.execute(
+            "CREATE INDEX IF NOT EXISTS idx_media_available ON media(available);"
+        )
+        cursor.execute(
+            "CREATE INDEX IF NOT EXISTS idx_messages_remind_at ON messages(remind_at);"
+        )
+        cursor.execute(
+            "CREATE INDEX IF NOT EXISTS idx_semesterapparat_prof ON semesterapparat(prof_id);"
+        )
+        cursor.execute(
+            "CREATE INDEX IF NOT EXISTS idx_semesterapparat_appnr ON semesterapparat(appnr);"
+        )
         conn.commit()
         self.close_connection(conn)
@@ -227,7 +248,7 @@ class Database:
         """
         conn = self.connect()
         cursor = conn.cursor()
-        log.debug(f"Inserting {params} into database with query {query}")
+        log.debug(f"Inserting into DB: {query}")
         cursor.execute(query, params)
         conn.commit()
         self.close_connection(conn)
@@ -1650,7 +1671,7 @@ class Database:
         tempdir.mkdir(parents=True, exist_ok=True)
 
         file = tempfile.NamedTemporaryFile(
-            delete=False, dir=tempdir_path, mode="wb", suffix=f".{filetype}"
+            delete=False, dir=tempdir, mode="wb", suffix=f".{filetype}"
        )
         file.write(blob)
         # log.debug("file created")
@@ -1713,9 +1734,9 @@ class Database:
         telnr = profdata.telnr
         title = profdata.title
 
-        query = f"INSERT INTO prof (fname, lname, fullname, mail, telnr,titel) VALUES ('{fname}','{lname}','{fullname}','{mail}','{telnr}','{title}')"
+        query = "INSERT INTO prof (fname, lname, fullname, mail, telnr, titel) VALUES (?,?,?,?,?,?)"
         log.debug(query)
-        cursor.execute(query)
+        cursor.execute(query, (fname, lname, fullname, mail, telnr, title))
 
         conn.commit()
         conn.close()
@@ -1758,10 +1779,10 @@ class Database:
             fullname = profdata["profname"]
         else:
             fullname = profdata.name()
-        query = f"SELECT id FROM prof WHERE fullname = '{fullname}'"
+        query = "SELECT id FROM prof WHERE fullname = ?"
         log.debug(query)
 
-        cursor.execute(query)
+        cursor.execute(query, (fullname,))
         result = cursor.fetchone()
         if result:
             return result[0]
@@ -1776,10 +1797,10 @@ class Database:
         """
         conn = self.connect()
         cursor = conn.cursor()
-        query = f"SELECT * FROM prof WHERE fullname = '{fullname}'"
+        query = "SELECT * FROM prof WHERE fullname = ?"
         log.debug(query)
 
-        result = cursor.execute(query).fetchone()
+        result = cursor.execute(query, (fullname,)).fetchone()
         if result:
             return Prof().from_tuple(result)
         else:
@@ -1795,8 +1816,8 @@ class Database:
 
             int | None: The id of the prof or None if not found
         """
-        query = f"SELECT prof_id from semesterapparat WHERE appnr = '{apprarat_id}' and deletion_status = 0"
-        data = self.query_db(query)
+        query = "SELECT prof_id from semesterapparat WHERE appnr = ? and deletion_status = 0"
+        data = self.query_db(query, (apprarat_id,))
         if data:
             log.info("Prof ID: " + str(data[0][0]))
             return data[0][0]
@@ -1807,20 +1828,13 @@
         # get book data
         new_apparat_id = apparat
         new_prof_id = self.getProfIDByApparat(new_apparat_id)
-        query = f"""
-        INSERT INTO media (bookdata, app_id, prof_id, deleted, available, reservation)
-        SELECT
-        bookdata,
-        '{new_apparat_id}',
-        '{new_prof_id}',
-        0,
-        available,
-        reservation
-        FROM media
-        where id = '{book_id}'"""
+        query = (
+            "INSERT INTO media (bookdata, app_id, prof_id, deleted, available, reservation) "
+            "SELECT bookdata, ?, ?, 0, available, reservation FROM media WHERE id = ?"
+        )
         connection = self.connect()
         cursor = connection.cursor()
-        cursor.execute(query)
+        cursor.execute(query, (new_apparat_id, new_prof_id, book_id))
         connection.commit()
         connection.close()
 
@@ -1832,16 +1846,18 @@
             appratat (int): the ID of the new apparat
         """
         # get book data
-        query = f"UPDATE media SET app_id = '{appratat}' WHERE id = '{book_id}'"
+        query = "UPDATE media SET app_id = ? WHERE id = ?"
         connection = self.connect()
         cursor = connection.cursor()
-        cursor.execute(query)
+        cursor.execute(query, (appratat, book_id))
         connection.commit()
         connection.close()
 
     def getApparatNameByAppNr(self, appnr: int):
-        query = f"SELECT name FROM semesterapparat WHERE appnr = '{appnr}' and deletion_status = 0"
-        data = self.query_db(query)
+        query = (
+            "SELECT name FROM semesterapparat WHERE appnr = ? and deletion_status = 0"
+        )
+        data = self.query_db(query, (appnr,))
         if data:
             return data[0][0]
         else:
@@ -1856,8 +1872,8 @@
         return result
 
     def getBookIdByPPN(self, ppn: str) -> int:
-        query = f"SELECT id FROM media WHERE bookdata LIKE '%{ppn}%'"
-        data = self.query_db(query)
+        query = "SELECT id FROM media WHERE bookdata LIKE ?"
+        data = self.query_db(query, (f"%{ppn}%",))
         if data:
             return data[0][0]
         else:
@@ -1876,9 +1892,7 @@
         results = self.query_db(query, (apparat_id,))
         res = []
         for result in results:
-            old_edition_edition = self.query_db(
-                "SELECT bookdata FROM media WHERE id=?", (result[2],), one=True
-            )
+            # keep only new edition payload; old edition can be reconstructed if needed
             res.append(BookData().from_string(result[1]))
         return res
 
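The Database hunks above apply two recurring patterns: connections are opened with a fixed set of PRAGMAs, and user-supplied values are bound as ?-placeholders instead of being formatted into the SQL string. A minimal, self-contained sketch of the same two patterns using only the standard library (the in-memory database and the one-column prof table are illustration-only assumptions, not code from this commit):

import sqlite3


def connect(db_path: str) -> sqlite3.Connection:
    # Same pragma set that Database.connect() now applies
    conn = sqlite3.connect(db_path)
    conn.execute("PRAGMA journal_mode=WAL;")
    conn.execute("PRAGMA synchronous=NORMAL;")
    conn.execute("PRAGMA temp_store=MEMORY;")
    conn.execute("PRAGMA mmap_size=134217728;")  # 128 MB
    return conn


if __name__ == "__main__":
    conn = connect(":memory:")  # hypothetical throwaway database
    conn.execute("CREATE TABLE prof (id INTEGER PRIMARY KEY, fullname TEXT)")
    # Placeholders leave quoting to the driver and avoid SQL injection
    conn.execute("INSERT INTO prof (fullname) VALUES (?)", ("Berger Christian",))
    row = conn.execute(
        "SELECT id FROM prof WHERE fullname = ?", ("Berger Christian",)
    ).fetchone()
    print(row)
    conn.close()

Binding values this way is what replaces the old f-string queries throughout the class.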
@@ -1,20 +1,10 @@
-import sys
-
-import loguru
 from PySide6.QtCore import QThread, Signal
 
-from src import LOG_DIR
 from src.backend import Database
 from src.logic.webrequest import BibTextTransformer, WebRequest
+from src.shared.logging import log
 
-log = loguru.logger
-log.remove()
-log.add(sys.stdout, level="INFO")
-log.add(f"{LOG_DIR}/application.log", rotation="1 MB", retention="10 days")
-
-# logger.add(sys.stderr, format="{time} {level} {message}", level="INFO")
-log.add(sys.stdout, level="INFO")
+# Logger configured centrally in main; this module just uses `log`
 
 
 class BookGrabber(QThread):
@@ -37,7 +27,6 @@ class BookGrabber(QThread):
         self.request = WebRequest()
         self.db = Database()
 
-
     def add_values(
         self, app_id: int, prof_id: int, mode: str, data, any_book=False, exact=False
     ):
@@ -50,7 +39,9 @@ class BookGrabber(QThread):
         log.info(f"Working on {len(self.data)} entries")
         self.tstate = (self.app_nr, self.prof_id, self.mode, self.data)
         log.debug("State: " + str(self.tstate))
-        app_nr = self.db.query_db("SELECT appnr FROM semesterapparat WHERE id = ?", (self.app_id,))[0][0]
+        app_nr = self.db.query_db(
+            "SELECT appnr FROM semesterapparat WHERE id = ?", (self.app_id,)
+        )[0][0]
         self.request.set_apparat(app_nr)
         # log.debug(self.tstate)
 
@@ -1,37 +1,24 @@
 import os
 import re
-import sys
 from concurrent.futures import ThreadPoolExecutor
-from datetime import datetime
 from math import ceil
 from queue import Empty, Queue
 from time import monotonic  # <-- NEW
 from typing import List, Optional
 
-import loguru
 from PySide6.QtCore import QThread, Signal
 
-from src import LOG_DIR
-
 # from src.logic.webrequest import BibTextTransformer, WebRequest
 from src.backend.catalogue import Catalogue
 from src.logic import BookData
 from src.logic.SRU import SWB
+from src.shared.logging import log
 
 # use all available cores - 2, but at least 1
 THREAD_COUNT = max(os.cpu_count() - 2, 1)
 THREAD_MIN_ITEMS = 5
 
-log = loguru.logger
-log.remove()
-log.add(sys.stdout, level="INFO")
-log.add(f"{LOG_DIR}/application.log", rotation="1 MB", retention="10 days")
-
-log.add(
-    f"{LOG_DIR}/{datetime.now().strftime('%Y-%m-%d')}.log",
-    rotation="1 day",
-    retention="7 days",
-)
+# Logger configured centrally in main; use shared `log`
 
 swb = SWB()
 dnb = SWB()
@@ -146,7 +133,7 @@ def find_newer_edition(
     if not deduped:
         return None
 
-    # 3) Final pick (single best)
+    # 3) Preserve all qualifying newer editions, but order by preference
     def sort_key(b: BookData):
         year = b.year if b.year is not None else -1
         ed = b.edition_number if b.edition_number is not None else -1
@@ -158,8 +145,8 @@ def find_newer_edition(
             ed,
         )
 
-    best = max(deduped, key=sort_key)
-    return [best] if best else None
+    deduped.sort(key=sort_key, reverse=True)
+    return deduped
 
 
 class NewEditionCheckerThread(QThread):
@@ -1,21 +1,10 @@
-import sys
-import time
-
-# from src.transformers import RDS_AVAIL_DATA
-import loguru
-
 # from icecream import ic
 from PySide6.QtCore import QThread
 from PySide6.QtCore import Signal as Signal
 
-from src import LOG_DIR
 from src.backend.database import Database
 from src.logic.webrequest import BibTextTransformer, WebRequest
+from src.shared.logging import log
-log = loguru.logger
-log.remove()
-log.add(sys.stdout, level="INFO")
-log.add(f"{LOG_DIR}/application.log", rotation="1 MB", retention="10 days")
 
 
 class AvailChecker(QThread):
@@ -23,7 +12,11 @@ class AvailChecker(QThread):
     updateProgress = Signal(int, int)
 
     def __init__(
-        self, links: list = None, appnumber: int = None, parent=None, books=list[dict]
+        self,
+        links: list[str] | None = None,
+        appnumber: int | None = None,
+        parent=None,
+        books: list[dict] | None = None,
     ):
         if links is None:
             links = []
@@ -38,11 +31,13 @@ class AvailChecker(QThread):
         )
         self.links = links
         self.appnumber = appnumber
-        self.books = books
+        self.books = books or []
         log.info(
             f"Started worker with appnumber: {self.appnumber} and links: {self.links} and {len(self.books)} books..."
         )
-        time.sleep(2)
+        # Pre-create reusable request and transformer to avoid per-item overhead
+        self._request = WebRequest().set_apparat(self.appnumber)
+        self._rds_transformer = BibTextTransformer("RDS")
 
     def run(self):
         self.db = Database()
@@ -50,9 +45,8 @@ class AvailChecker(QThread):
         count = 0
         for link in self.links:
             log.info("Processing entry: " + str(link))
-            data = WebRequest().set_apparat(self.appnumber).get_ppn(link).get_data()
-            transformer = BibTextTransformer("RDS")
-            rds = transformer.get_data(data).return_data("rds_availability")
+            data = self._request.get_ppn(link).get_data()
+            rds = self._rds_transformer.get_data(data).return_data("rds_availability")
 
             book_id = None
             if not rds or not rds.items:
@@ -1,26 +1,17 @@
-import sys
+import re
 import xml.etree.ElementTree as ET
 from dataclasses import dataclass, field
-from datetime import datetime
 from enum import Enum
-from typing import Dict, Iterable, List, Optional, Tuple
+from typing import Dict, Iterable, List, Optional, Tuple, Union
 
-import loguru
 import requests
+from requests.adapters import HTTPAdapter
 
-from src import LOG_DIR
+# centralized logging used via src.shared.logging
 from src.logic.dataclass import BookData
+from src.shared.logging import log
 
-log = loguru.logger
-log.remove()
-log.add(sys.stdout, level="INFO")
-log.add(f"{LOG_DIR}/application.log", rotation="1 MB", retention="10 days")
-
-log.add(
-    f"{LOG_DIR}/{datetime.now().strftime('%Y-%m-%d')}.log",
-    rotation="1 day",
-    retention="1 month",
-)
+log  # ensure imported logger is referenced
 
 
 # -----------------------
@@ -184,7 +175,9 @@ def parse_echoed_request(root: ET.Element) -> Optional[EchoedSearchRequest]:
     )
 
 
-def parse_search_retrieve_response(xml_str: str) -> SearchRetrieveResponse:
+def parse_search_retrieve_response(
+    xml_str: Union[str, bytes],
+) -> SearchRetrieveResponse:
     root = ET.fromstring(xml_str)
 
     # Root is zs:searchRetrieveResponse
@@ -556,7 +549,22 @@ class Api:
         self.site = site
         self.url = url
         self.prefix = prefix
-        pass
+        # Reuse TCP connections across requests for better performance
+        self._session = requests.Session()
+        # Slightly larger connection pool for concurrent calls
+        adapter = HTTPAdapter(pool_connections=10, pool_maxsize=20)
+        self._session.mount("http://", adapter)
+        self._session.mount("https://", adapter)
+
+    def close(self):
+        try:
+            self._session.close()
+        except Exception:
+            pass
+
+    def __del__(self):
+        # Best-effort cleanup
+        self.close()
 
     def get(self, query_args: Iterable[str]) -> List[Record]:
         # if any query_arg ends with =, remove it
@@ -580,18 +588,18 @@ class Api:
             "Accept": "application/xml",
             "Accept-Charset": "latin1,utf-8;q=0.7,*;q=0.3",
         }
-        response = requests.get(url, headers=headers)
-        if response.status_code != 200:
-            raise Exception(f"Error fetching data from SWB: {response.status_code}")
-        data = response.content
-
-        # extract top-level response
-        response = parse_search_retrieve_response(data)
-        return response.records
+        # Use persistent session and set timeouts to avoid hanging
+        resp = self._session.get(url, headers=headers, timeout=(3.05, 60))
+        if resp.status_code != 200:
+            raise Exception(f"Error fetching data from SWB: {resp.status_code}")
+        # Parse using raw bytes (original behavior) to preserve encoding edge cases
+        sr = parse_search_retrieve_response(resp.content)
+        return sr.records
 
     def getBooks(self, query_args: Iterable[str]) -> List[BookData]:
         records: List[Record] = self.get(query_args)
-        print(f"{self.site} found {len(records)} records")
+        # Avoid printing on hot paths; rely on logger if needed
+        log.debug(f"{self.site} found {len(records)} records for args={query_args}")
         books: List[BookData] = []
         # extract title from query_args if present
         title = None
@@ -611,7 +619,8 @@ class Api:
         return books
 
     def getLinkForBook(self, book: BookData) -> str:
-        results = self.getBooks()
+        # Not implemented: depends on catalog front-end; return empty string for now
+        return ""
 
 
 class SWB(Api):
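The Api class above now keeps a single requests.Session with a mounted HTTPAdapter and passes an explicit (connect, read) timeout on every call, so repeated SRU requests reuse pooled TCP connections instead of reconnecting each time. A minimal standalone sketch of that pattern outside the class (the example URL is a placeholder assumption, not the catalogue endpoint):

import requests
from requests.adapters import HTTPAdapter

session = requests.Session()
# Pool sizes mirror the values used in Api.__init__ above
adapter = HTTPAdapter(pool_connections=10, pool_maxsize=20)
session.mount("http://", adapter)
session.mount("https://", adapter)


def fetch(url: str) -> bytes:
    # (connect timeout, read timeout) so a stuck server cannot hang the worker thread
    resp = session.get(url, headers={"Accept": "application/xml"}, timeout=(3.05, 60))
    resp.raise_for_status()
    return resp.content


if __name__ == "__main__":
    print(len(fetch("https://example.org/")))  # placeholder URL for illustration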
@@ -18,16 +18,8 @@ from __future__ import annotations
 
 import datetime
 import re
-import sys
-
-import loguru
+from src.shared.logging import log
 
-from src import LOG_DIR
-
-log = loguru.logger
-log.remove()
-log.add(sys.stdout, level="INFO")
-log.add(f"{LOG_DIR}/application.log", rotation="1 MB", retention="10 days")
 
 
 class Semester:
@@ -124,21 +116,22 @@ class Semester:
     # ------------------------------------------------------------------
     # Comparison helpers
     # ------------------------------------------------------------------
-    def isPastSemester(self, other: "Semester") -> bool:
-        if self.year < other.year:
+    def isPastSemester(self, current: "Semester") -> bool:
+        log.debug(f"Comparing {self} < {current}")
+        if self.year < current.year:
             return True
-        if self.year == other.year:
+        if self.year == current.year:
             return (
-                self.semester == "WiSe" and other.semester == "SoSe"
+                self.semester == "WiSe" and current.semester == "SoSe"
             )  # WiSe before next SoSe
         return False
 
-    def isFutureSemester(self, other: "Semester") -> bool:
-        if self.year > other.year:
+    def isFutureSemester(self, current: "Semester") -> bool:
+        if self.year > current.year:
             return True
-        if self.year == other.year:
+        if self.year == current.year:
             return (
-                self.semester == "SoSe" and other.semester == "WiSe"
+                self.semester == "SoSe" and current.semester == "WiSe"
             )  # SoSe after WiSe of same year
         return False
 
@@ -1,23 +1,16 @@
-import sys
 from typing import Any, Optional, Union
 
-import loguru
 import requests
 from bs4 import BeautifulSoup
 
 # import sleep_and_retry decorator to retry requests
 from ratelimit import limits, sleep_and_retry
 
-from src import LOG_DIR
 from src.logic.dataclass import BookData
+from src.shared.logging import log
 from src.transformers import ARRAYData, BibTeXData, COinSData, RDSData, RISData
 from src.transformers.transformers import RDS_AVAIL_DATA, RDS_GENERIC_DATA
 
-log = loguru.logger
-log.remove()
-log.add(sys.stdout, level="INFO")
-log.add(f"{LOG_DIR}/application.log", rotation="1 MB", retention="10 days")
-
 # logger.add(sys.stderr, format="{time} {level} {message}", level="INFO")
 
 
@@ -1,20 +1,13 @@
-import sys
 import zipfile
 from typing import Any
 
 import fitz  # PyMuPDF
-import loguru
 import pandas as pd
 from bs4 import BeautifulSoup
 from docx import Document
 
-from src import LOG_DIR
 from src.logic.dataclass import Book, SemapDocument
+from src.shared.logging import log
-log = loguru.logger
-log.remove()
-log.add(sys.stdout, level="INFO")
-log.add(f"{LOG_DIR}/application.log", rotation="1 MB", retention="10 days")
 
 
 def word_docx_to_csv(path: str) -> list[pd.DataFrame]:
@@ -50,7 +43,6 @@ def get_fach(path: str) -> str:
     soup = BeautifulSoup(xml_data, "xml")
     # text we need is in <w:p w14:paraId="12456A32" ... > -> w:r -> w:t
     paragraphs = soup.find_all("w:p")
-    names = []
     for para in paragraphs:
         para_id = para.get("w14:paraId")
         if para_id == "12456A32":
src/shared/logging.py (new file, 25 lines)
@@ -0,0 +1,25 @@
+import sys
+
+import loguru
+
+from src import LOG_DIR
+
+log = loguru.logger
+_configured = False
+
+
+def configure(level: str = "INFO", to_stdout: bool = True, rotate_bytes: str = "1 MB"):
+    global _configured
+    if _configured:
+        return log
+    log.remove()
+    if to_stdout:
+        log.add(sys.stdout, level=level)
+    # application rolling log
+    log.add(
+        f"{LOG_DIR}/application.log",
+        rotation=rotate_bytes,
+        retention="10 days",
+    )
+    _configured = True
+    return log
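Every module touched in this pull request now gets its logger from this shared module: the entry point calls configure() once and everything else only imports log. A minimal usage sketch, assuming it runs inside this project (the process_apparat function is a hypothetical example, not code from this commit):

from src.shared.logging import configure, log


def process_apparat(appnr: int) -> None:
    # Hypothetical worker used only to show the shared logger in action
    log.info(f"Processing apparat {appnr}")


if __name__ == "__main__":
    configure("INFO")  # done once at startup, as main.py does above
    process_apparat(42)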
@@ -3,20 +3,15 @@ import re
 import smtplib
 import sys
 
-import loguru
 from PySide6 import QtWidgets
 
-from src import LOG_DIR, Icon
+from src import Icon
 from src import settings as config
+from src.shared.logging import log
 
 from .dialog_sources.mail_preview_ui import Ui_eMailPreview as MailPreviewDialog
 from .mailTemplate import MailTemplateDialog
 
-log = loguru.logger
-log.remove()
-log.add(sys.stdout, level="INFO")
-log.add(f"{LOG_DIR}/application.log", rotation="1 MB", retention="10 days")
-
 CSS_RESET = "<style>html,body{margin:0;padding:0}p{margin:0}</style>"
 
 empty_signature = """"""
@@ -1,20 +1,11 @@
-import sys
-
-import loguru
 from PySide6 import QtCore, QtWidgets
 
-from src import LOG_DIR
 from src.backend.catalogue import Catalogue
 from src.backend.database import Database
 from src.ui.dialogs.mail import Mail_Dialog
 
 from .dialog_sources.order_neweditions_ui import Ui_Dialog
 
-log = loguru.logger
-log.remove()
-log.add(sys.stdout, level="INFO")
-log.add(f"{LOG_DIR}/application.log", rotation="1 MB", retention="10 days")
-
 
 class NewEditionDialog(QtWidgets.QDialog, Ui_Dialog):
     def __init__(self, app_id, mail_data):
@@ -31,10 +22,7 @@ class NewEditionDialog(QtWidgets.QDialog, Ui_Dialog):
 
     def populateTable(self):
         for book in self.books:
-            signature = book.signature
-            # if signature is None or signature == "None" and book.ppn is not None:
-            #     signature = self.catalogue.get_signature(f"kid:{book.ppn}")
-            #     book.signature = signature
+            # signature not required here; using book.signature directly when needed
             link_label = QtWidgets.QLabel()
             link = (
                 book.link
@@ -32,6 +32,7 @@ class CheckThread(QtCore.QThread):
             range(len(self.items)),
             unit_scale=True,
         )
+        swb_client = SWB()
         for i in tqdm_object:
             book: BookData = self.items[i]
             author = (
@@ -43,7 +44,7 @@ class CheckThread(QtCore.QThread):
             # remove trailing punctuation from title
             title = book.title.rstrip(" .:,;!?")
             response: list[BookData] = []
-            response = SWB().getBooks(
+            response = swb_client.getBooks(
                 [
                     "pica.bib=20735",
                     f"pica.tit={title.split(':')[0].strip()}",
@@ -88,4 +89,6 @@ class ProgressDialog(QDialog):
         layout.addWidget(self.start_button)
 
     def start(self):
+        # Start logic is managed externally; keep method for UI wiring
+        pass
 
@@ -1,20 +1,12 @@
-import sys
 from typing import Any
 
 import darkdetect
-import loguru
 from PySide6 import QtCore, QtWidgets
 from PySide6.QtCore import QDate
 from PySide6.QtGui import QColor, QPen
 
-from src import LOG_DIR
 from src.backend import Database
+from src.shared.logging import log
-log = loguru.logger
-log.remove()
-log.add(sys.stdout, level="INFO")
-log.add(f"{LOG_DIR}/application.log", rotation="1 MB", retention="10 days")
 
 
 color = "#ddfb00" if darkdetect.isDark() else "#2204ff"
 pen = QPen(QColor(color))
@@ -1,25 +1,19 @@
 import os
-import sys
-
-import loguru
 from PySide6 import QtCore, QtGui, QtWidgets
 from PySide6.QtCore import QDate
 from PySide6.QtGui import QRegularExpressionValidator
 
-from src import LOG_DIR, Icon
+from src import Icon
 from src.backend import Database, recreateElsaFile
 from src.logic import Prof, Semester, elsa_word_to_csv
+from src.shared.logging import log
 from src.ui.dialogs import ElsaAddEntry, popus_confirm
 from src.ui.widgets.filepicker import FilePicker
 from src.ui.widgets.graph import DataQtGraph
 
 from .widget_sources.elsa_maindialog_ui import Ui_Dialog
 
-log = loguru.logger
-log.remove()
-log.add(sys.stdout, level="INFO")
-log.add(f"{LOG_DIR}/application.log", rotation="1 MB", retention="10 days")
 
 
 class ElsaDialog(QtWidgets.QDialog, Ui_Dialog):
     def __init__(self):
@@ -399,6 +393,7 @@ class ElsaDialog(QtWidgets.QDialog, Ui_Dialog):
         # get the file path of the selected file based on it's row
         row = self.dokument_list_elsa.currentRow()
         file = self.dokument_list_elsa.item(row, 3).text()
+        file_location = file
         if file == "Database":
             filename = self.dokument_list_elsa.item(row, 0).text()
             filetype = self.dokument_list_elsa.item(row, 1).text()
@@ -415,16 +410,17 @@ class ElsaDialog(QtWidgets.QDialog, Ui_Dialog):
         log.debug(
             f"elsa_id: {elsa_id}, prof: {self.elsa_prof.currentText()}, semester: {self.elsa_semester.text()}, date: {self.elsa_date.text()}"
         )
-        self.db.insertElsaFile(
-            [
-                {
-                    "name": file.split("/")[-1],
-                    "path": file,
-                    "type": file.split(".")[-1],
-                }
-            ],
-            elsa_id,
-        )
+        if file_location != "Database":
+            self.db.insertElsaFile(
+                [
+                    {
+                        "name": file.split("/")[-1],
+                        "path": file,
+                        "type": file.split(".")[-1],
+                    }
+                ],
+                elsa_id,
+            )
         for row in data:
             if self.seperateEntries.isChecked():
                 if ";" in row["pages"]:
@@ -1,20 +1,12 @@
 import random
-import sys
 from typing import Any, Union
 
-import loguru
 from PySide6 import QtCore, QtGui, QtWidgets
 from PySide6.QtCharts import QCategoryAxis, QChart, QChartView, QLineSeries, QValueAxis
 from PySide6.QtGui import QColor, QPainter, QPen
 
-from src import LOG_DIR
 from src.logic.semester import Semester
 
-log = loguru.logger
-log.remove()
-log.add(sys.stdout, level="INFO")
-log.add(f"{LOG_DIR}/application.log", rotation="1 MB", retention="10 days")
 
 
 def mergedicts(d1: dict[str, Any], d2: dict[str, Any]):
     res: dict[str, Any] = {}
@@ -182,16 +174,3 @@ class DataQtGraph(QtWidgets.QWidget):
         # split the data back into x and y
         data = {"x": list(data.keys()), "y": list(data.values())}
         return data
-
-
-if __name__ == "__main__":
-    import sys
-
-    app = QtWidgets.QApplication(sys.argv)
-
-    graph_data = {"x": ["WiSe 25/26", "WiSe 24/25", "SoSe 25"], "y": [1, 2, 1]}
-    widget = DataGraph(
-        "ELSA Apparate pro Semester", graph_data, True, "Anzahl der Apparate"
-    )
-    widget.show()
-    sys.exit(app.exec())
@@ -1,26 +1,19 @@
-import sys
 from typing import List
 
-import loguru
 from natsort import natsorted
 from PySide6 import QtCore, QtGui, QtWidgets
 from PySide6.QtCore import Signal
 
-from src import LOG_DIR
 from src.backend import Database
 from src.logic import BookData, Prof, Semester, custom_sort, sort_semesters_list
 from src.logic.dataclass import Apparat
+from src.shared.logging import log
 from src.ui.dialogs import ApparatExtendDialog, Mail_Dialog, ReminderDialog
 from src.ui.widgets import DataQtGraph, StatusWidget
 from src.ui.widgets.signature_update import UpdaterThread
 
 from .widget_sources.search_statistic_page_ui import Ui_Dialog
 
-log = loguru.logger
-log.remove()
-log.add(sys.stdout, level="INFO")
-log.add(f"{LOG_DIR}/application.log", rotation="1 MB", retention="10 days")
 
 
 class MyComboBox(QtWidgets.QComboBox):
     def __init__(self, parent=None):
@@ -477,15 +470,7 @@ class SearchStatisticPage(QtWidgets.QDialog, Ui_Dialog):
         data = []
 
         for entry in result:
-            if "deletable" in params.keys():
-                sem = Semester().from_string(
-                    entry[8] if entry[8] is not None else entry[5]
-                )
-                log.info(f"Semester: {sem}")
-                if sem.isPastSemester(Semester()):
-                    data.append(entry)
-            else:
-                data.append(entry)
+            data.append(entry)
         self.tableWidget.setRowCount(len(data))
         if len(data) > 0:
             self.btn_del_select_apparats.setEnabled(True)
@@ -1,28 +1,13 @@
-import sys
-from datetime import datetime
-
-import loguru
 from PySide6 import QtCore, QtWidgets
 from PySide6.QtMultimedia import QAudioOutput, QMediaPlayer
 
-from src import LOG_DIR
 from src.backend.catalogue import Catalogue
 from src.backend.database import Database
 from src.logic.SRU import SWB
+from src.shared.logging import log
 
 from .widget_sources.admin_update_signatures_ui import Ui_Dialog
 
-log = loguru.logger
-log.remove()
-log.add(sys.stdout, level="INFO")
-log.add(f"{LOG_DIR}/application.log", rotation="1 MB", retention="10 days")
-
-log.add(
-    f"{LOG_DIR}/{datetime.now().strftime('%Y-%m-%d')}.log",
-    rotation="1 day",
-    retention="1 month",
-)
 
 
 class UpdaterThread(QtCore.QThread):
     progress = QtCore.Signal(int)
@@ -1,23 +1,17 @@
-import sys
 from pathlib import Path
 from typing import Any
 
-import loguru
 from appdirs import AppDirs
 from PySide6 import QtCore, QtWidgets
 
-from src import LOG_DIR, settings
+from src import settings
 from src.backend import Database
+from src.shared.logging import log
 
 from .widget_sources.welcome_wizard_ui import Ui_Wizard
 
 appdirs = AppDirs("SemesterApparatsManager", "SAM")
 
-log = loguru.logger
-log.remove()
-log.add(sys.stdout, level="INFO")
-log.add(f"{LOG_DIR}/application.log", rotation="1 MB", retention="10 days")
 
 
 class WelcomeWizard(QtWidgets.QWizard, Ui_Wizard):
     def __init__(self, parent=None):
@@ -1,27 +1,17 @@
+import os
 from datetime import datetime
+from os.path import basename
 
 from docx import Document
 from docx.enum.text import WD_PARAGRAPH_ALIGNMENT
-from docx.shared import Pt, RGBColor, Cm
 from docx.oxml import OxmlElement
 from docx.oxml.ns import qn
-import os
-from os.path import basename
-from loguru import logger as log
-import sys
-from src import settings
+from docx.shared import Cm, Pt, RGBColor
 
+from src import settings
+from src.shared.logging import log
 
 logger = log
-logger.remove()
-logger.add("logs/application.log", rotation="1 week", retention="1 month", enqueue=True)
-log.add(
-    f"logs/{datetime.now().strftime('%Y-%m-%d')}.log",
-    rotation="1 day",
-    compression="zip",
-)
-
-# logger.add(sys.stderr, format="{time} {level} {message}", level="INFO")
-logger.add(sys.stdout)
 
 font = "Cascadia Mono"
 
@@ -29,8 +19,8 @@ font = "Cascadia Mono"
 def print_document(file: str):
     # send document to printer as attachment of email
     import smtplib
-    from email.mime.multipart import MIMEMultipart
     from email.mime.application import MIMEApplication
+    from email.mime.multipart import MIMEMultipart
     from email.mime.text import MIMEText
 
     smtp = settings.mail.smtp_server
@@ -108,7 +98,7 @@ class SemesterDocument:
         self.filename = filename
         if full:
             log.info("Full document generation")
-            self.cleanup
+            self.cleanup()
             log.info("Cleanup done")
             self.make_document()
             log.info("Document created")
@@ -378,4 +368,4 @@ if __name__ == "__main__":
         "Karoß (Gymnastik - Sich Bewegen mit und ohne Handgeräte)",
         "Sahrai (Kindheit und Gesellschaft)",
     ]
     doc = SemapSchilder(entries)
test.py (40 changed lines)
@@ -1,11 +1,33 @@
-olddata = (
-    None,
-    "Christian",
-    "Berger",
-    "alexander.kirchner@ph-freiburg.de",
-    "764",
-    "Berger Christian",
-)
+from src.logic.semester import Semester
 
+sem1 = Semester.from_string("WiSe 23/24")
+print(sem1.value)
+sem2 = Semester.from_string("SoSe 24")
+print(sem2.value)
+sem3 = Semester()
+print(sem3.value)
 
-print(olddata[1], olddata[2], olddata[3], olddata[4], olddata[5])
+print("Comparing Sem1 with sem2")
+assert sem1.isPastSemester(sem2) is True
+assert sem1.isFutureSemester(sem2) is False
+assert sem1.isMatch(sem2) is False
+print("Comparing Sem2 with sem1")
+assert sem2.isPastSemester(sem1) is False
+assert sem2.isFutureSemester(sem1) is True
+assert sem2.isMatch(sem1) is False
+print("Comparing Sem1 with sem1")
+assert sem1.isPastSemester(sem1) is False
+assert sem1.isFutureSemester(sem1) is False
+assert sem1.isMatch(sem1) is True
+print("Comparing Sem2 with sem2")
+assert sem2.isPastSemester(sem2) is False
+assert sem2.isFutureSemester(sem2) is False
+assert sem2.isMatch(sem2) is True
+print("Comparing Sem3 with sem3")
+assert sem3.isPastSemester(sem3) is False
+assert sem3.isFutureSemester(sem3) is False
+assert sem3.isMatch(sem3) is True
+print("Comparing Sem1 with sem3")
+assert sem1.isPastSemester(sem3) is True
+assert sem1.isFutureSemester(sem3) is False
+assert sem1.isMatch(sem3) is False
uv.lock (generated, 28 changed lines)
@@ -165,15 +165,6 @@ wheels = [
     { url = "https://files.pythonhosted.org/packages/e5/48/1549795ba7742c948d2ad169c1c8cdbae65bc450d6cd753d124b17c8cd32/certifi-2025.8.3-py3-none-any.whl", hash = "sha256:f6c12493cfb1b06ba2ff328595af9350c65d6644968e5d3a2ffd78699af217a5", size = 161216 },
 ]
 
-[[package]]
-name = "chardet"
-version = "5.2.0"
-source = { registry = "https://pypi.org/simple" }
-sdist = { url = "https://files.pythonhosted.org/packages/f3/0d/f7b6ab21ec75897ed80c17d79b15951a719226b9fababf1e40ea74d69079/chardet-5.2.0.tar.gz", hash = "sha256:1b3b6ff479a8c414bc3fa2c0852995695c4a026dcd6d0633b2dd092ca39c1cf7", size = 2069618 }
-wheels = [
-    { url = "https://files.pythonhosted.org/packages/38/6f/f5fbc992a329ee4e0f288c1fe0e2ad9485ed064cac731ed2fe47dcc38cbf/chardet-5.2.0-py3-none-any.whl", hash = "sha256:e1cf59446890a00105fe7b7912492ea04b6e6f06d4b742b2c788469e34c82970", size = 199385 },
-]
-
 [[package]]
 name = "charset-normalizer"
 version = "3.4.3"
@@ -1159,19 +1150,6 @@ wheels = [
     { url = "https://files.pythonhosted.org/packages/b5/75/0bced57e6ba014adeeaa504205c4a92d5211b6c5daa20c0a80b06de6d0f4/pyzotero-1.6.11-py3-none-any.whl", hash = "sha256:949cdff92fd688fe70f609c928f09ab25a7d2aa05f35c575725d5bd0f395d3b4", size = 26368 },
 ]
 
-[[package]]
-name = "qtqdm"
-version = "0.2.0"
-source = { registry = "https://git.theprivateserver.de/api/packages/WorldTeacher/pypi/simple/" }
-dependencies = [
-    { name = "pyside6" },
-    { name = "tqdm" },
-]
-sdist = { url = "https://git.theprivateserver.de/api/packages/WorldTeacher/pypi/files/qtqdm/0.2.0/qtqdm-0.2.0.tar.gz", hash = "sha256:86f9b3764d0ebe32edba050de5aa4fb29e287c025d5197ad17e8e8da02155a88" }
-wheels = [
-    { url = "https://git.theprivateserver.de/api/packages/WorldTeacher/pypi/files/qtqdm/0.2.0/qtqdm-0.2.0-py3-none-any.whl", hash = "sha256:9a76e4086b09edb698861de0b28663e12ddda34ddb039be607bfd27a3aa07a0f" },
-]
-
 [[package]]
 name = "questionary"
 version = "2.1.0"
@@ -1240,7 +1218,7 @@ dependencies = [
     { name = "appdirs" },
     { name = "beautifulsoup4" },
     { name = "bump-my-version" },
-    { name = "chardet" },
+    { name = "charset-normalizer" },
     { name = "comtypes" },
     { name = "darkdetect" },
     { name = "docx2pdf" },
@@ -1258,7 +1236,6 @@ dependencies = [
     { name = "pyside6" },
     { name = "python-docx" },
     { name = "pyzotero" },
-    { name = "qtqdm" },
     { name = "ratelimit" },
     { name = "requests" },
 ]
@@ -1278,7 +1255,7 @@ requires-dist = [
     { name = "appdirs", specifier = ">=1.4.4" },
     { name = "beautifulsoup4", specifier = ">=4.13.5" },
     { name = "bump-my-version", specifier = ">=0.29.0" },
-    { name = "chardet", specifier = ">=5.2.0" },
+    { name = "charset-normalizer", specifier = ">=3.4.3" },
     { name = "comtypes", specifier = ">=1.4.9" },
     { name = "darkdetect", specifier = ">=0.8.0" },
     { name = "docx2pdf", specifier = ">=0.1.8" },
@@ -1296,7 +1273,6 @@ requires-dist = [
     { name = "pyside6", specifier = ">=6.9.1" },
     { name = "python-docx", specifier = ">=1.1.2" },
     { name = "pyzotero", specifier = ">=1.6.4" },
-    { name = "qtqdm" },
     { name = "ratelimit", specifier = ">=2.2.1" },
     { name = "requests", specifier = ">=2.32.3" },
 ]