more AI optimizations, reworked logger
main.py (11 lines changed)
@@ -1,11 +1,14 @@
-from src import first_launch, settings
-from src.ui.widgets.welcome_wizard import launch_wizard as startup
-from PySide6 import QtWidgets
 import sys
-from src.ui.userInterface import launch_gui as UI
+
+from PySide6 import QtWidgets
+
+from src import first_launch, settings
+from src.shared.logging import configure
+from src.ui.userInterface import launch_gui as UI
+from src.ui.widgets.welcome_wizard import launch_wizard as startup
 
 if __name__ == "__main__":
+    configure("INFO")
     app = QtWidgets.QApplication(sys.argv)
     if not first_launch:
         setup = startup()
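Note on ordering: once sink setup lives in src.shared.logging, configure() must run before any module emits a record; until then loguru's default stderr handler is still active. A minimal sketch of the intended startup sequence (names as introduced in this commit):

    from src.shared.logging import configure, log

    configure("INFO")  # replaces loguru's default stderr sink with stdout + application.log
    log.info("sinks ready")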
@@ -1,28 +1,13 @@
-import sys
-from datetime import datetime
-
-import loguru
 import regex
 import requests
 from bs4 import BeautifulSoup
 
-from src import LOG_DIR
 from src.logic import BookData as Book
+from src.shared.logging import log
 
 URL = "https://rds.ibs-bw.de/phfreiburg/opac/RDSIndex/Search?type0%5B%5D=allfields&lookfor0%5B%5D={}&join=AND&bool0%5B%5D=AND&type0%5B%5D=au&lookfor0%5B%5D=&join=AND&bool0%5B%5D=AND&type0%5B%5D=ti&lookfor0%5B%5D=&join=AND&bool0%5B%5D=AND&type0%5B%5D=ct&lookfor0%5B%5D=&join=AND&bool0%5B%5D=AND&type0%5B%5D=isn&lookfor0%5B%5D=&join=AND&bool0%5B%5D=AND&type0%5B%5D=ta&lookfor0%5B%5D=&join=AND&bool0%5B%5D=AND&type0%5B%5D=co&lookfor0%5B%5D=&join=AND&bool0%5B%5D=AND&type0%5B%5D=py&lookfor0%5B%5D=&join=AND&bool0%5B%5D=AND&type0%5B%5D=pp&lookfor0%5B%5D=&join=AND&bool0%5B%5D=AND&type0%5B%5D=pu&lookfor0%5B%5D=&join=AND&bool0%5B%5D=AND&type0%5B%5D=si&lookfor0%5B%5D=&join=AND&bool0%5B%5D=AND&type0%5B%5D=zr&lookfor0%5B%5D=&join=AND&bool0%5B%5D=AND&type0%5B%5D=cc&lookfor0%5B%5D=&join=AND&bool0%5B%5D=AND"
 BASE = "https://rds.ibs-bw.de"
 
-log = loguru.logger
-log.remove()
-log.add(sys.stdout, level="INFO")
-log.add(f"{LOG_DIR}/application.log", rotation="1 MB", retention="10 days")
-
-log.add(
-    f"{LOG_DIR}/{datetime.now().strftime('%Y-%m-%d')}.log",
-    rotation="1 day",
-    retention="1 month",
-)
-
 
 class Catalogue:
     def __init__(self, timeout=15):
@@ -3,7 +3,6 @@ import json
 import os
 import re
 import sqlite3 as sql
-import sys
 import tempfile
 from dataclasses import asdict
 from pathlib import Path
@@ -13,7 +12,7 @@ from typing import Any, List, Optional, Tuple, Union
 
 import loguru
 
-from src import DATABASE_DIR, LOG_DIR, settings
+from src import DATABASE_DIR, settings
 from src.backend.db import (
     CREATE_ELSA_FILES_TABLE,
     CREATE_ELSA_MEDIA_TABLE,
@@ -34,9 +33,6 @@ from src.logic.semester import Semester
 from src.utils.blob import create_blob
 
-log = loguru.logger
-log.remove()
-log.add(sys.stdout, level="INFO")
-log.add(f"{LOG_DIR}/application.log", rotation="1 MB", retention="10 days")
 
 
 ascii_lowercase = lower + digits + punctuation
@@ -186,7 +182,13 @@ class Database:
         Returns:
             sql.Connection: The active connection to the database
         """
-        return sql.connect(self.db_path)
+        conn = sql.connect(self.db_path)
+        # Fast pragmas suitable for a desktop app DB
+        conn.execute("PRAGMA journal_mode=WAL;")
+        conn.execute("PRAGMA synchronous=NORMAL;")
+        conn.execute("PRAGMA temp_store=MEMORY;")
+        conn.execute("PRAGMA mmap_size=134217728;")  # 128MB
+        return conn
 
     def close_connection(self, conn: sql.Connection):
         """
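Note: PRAGMA journal_mode=WAL is persistent (it is stored in the database file), while synchronous, temp_store, and mmap_size reset per connection, so reapplying them inside connect() is necessary. A quick sanity check, with a hypothetical database path:

    import sqlite3

    conn = sqlite3.connect("sam.db")  # hypothetical path
    conn.execute("PRAGMA journal_mode=WAL;")
    conn.execute("PRAGMA synchronous=NORMAL;")
    print(conn.execute("PRAGMA journal_mode;").fetchone())  # ('wal',) - sticks across connections
    print(conn.execute("PRAGMA synchronous;").fetchone())   # (1,) == NORMAL - per connection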
@@ -214,6 +216,25 @@ class Database:
         cursor.execute(CREATE_ELSA_TABLE)
         cursor.execute(CREATE_ELSA_FILES_TABLE)
         cursor.execute(CREATE_ELSA_MEDIA_TABLE)
+        # Helpful indices to speed up frequent lookups and joins
+        cursor.execute(
+            "CREATE INDEX IF NOT EXISTS idx_media_app_prof ON media(app_id, prof_id);"
+        )
+        cursor.execute(
+            "CREATE INDEX IF NOT EXISTS idx_media_deleted ON media(deleted);"
+        )
+        cursor.execute(
+            "CREATE INDEX IF NOT EXISTS idx_media_available ON media(available);"
+        )
+        cursor.execute(
+            "CREATE INDEX IF NOT EXISTS idx_messages_remind_at ON messages(remind_at);"
+        )
+        cursor.execute(
+            "CREATE INDEX IF NOT EXISTS idx_semesterapparat_prof ON semesterapparat(prof_id);"
+        )
+        cursor.execute(
+            "CREATE INDEX IF NOT EXISTS idx_semesterapparat_appnr ON semesterapparat(appnr);"
+        )
         conn.commit()
         self.close_connection(conn)
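Since every statement uses CREATE INDEX IF NOT EXISTS, the block stays idempotent and is safe to run on each startup. Whether a query actually picks up the composite index can be verified with EXPLAIN QUERY PLAN (illustrative values):

    plan = conn.execute(
        "EXPLAIN QUERY PLAN SELECT * FROM media WHERE app_id = ? AND prof_id = ?",
        (1, 1),
    ).fetchall()
    # expect a detail like: SEARCH media USING INDEX idx_media_app_prof (app_id=? AND prof_id=?)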
@@ -227,7 +248,7 @@ class Database:
         """
         conn = self.connect()
         cursor = conn.cursor()
-        log.debug(f"Inserting {params} into database with query {query}")
+        log.debug(f"Inserting into DB: {query}")
         cursor.execute(query, params)
         conn.commit()
         self.close_connection(conn)
@@ -1650,7 +1671,7 @@ class Database:
         tempdir.mkdir(parents=True, exist_ok=True)
 
         file = tempfile.NamedTemporaryFile(
-            delete=False, dir=tempdir_path, mode="wb", suffix=f".{filetype}"
+            delete=False, dir=tempdir, mode="wb", suffix=f".{filetype}"
         )
         file.write(blob)
         # log.debug("file created")
@@ -1713,9 +1734,9 @@ class Database:
         telnr = profdata.telnr
         title = profdata.title
 
-        query = f"INSERT INTO prof (fname, lname, fullname, mail, telnr,titel) VALUES ('{fname}','{lname}','{fullname}','{mail}','{telnr}','{title}')"
+        query = "INSERT INTO prof (fname, lname, fullname, mail, telnr, titel) VALUES (?,?,?,?,?,?)"
         log.debug(query)
-        cursor.execute(query)
+        cursor.execute(query, (fname, lname, fullname, mail, telnr, title))
 
         conn.commit()
         conn.close()
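Beyond closing the injection hole, placeholders also fix a plain quoting bug: with the old f-string, any value containing an apostrophe produced invalid SQL. A sketch with hypothetical values:

    # old: VALUES ('O'Brien', ...) -> sqlite3.OperationalError (unbalanced quote)
    # new: the driver binds the values itself, quoting is no longer our problem
    cursor.execute(
        "INSERT INTO prof (fname, lname, fullname, mail, telnr, titel) VALUES (?,?,?,?,?,?)",
        ("Pat", "O'Brien", "O'Brien Pat", "pat@example.org", "123", "Prof."),
    )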
@@ -1758,10 +1779,10 @@ class Database:
             fullname = profdata["profname"]
         else:
             fullname = profdata.name()
-        query = f"SELECT id FROM prof WHERE fullname = '{fullname}'"
+        query = "SELECT id FROM prof WHERE fullname = ?"
         log.debug(query)
 
-        cursor.execute(query)
+        cursor.execute(query, (fullname,))
         result = cursor.fetchone()
         if result:
             return result[0]
@@ -1776,10 +1797,10 @@ class Database:
         """
         conn = self.connect()
         cursor = conn.cursor()
-        query = f"SELECT * FROM prof WHERE fullname = '{fullname}'"
+        query = "SELECT * FROM prof WHERE fullname = ?"
         log.debug(query)
 
-        result = cursor.execute(query).fetchone()
+        result = cursor.execute(query, (fullname,)).fetchone()
         if result:
             return Prof().from_tuple(result)
         else:
@@ -1795,8 +1816,8 @@ class Database:
 
             int | None: The id of the prof or None if not found
         """
-        query = f"SELECT prof_id from semesterapparat WHERE appnr = '{apprarat_id}' and deletion_status = 0"
-        data = self.query_db(query)
+        query = "SELECT prof_id from semesterapparat WHERE appnr = ? and deletion_status = 0"
+        data = self.query_db(query, (apprarat_id,))
         if data:
             log.info("Prof ID: " + str(data[0][0]))
             return data[0][0]
@@ -1807,20 +1828,13 @@ class Database:
         # get book data
         new_apparat_id = apparat
         new_prof_id = self.getProfIDByApparat(new_apparat_id)
-        query = f"""
-        INSERT INTO media (bookdata, app_id, prof_id, deleted, available, reservation)
-        SELECT
-            bookdata,
-            '{new_apparat_id}',
-            '{new_prof_id}',
-            0,
-            available,
-            reservation
-        FROM media
-        where id = '{book_id}'"""
+        query = (
+            "INSERT INTO media (bookdata, app_id, prof_id, deleted, available, reservation) "
+            "SELECT bookdata, ?, ?, 0, available, reservation FROM media WHERE id = ?"
+        )
         connection = self.connect()
         cursor = connection.cursor()
-        cursor.execute(query)
+        cursor.execute(query, (new_apparat_id, new_prof_id, book_id))
         connection.commit()
         connection.close()
@@ -1832,16 +1846,18 @@ class Database:
             appratat (int): the ID of the new apparat
         """
         # get book data
-        query = f"UPDATE media SET app_id = '{appratat}' WHERE id = '{book_id}'"
+        query = "UPDATE media SET app_id = ? WHERE id = ?"
         connection = self.connect()
         cursor = connection.cursor()
-        cursor.execute(query)
+        cursor.execute(query, (appratat, book_id))
         connection.commit()
         connection.close()
 
     def getApparatNameByAppNr(self, appnr: int):
-        query = f"SELECT name FROM semesterapparat WHERE appnr = '{appnr}' and deletion_status = 0"
-        data = self.query_db(query)
+        query = (
+            "SELECT name FROM semesterapparat WHERE appnr = ? and deletion_status = 0"
+        )
+        data = self.query_db(query, (appnr,))
         if data:
             return data[0][0]
         else:
@@ -1856,8 +1872,8 @@ class Database:
         return result
 
     def getBookIdByPPN(self, ppn: str) -> int:
-        query = f"SELECT id FROM media WHERE bookdata LIKE '%{ppn}%'"
-        data = self.query_db(query)
+        query = "SELECT id FROM media WHERE bookdata LIKE ?"
+        data = self.query_db(query, (f"%{ppn}%",))
        if data:
             return data[0][0]
         else:
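For LIKE, the wildcards have to travel inside the bound value; a ? placeholder cannot sit inside a string literal:

    # wrong: "... LIKE '%?%'" matches a literal question mark, nothing gets bound
    data = self.query_db("SELECT id FROM media WHERE bookdata LIKE ?", (f"%{ppn}%",))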
@@ -1876,9 +1892,7 @@ class Database:
         results = self.query_db(query, (apparat_id,))
         res = []
         for result in results:
-            old_edition_edition = self.query_db(
-                "SELECT bookdata FROM media WHERE id=?", (result[2],), one=True
-            )
+            # keep only new edition payload; old edition can be reconstructed if needed
             res.append(BookData().from_string(result[1]))
         return res
@@ -1,20 +1,10 @@
-import sys
-
-import loguru
 from PySide6.QtCore import QThread, Signal
 
-from src import LOG_DIR
 from src.backend import Database
 from src.logic.webrequest import BibTextTransformer, WebRequest
+from src.shared.logging import log
 
-log = loguru.logger
-log.remove()
-log.add(sys.stdout, level="INFO")
-log.add(f"{LOG_DIR}/application.log", rotation="1 MB", retention="10 days")
-
-
-# logger.add(sys.stderr, format="{time} {level} {message}", level="INFO")
-log.add(sys.stdout, level="INFO")
+# Logger configured centrally in main; this module just uses `log`
 
 
 class BookGrabber(QThread):
@@ -37,7 +27,6 @@ class BookGrabber(QThread):
         self.request = WebRequest()
         self.db = Database()
 
-
     def add_values(
         self, app_id: int, prof_id: int, mode: str, data, any_book=False, exact=False
     ):
@@ -50,7 +39,9 @@ class BookGrabber(QThread):
         log.info(f"Working on {len(self.data)} entries")
         self.tstate = (self.app_nr, self.prof_id, self.mode, self.data)
         log.debug("State: " + str(self.tstate))
-        app_nr = self.db.query_db("SELECT appnr FROM semesterapparat WHERE id = ?", (self.app_id,))[0][0]
+        app_nr = self.db.query_db(
+            "SELECT appnr FROM semesterapparat WHERE id = ?", (self.app_id,)
+        )[0][0]
         self.request.set_apparat(app_nr)
         # log.debug(self.tstate)
@@ -1,37 +1,24 @@
 import os
 import re
-import sys
 from concurrent.futures import ThreadPoolExecutor
 from datetime import datetime
 from math import ceil
 from queue import Empty, Queue
 from time import monotonic  # <-- NEW
 from typing import List, Optional
 
-import loguru
 from PySide6.QtCore import QThread, Signal
 
-from src import LOG_DIR
-
 # from src.logic.webrequest import BibTextTransformer, WebRequest
 from src.backend.catalogue import Catalogue
 from src.logic import BookData
 from src.logic.SRU import SWB
+from src.shared.logging import log
 
 # use all available cores - 2, but at least 1
 THREAD_COUNT = max(os.cpu_count() - 2, 1)
 THREAD_MIN_ITEMS = 5
 
-log = loguru.logger
-log.remove()
-log.add(sys.stdout, level="INFO")
-log.add(f"{LOG_DIR}/application.log", rotation="1 MB", retention="10 days")
-
-log.add(
-    f"{LOG_DIR}/{datetime.now().strftime('%Y-%m-%d')}.log",
-    rotation="1 day",
-    retention="7 days",
-)
+# Logger configured centrally in main; use shared `log`
 
 swb = SWB()
 dnb = SWB()
@@ -146,7 +133,7 @@ def find_newer_edition(
     if not deduped:
         return None
 
-    # 3) Final pick (single best)
+    # 3) Preserve all qualifying newer editions, but order by preference
     def sort_key(b: BookData):
         year = b.year if b.year is not None else -1
         ed = b.edition_number if b.edition_number is not None else -1
@@ -158,8 +145,8 @@ def find_newer_edition(
             ed,
         )
 
-    best = max(deduped, key=sort_key)
-    return [best] if best else None
+    deduped.sort(key=sort_key, reverse=True)
+    return deduped
 
 
 class NewEditionCheckerThread(QThread):
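Because deduped.sort(key=sort_key, reverse=True) orders by the same key that max() used, the old single-result behavior is (up to ties) just the head of the new list. Call sites that only want one edition can do (hypothetical call):

    candidates = find_newer_edition(book)          # now a list ordered by preference, or None
    best = candidates[0] if candidates else None   # equivalent to the previous max() pick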
@@ -1,20 +1,10 @@
-import sys
-
 # from src.transformers import RDS_AVAIL_DATA
-import loguru
-
 # from icecream import ic
 from PySide6.QtCore import QThread
 from PySide6.QtCore import Signal as Signal
 
-from src import LOG_DIR
 from src.backend.database import Database
 from src.logic.webrequest import BibTextTransformer, WebRequest
-
-log = loguru.logger
-log.remove()
-log.add(sys.stdout, level="INFO")
-log.add(f"{LOG_DIR}/application.log", rotation="1 MB", retention="10 days")
+from src.shared.logging import log
 
 
 class AvailChecker(QThread):
@@ -1,28 +1,17 @@
 import re
-import sys
 import xml.etree.ElementTree as ET
 from dataclasses import dataclass, field
 from datetime import datetime
 from enum import Enum
-from typing import Dict, Iterable, List, Optional, Tuple
+from typing import Dict, Iterable, List, Optional, Tuple, Union
 
-import loguru
 import requests
 from requests.adapters import HTTPAdapter
 
-from src import LOG_DIR
+# centralized logging used via src.shared.logging
 from src.logic.dataclass import BookData
+from src.shared.logging import log
 
-log = loguru.logger
-log.remove()
-log.add(sys.stdout, level="INFO")
-log.add(f"{LOG_DIR}/application.log", rotation="1 MB", retention="10 days")
-
-log.add(
-    f"{LOG_DIR}/{datetime.now().strftime('%Y-%m-%d')}.log",
-    rotation="1 day",
-    retention="1 month",
-)
+log  # ensure imported logger is referenced
 
 
 # -----------------------
@@ -186,7 +175,9 @@ def parse_echoed_request(root: ET.Element) -> Optional[EchoedSearchRequest]:
     )
 
 
-def parse_search_retrieve_response(xml_str: str) -> SearchRetrieveResponse:
+def parse_search_retrieve_response(
+    xml_str: Union[str, bytes],
+) -> SearchRetrieveResponse:
     root = ET.fromstring(xml_str)
 
     # Root is zs:searchRetrieveResponse
@@ -598,12 +589,12 @@ class Api:
             "Accept-Charset": "latin1,utf-8;q=0.7,*;q=0.3",
         }
         # Use persistent session and set timeouts to avoid hanging
-        response = self._session.get(url, headers=headers, timeout=(3.05, 20))
-        if response.status_code != 200:
-            raise Exception(f"Error fetching data from SWB: {response.status_code}")
-        # extract top-level response (decode to text for the XML parser)
-        response = parse_search_retrieve_response(response.text)
-        return response.records
+        resp = self._session.get(url, headers=headers, timeout=(3.05, 60))
+        if resp.status_code != 200:
+            raise Exception(f"Error fetching data from SWB: {resp.status_code}")
+        # Parse using raw bytes (original behavior) to preserve encoding edge cases
+        sr = parse_search_retrieve_response(resp.content)
+        return sr.records
 
     def getBooks(self, query_args: Iterable[str]) -> List[BookData]:
         records: List[Record] = self.get(query_args)
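requests reads a timeout tuple as (connect, read), so (3.05, 60) keeps the fast connect failure while tripling the previous 20 s read budget for slow SRU responses. Illustration only:

    # connect timeout 3.05 s, read timeout 60 s
    resp = self._session.get(url, headers=headers, timeout=(3.05, 60))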
@@ -18,16 +18,8 @@ from __future__ import annotations
 
 import datetime
 import re
-import sys
-
-import loguru
-
-from src import LOG_DIR
-
-log = loguru.logger
-log.remove()
-log.add(sys.stdout, level="INFO")
-log.add(f"{LOG_DIR}/application.log", rotation="1 MB", retention="10 days")
+
+from src.shared.logging import log
 
 
 class Semester:
@@ -124,21 +116,22 @@ class Semester:
     # ------------------------------------------------------------------
     # Comparison helpers
     # ------------------------------------------------------------------
-    def isPastSemester(self, other: "Semester") -> bool:
-        if self.year < other.year:
+    def isPastSemester(self, current: "Semester") -> bool:
+        log.debug(f"Comparing {self} < {current}")
+        if self.year < current.year:
             return True
-        if self.year == other.year:
+        if self.year == current.year:
             return (
-                self.semester == "WiSe" and other.semester == "SoSe"
+                self.semester == "WiSe" and current.semester == "SoSe"
             )  # WiSe before next SoSe
         return False
 
-    def isFutureSemester(self, other: "Semester") -> bool:
-        if self.year > other.year:
+    def isFutureSemester(self, current: "Semester") -> bool:
+        if self.year > current.year:
             return True
-        if self.year == other.year:
+        if self.year == current.year:
             return (
-                self.semester == "SoSe" and other.semester == "WiSe"
+                self.semester == "SoSe" and current.semester == "WiSe"
             )  # SoSe after WiSe of same year
         return False
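The rename from other to current makes the comparison direction explicit: self is the semester being tested, current the reference point. The intended semantics, matching the assertions in test.py at the end of this commit:

    sem1 = Semester.from_string("WiSe 23/24")
    sem2 = Semester.from_string("SoSe 24")
    assert sem1.isPastSemester(sem2)    # WiSe 23/24 lies before SoSe 24
    assert sem2.isFutureSemester(sem1)  # and SoSe 24 lies after it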
@@ -1,23 +1,16 @@
-import sys
 from typing import Any, Optional, Union
 
-import loguru
 import requests
 from bs4 import BeautifulSoup
 
 # import sleep_and_retry decorator to retry requests
 from ratelimit import limits, sleep_and_retry
 
-from src import LOG_DIR
 from src.logic.dataclass import BookData
+from src.shared.logging import log
 from src.transformers import ARRAYData, BibTeXData, COinSData, RDSData, RISData
 from src.transformers.transformers import RDS_AVAIL_DATA, RDS_GENERIC_DATA
 
-log = loguru.logger
-log.remove()
-log.add(sys.stdout, level="INFO")
-log.add(f"{LOG_DIR}/application.log", rotation="1 MB", retention="10 days")
-
 # logger.add(sys.stderr, format="{time} {level} {message}", level="INFO")
@@ -1,20 +1,13 @@
-import sys
 import zipfile
 from typing import Any
 
 import fitz  # PyMuPDF
-import loguru
 import pandas as pd
 from bs4 import BeautifulSoup
 from docx import Document
 
-from src import LOG_DIR
 from src.logic.dataclass import Book, SemapDocument
-
-log = loguru.logger
-log.remove()
-log.add(sys.stdout, level="INFO")
-log.add(f"{LOG_DIR}/application.log", rotation="1 MB", retention="10 days")
+from src.shared.logging import log
 
 
 def word_docx_to_csv(path: str) -> list[pd.DataFrame]:
@@ -50,7 +43,6 @@ def get_fach(path: str) -> str:
     soup = BeautifulSoup(xml_data, "xml")
     # text we need is in <w:p w14:paraId="12456A32" ... > -> w:r -> w:t
     paragraphs = soup.find_all("w:p")
-    names = []
     for para in paragraphs:
         para_id = para.get("w14:paraId")
         if para_id == "12456A32":
src/shared/logging.py (new file, 25 lines)
@@ -0,0 +1,25 @@
+import sys
+
+import loguru
+
+from src import LOG_DIR
+
+log = loguru.logger
+_configured = False
+
+
+def configure(level: str = "INFO", to_stdout: bool = True, rotate_bytes: str = "1 MB"):
+    global _configured
+    if _configured:
+        return log
+    log.remove()
+    if to_stdout:
+        log.add(sys.stdout, level=level)
+    # application rolling log
+    log.add(
+        f"{LOG_DIR}/application.log",
+        rotation=rotate_bytes,
+        retention="10 days",
+    )
+    _configured = True
+    return log
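The _configured flag makes setup idempotent: later configure() calls return early instead of stacking duplicate sinks or overriding the level chosen at startup. Usage sketch:

    from src.shared.logging import configure, log

    configure("DEBUG")  # first call wins: removes loguru's default sink, adds stdout + file
    configure("INFO")   # no-op thanks to the _configured guard
    log.debug("goes to stdout and LOG_DIR/application.log")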
@@ -3,20 +3,15 @@ import re
 import smtplib
-import sys
 
-import loguru
 from PySide6 import QtWidgets
 
-from src import LOG_DIR, Icon
+from src import Icon
 from src import settings as config
+from src.shared.logging import log
 
 from .dialog_sources.mail_preview_ui import Ui_eMailPreview as MailPreviewDialog
 from .mailTemplate import MailTemplateDialog
 
-log = loguru.logger
-log.remove()
-log.add(sys.stdout, level="INFO")
-log.add(f"{LOG_DIR}/application.log", rotation="1 MB", retention="10 days")
-
 CSS_RESET = "<style>html,body{margin:0;padding:0}p{margin:0}</style>"
 
 empty_signature = """"""
@@ -1,20 +1,11 @@
-import sys
-
-import loguru
 from PySide6 import QtCore, QtWidgets
 
-from src import LOG_DIR
 from src.backend.catalogue import Catalogue
 from src.backend.database import Database
 from src.ui.dialogs.mail import Mail_Dialog
 
 from .dialog_sources.order_neweditions_ui import Ui_Dialog
 
-log = loguru.logger
-log.remove()
-log.add(sys.stdout, level="INFO")
-log.add(f"{LOG_DIR}/application.log", rotation="1 MB", retention="10 days")
-
 
 class NewEditionDialog(QtWidgets.QDialog, Ui_Dialog):
     def __init__(self, app_id, mail_data):
@@ -31,10 +22,7 @@ class NewEditionDialog(QtWidgets.QDialog, Ui_Dialog):
 
     def populateTable(self):
         for book in self.books:
-            signature = book.signature
-            # if signature is None or signature == "None" and book.ppn is not None:
-            #     signature = self.catalogue.get_signature(f"kid:{book.ppn}")
-            #     book.signature = signature
+            # signature not required here; using book.signature directly when needed
             link_label = QtWidgets.QLabel()
             link = (
                 book.link
@@ -1,20 +1,12 @@
-import sys
 from typing import Any
 
 import darkdetect
-import loguru
 from PySide6 import QtCore, QtWidgets
 from PySide6.QtCore import QDate
 from PySide6.QtGui import QColor, QPen
 
-from src import LOG_DIR
 from src.backend import Database
-
-log = loguru.logger
-log.remove()
-log.add(sys.stdout, level="INFO")
-log.add(f"{LOG_DIR}/application.log", rotation="1 MB", retention="10 days")
+from src.shared.logging import log
 
 color = "#ddfb00" if darkdetect.isDark() else "#2204ff"
 pen = QPen(QColor(color))
@@ -1,25 +1,19 @@
 import os
-import sys
 
-import loguru
 from PySide6 import QtCore, QtGui, QtWidgets
 from PySide6.QtCore import QDate
 from PySide6.QtGui import QRegularExpressionValidator
 
-from src import LOG_DIR, Icon
+from src import Icon
 from src.backend import Database, recreateElsaFile
 from src.logic import Prof, Semester, elsa_word_to_csv
+from src.shared.logging import log
 from src.ui.dialogs import ElsaAddEntry, popus_confirm
 from src.ui.widgets.filepicker import FilePicker
 from src.ui.widgets.graph import DataQtGraph
 
 from .widget_sources.elsa_maindialog_ui import Ui_Dialog
 
-log = loguru.logger
-log.remove()
-log.add(sys.stdout, level="INFO")
-log.add(f"{LOG_DIR}/application.log", rotation="1 MB", retention="10 days")
-
 
 class ElsaDialog(QtWidgets.QDialog, Ui_Dialog):
     def __init__(self):
@@ -399,6 +393,7 @@ class ElsaDialog(QtWidgets.QDialog, Ui_Dialog):
         # get the file path of the selected file based on it's row
         row = self.dokument_list_elsa.currentRow()
         file = self.dokument_list_elsa.item(row, 3).text()
+        file_location = file
         if file == "Database":
             filename = self.dokument_list_elsa.item(row, 0).text()
             filetype = self.dokument_list_elsa.item(row, 1).text()
@@ -415,6 +410,7 @@ class ElsaDialog(QtWidgets.QDialog, Ui_Dialog):
         log.debug(
             f"elsa_id: {elsa_id}, prof: {self.elsa_prof.currentText()}, semester: {self.elsa_semester.text()}, date: {self.elsa_date.text()}"
         )
+        if file_location != "Database":
         self.db.insertElsaFile(
             [
                 {
@@ -1,20 +1,12 @@
 import random
-import sys
 from typing import Any, Union
 
-import loguru
 from PySide6 import QtCore, QtGui, QtWidgets
 from PySide6.QtCharts import QCategoryAxis, QChart, QChartView, QLineSeries, QValueAxis
 from PySide6.QtGui import QColor, QPainter, QPen
 
-from src import LOG_DIR
 from src.logic.semester import Semester
-
-log = loguru.logger
-log.remove()
-log.add(sys.stdout, level="INFO")
-log.add(f"{LOG_DIR}/application.log", rotation="1 MB", retention="10 days")
 
 
 def mergedicts(d1: dict[str, Any], d2: dict[str, Any]):
     res: dict[str, Any] = {}
@@ -182,16 +174,3 @@ class DataQtGraph(QtWidgets.QWidget):
         # split the data back into x and y
         data = {"x": list(data.keys()), "y": list(data.values())}
         return data
-
-
-if __name__ == "__main__":
-    import sys
-
-    app = QtWidgets.QApplication(sys.argv)
-
-    graph_data = {"x": ["WiSe 25/26", "WiSe 24/25", "SoSe 25"], "y": [1, 2, 1]}
-    widget = DataGraph(
-        "ELSA Apparate pro Semester", graph_data, True, "Anzahl der Apparate"
-    )
-    widget.show()
-    sys.exit(app.exec())
@@ -1,26 +1,19 @@
-import sys
 from typing import List
 
-import loguru
 from natsort import natsorted
 from PySide6 import QtCore, QtGui, QtWidgets
 from PySide6.QtCore import Signal
 
-from src import LOG_DIR
 from src.backend import Database
 from src.logic import BookData, Prof, Semester, custom_sort, sort_semesters_list
 from src.logic.dataclass import Apparat
+from src.shared.logging import log
 from src.ui.dialogs import ApparatExtendDialog, Mail_Dialog, ReminderDialog
 from src.ui.widgets import DataQtGraph, StatusWidget
 from src.ui.widgets.signature_update import UpdaterThread
 
 from .widget_sources.search_statistic_page_ui import Ui_Dialog
 
-log = loguru.logger
-log.remove()
-log.add(sys.stdout, level="INFO")
-log.add(f"{LOG_DIR}/application.log", rotation="1 MB", retention="10 days")
-
 
 class MyComboBox(QtWidgets.QComboBox):
     def __init__(self, parent=None):
@@ -477,14 +470,6 @@ class SearchStatisticPage(QtWidgets.QDialog, Ui_Dialog):
         data = []
 
         for entry in result:
-            if "deletable" in params.keys():
-                sem = Semester().from_string(
-                    entry[8] if entry[8] is not None else entry[5]
-                )
-                log.info(f"Semester: {sem}")
-                if sem.isPastSemester(Semester()):
-                    data.append(entry)
-            else:
-                data.append(entry)
+            data.append(entry)
         self.tableWidget.setRowCount(len(data))
         if len(data) > 0:
@@ -1,28 +1,13 @@
-import sys
-from datetime import datetime
-
-import loguru
 from PySide6 import QtCore, QtWidgets
 from PySide6.QtMultimedia import QAudioOutput, QMediaPlayer
 
-from src import LOG_DIR
 from src.backend.catalogue import Catalogue
 from src.backend.database import Database
 from src.logic.SRU import SWB
+from src.shared.logging import log
 
 from .widget_sources.admin_update_signatures_ui import Ui_Dialog
 
-log = loguru.logger
-log.remove()
-log.add(sys.stdout, level="INFO")
-log.add(f"{LOG_DIR}/application.log", rotation="1 MB", retention="10 days")
-
-log.add(
-    f"{LOG_DIR}/{datetime.now().strftime('%Y-%m-%d')}.log",
-    rotation="1 day",
-    retention="1 month",
-)
-
 
 class UpdaterThread(QtCore.QThread):
     progress = QtCore.Signal(int)
@@ -1,23 +1,17 @@
-import sys
 from pathlib import Path
 from typing import Any
 
-import loguru
 from appdirs import AppDirs
 from PySide6 import QtCore, QtWidgets
 
-from src import LOG_DIR, settings
+from src import settings
 from src.backend import Database
+from src.shared.logging import log
 
 from .widget_sources.welcome_wizard_ui import Ui_Wizard
 
 appdirs = AppDirs("SemesterApparatsManager", "SAM")
 
-log = loguru.logger
-log.remove()
-log.add(sys.stdout, level="INFO")
-log.add(f"{LOG_DIR}/application.log", rotation="1 MB", retention="10 days")
-
 
 class WelcomeWizard(QtWidgets.QWizard, Ui_Wizard):
     def __init__(self, parent=None):
@@ -1,27 +1,17 @@
 import os
 from datetime import datetime
 from os.path import basename
 
 from docx import Document
 from docx.enum.text import WD_PARAGRAPH_ALIGNMENT
-from docx.shared import Pt, RGBColor, Cm
 from docx.oxml import OxmlElement
 from docx.oxml.ns import qn
-import os
-from os.path import basename
-from loguru import logger as log
-import sys
-from src import settings
+from docx.shared import Cm, Pt, RGBColor
 
 from src import settings
+from src.shared.logging import log
 
-logger = log
-logger.remove()
-logger.add("logs/application.log", rotation="1 week", retention="1 month", enqueue=True)
-log.add(
-    f"logs/{datetime.now().strftime('%Y-%m-%d')}.log",
-    rotation="1 day",
-    compression="zip",
-)
-
-# logger.add(sys.stderr, format="{time} {level} {message}", level="INFO")
-logger.add(sys.stdout)
 
 font = "Cascadia Mono"
@@ -29,8 +19,8 @@ font = "Cascadia Mono"
 def print_document(file: str):
     # send document to printer as attachment of email
     import smtplib
-    from email.mime.multipart import MIMEMultipart
     from email.mime.application import MIMEApplication
+    from email.mime.multipart import MIMEMultipart
     from email.mime.text import MIMEText
 
     smtp = settings.mail.smtp_server
@@ -108,7 +98,7 @@ class SemesterDocument:
         self.filename = filename
         if full:
             log.info("Full document generation")
-            self.cleanup
+            self.cleanup()
             log.info("Cleanup done")
             self.make_document()
             log.info("Document created")
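A genuine bug fix: a bare self.cleanup only evaluates the bound-method object and throws it away; nothing runs. Minimal demonstration:

    class Demo:
        def cleanup(self):
            print("cleaning")

    d = Demo()
    d.cleanup    # no-op: creates the bound method, discards it
    d.cleanup()  # prints "cleaning"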
test.py (40 lines changed)
@@ -1,11 +1,33 @@
-olddata = (
-    None,
-    "Christian",
-    "Berger",
-    "alexander.kirchner@ph-freiburg.de",
-    "764",
-    "Berger Christian",
-)
+from src.logic.semester import Semester
 
-print(olddata[1], olddata[2], olddata[3], olddata[4], olddata[5])
+sem1 = Semester.from_string("WiSe 23/24")
+print(sem1.value)
+sem2 = Semester.from_string("SoSe 24")
+print(sem2.value)
+sem3 = Semester()
+print(sem3.value)
+
+print("Comparing Sem1 with sem2")
+assert sem1.isPastSemester(sem2) is True
+assert sem1.isFutureSemester(sem2) is False
+assert sem1.isMatch(sem2) is False
+print("Comparing Sem2 with sem1")
+assert sem2.isPastSemester(sem1) is False
+assert sem2.isFutureSemester(sem1) is True
+assert sem2.isMatch(sem1) is False
+print("Comparing Sem1 with sem1")
+assert sem1.isPastSemester(sem1) is False
+assert sem1.isFutureSemester(sem1) is False
+assert sem1.isMatch(sem1) is True
+print("Comparing Sem2 with sem2")
+assert sem2.isPastSemester(sem2) is False
+assert sem2.isFutureSemester(sem2) is False
+assert sem2.isMatch(sem2) is True
+print("Comparing Sem3 with sem3")
+assert sem3.isPastSemester(sem3) is False
+assert sem3.isFutureSemester(sem3) is False
+assert sem3.isMatch(sem3) is True
+print("Comparing Sem1 with sem3")
+assert sem1.isPastSemester(sem3) is True
+assert sem1.isFutureSemester(sem3) is False
+assert sem1.isMatch(sem3) is False
uv.lock (generated, 28 lines changed)
@@ -165,15 +165,6 @@ wheels = [
     { url = "https://files.pythonhosted.org/packages/e5/48/1549795ba7742c948d2ad169c1c8cdbae65bc450d6cd753d124b17c8cd32/certifi-2025.8.3-py3-none-any.whl", hash = "sha256:f6c12493cfb1b06ba2ff328595af9350c65d6644968e5d3a2ffd78699af217a5", size = 161216 },
 ]
 
-[[package]]
-name = "chardet"
-version = "5.2.0"
-source = { registry = "https://pypi.org/simple" }
-sdist = { url = "https://files.pythonhosted.org/packages/f3/0d/f7b6ab21ec75897ed80c17d79b15951a719226b9fababf1e40ea74d69079/chardet-5.2.0.tar.gz", hash = "sha256:1b3b6ff479a8c414bc3fa2c0852995695c4a026dcd6d0633b2dd092ca39c1cf7", size = 2069618 }
-wheels = [
-    { url = "https://files.pythonhosted.org/packages/38/6f/f5fbc992a329ee4e0f288c1fe0e2ad9485ed064cac731ed2fe47dcc38cbf/chardet-5.2.0-py3-none-any.whl", hash = "sha256:e1cf59446890a00105fe7b7912492ea04b6e6f06d4b742b2c788469e34c82970", size = 199385 },
-]
-
 [[package]]
 name = "charset-normalizer"
 version = "3.4.3"
@@ -1159,19 +1150,6 @@ wheels = [
     { url = "https://files.pythonhosted.org/packages/b5/75/0bced57e6ba014adeeaa504205c4a92d5211b6c5daa20c0a80b06de6d0f4/pyzotero-1.6.11-py3-none-any.whl", hash = "sha256:949cdff92fd688fe70f609c928f09ab25a7d2aa05f35c575725d5bd0f395d3b4", size = 26368 },
 ]
 
-[[package]]
-name = "qtqdm"
-version = "0.2.0"
-source = { registry = "https://git.theprivateserver.de/api/packages/WorldTeacher/pypi/simple/" }
-dependencies = [
-    { name = "pyside6" },
-    { name = "tqdm" },
-]
-sdist = { url = "https://git.theprivateserver.de/api/packages/WorldTeacher/pypi/files/qtqdm/0.2.0/qtqdm-0.2.0.tar.gz", hash = "sha256:86f9b3764d0ebe32edba050de5aa4fb29e287c025d5197ad17e8e8da02155a88" }
-wheels = [
-    { url = "https://git.theprivateserver.de/api/packages/WorldTeacher/pypi/files/qtqdm/0.2.0/qtqdm-0.2.0-py3-none-any.whl", hash = "sha256:9a76e4086b09edb698861de0b28663e12ddda34ddb039be607bfd27a3aa07a0f" },
-]
-
 [[package]]
 name = "questionary"
 version = "2.1.0"
@@ -1240,7 +1218,7 @@ dependencies = [
     { name = "appdirs" },
     { name = "beautifulsoup4" },
     { name = "bump-my-version" },
-    { name = "chardet" },
+    { name = "charset-normalizer" },
     { name = "comtypes" },
     { name = "darkdetect" },
     { name = "docx2pdf" },
@@ -1258,7 +1236,6 @@ dependencies = [
     { name = "pyside6" },
     { name = "python-docx" },
     { name = "pyzotero" },
-    { name = "qtqdm" },
     { name = "ratelimit" },
     { name = "requests" },
 ]
@@ -1278,7 +1255,7 @@ requires-dist = [
     { name = "appdirs", specifier = ">=1.4.4" },
     { name = "beautifulsoup4", specifier = ">=4.13.5" },
     { name = "bump-my-version", specifier = ">=0.29.0" },
-    { name = "chardet", specifier = ">=5.2.0" },
+    { name = "charset-normalizer", specifier = ">=3.4.3" },
     { name = "comtypes", specifier = ">=1.4.9" },
     { name = "darkdetect", specifier = ">=0.8.0" },
     { name = "docx2pdf", specifier = ">=0.1.8" },
@@ -1296,7 +1273,6 @@ requires-dist = [
     { name = "pyside6", specifier = ">=6.9.1" },
     { name = "python-docx", specifier = ">=1.1.2" },
     { name = "pyzotero", specifier = ">=1.6.4" },
-    { name = "qtqdm" },
    { name = "ratelimit", specifier = ">=2.2.1" },
     { name = "requests", specifier = ">=2.32.3" },
 ]