rework database to store bookdata as a JSON string instead of a pickled dataclass dump; prevents failed decodings and makes the stored contents searchable if needed

2025-05-13 11:37:25 +02:00
parent 8f90247e98
commit 99b9f50784
2 changed files with 28 additions and 16 deletions
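The diff below serializes BookData with json.dumps(asdict(...), ensure_ascii=False) and reads it back with BookData().from_string(...). The from_string implementation lives outside the two changed files, so the following is only a minimal sketch of the round trip this commit relies on; the class body and the signature value are illustrative, not taken from the repository:

from dataclasses import asdict, dataclass
import json


@dataclass
class BookData:
    # Only the signature field is referenced in this diff; the real class has more fields.
    signature: str = ""

    def from_string(self, raw: str) -> "BookData":
        # Fill this instance from a JSON string produced by json.dumps(asdict(...)).
        for key, value in json.loads(raw).items():
            setattr(self, key, value)
        return self


book = BookData(signature="XYZ 123")
stored = json.dumps(asdict(book), ensure_ascii=False)   # text written to the bookdata column
restored = BookData().from_string(stored)                # text read back from the database
assert restored == book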

View File

@@ -5,7 +5,8 @@ from pathlib import Path
 from src import settings
 from typing import Any, List, Optional, Tuple, Union
 import datetime
+import json
+from dataclasses import asdict
 from src.backend.db import (
     CREATE_ELSA_FILES_TABLE,
     CREATE_ELSA_MEDIA_TABLE,
@@ -21,7 +22,6 @@ from src.backend.db import (
 from src.errors import AppPresentError, NoResultError
 from src.logic import ApparatData, BookData, Prof, Apparat, ELSA
 from src.logic.constants import SEMAP_MEDIA_ACCOUNTS
-from src.utils import create_blob, dump_pickle, load_pickle
 from .semester import Semester
 from string import ascii_lowercase as lower, digits, punctuation
 import sys
@@ -29,7 +29,7 @@ from loguru import logger as log
 logger = log
 logger.remove()
-logger.add("logs/application.log", rotation="1 week", enqueue=True)
+logger.add("logs/application.log", rotation="1 week", retention="1 month", enqueue=True)
 log.add(
     "logs/database.log",
 )
@@ -230,19 +230,23 @@ class Database:
         logger.debug(t_query)
         # # logger.debug(t_query)
         result = cursor.execute(t_query).fetchall()
-        result = [load_pickle(i[0]) for i in result]
+        result = [BookData().from_string(i[0]) for i in result]
         if bookdata in result:
             # logger.debug("Bookdata already in database")
             # check if the book was deleted in the apparat
             query = (
                 "SELECT deleted FROM media WHERE app_id=? AND prof_id=? AND bookdata=?"
             )
-            params = (app_id, prof_id, dump_pickle(bookdata))
+            params = (app_id, prof_id, json.dumps(asdict(bookdata), ensure_ascii=False))
             result = cursor.execute(query, params).fetchone()
             if result[0] == 1:
                 # logger.debug("Book was deleted, updating bookdata")
                 query = "UPDATE media SET deleted=0 WHERE app_id=? AND prof_id=? AND bookdata=?"
-                params = (app_id, prof_id, dump_pickle(bookdata))
+                params = (
+                    app_id,
+                    prof_id,
+                    json.dumps(asdict(bookdata), ensure_ascii=False),
+                )
                 cursor.execute(query, params)
                 conn.commit()
             return
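Note that both the if bookdata in result check and the WHERE bookdata=? lookups above compare serialized strings, so the JSON text has to come out byte-for-byte identical every time the same BookData is dumped. json.dumps over asdict() keeps the dataclass field order, but mixing serialization options would quietly break the match. A small illustration (dumps_bookdata is a hypothetical helper, not part of the commit):

import json
from dataclasses import asdict


def dumps_bookdata(bookdata) -> str:
    # One canonical serialization keeps stored strings comparable with '=' in SQL.
    return json.dumps(asdict(bookdata), ensure_ascii=False)


# Different options yield different text for the same data, so a
# "WHERE bookdata=?" lookup would no longer match the stored row:
sample = {"signature": "Ä 42"}
assert json.dumps(sample, ensure_ascii=False) != json.dumps(sample, ensure_ascii=True)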
@@ -250,7 +254,7 @@ class Database:
         query = (
             "INSERT INTO media (bookdata, app_id, prof_id,deleted) VALUES (?, ?, ?,?)"
         )
-        converted = dump_pickle(bookdata)
+        converted = json.dumps(asdict(bookdata), ensure_ascii=False)
         params = (converted, app_id, prof_id, 0)
         cursor.execute(query, params)
         logMessage = f"Added book with signature {bookdata.signature} to database, data: {converted}"
@@ -276,7 +280,7 @@ class Database:
"SELECT bookdata, id FROM media WHERE app_id=? AND prof_id=?", "SELECT bookdata, id FROM media WHERE app_id=? AND prof_id=?",
(app_id, prof_id), (app_id, prof_id),
) )
books = [(load_pickle(i[0]), i[1]) for i in result] books = [(BookData().from_string(i[0]), i[1]) for i in result]
book = [i for i in books if i[0].signature == signature][0][1] book = [i for i in books if i[0].signature == signature][0][1]
return book return book
@@ -297,7 +301,7 @@ class Database:
         result = self.query_db(
             "SELECT bookdata FROM media WHERE app_id=? AND prof_id=?", (app_id, prof_id)
         )
-        books: list[BookData] = [load_pickle(i[0]) for i in result]
+        books: list[BookData] = [BookData().from_string(i[0]) for i in result]
         book = [i for i in books if i.signature == signature][0]
         return book
@@ -336,7 +340,7 @@ class Database:
             return None
         ret = []
         for book in rdata:
-            bookdata = load_pickle(book[1])
+            bookdata = BookData().from_string(book[1])
             app_id = book[2]
             prof_id = book[3]
             if mode == 1:
@@ -395,14 +399,14 @@ class Database:
         Returns:
             BookData: The metadata of the book wrapped in a BookData object
         """
-        return load_pickle(
+        return BookData().from_string(
             self.query_db(
                 "SELECT bookdata FROM media WHERE id=?", (book_id,), one=True
             )[0]
         )

     def getBooks(
-        self, app_id: Union[str, int], prof_id: Union[str, int], deleted=0
+        self, app_id: Union[str, int], prof_id: Union[str, int], deleted: int = 0
     ) -> list[dict[str, Union[BookData, int]]]:
         """
         Get the Books based on the apparat id and the professor id
@@ -424,7 +428,7 @@ class Database:
         for result_a in qdata:
             data: dict[str, Any] = {"id": int, "bookdata": BookData, "available": int}
             data["id"] = result_a[0]
-            data["bookdata"] = load_pickle(result_a[1])
+            data["bookdata"] = BookData().from_string(result_a[1])
             data["available"] = result_a[2]
             ret_result.append(data)
         return ret_result
@@ -438,10 +442,10 @@ class Database:
             bookdata (BookData): The new metadata of the book
         """
         query = "UPDATE media SET bookdata= ? WHERE id=?"
-        book = dump_pickle(bookdata)
+        book = bookdata.to_dict
         self.query_db(query, (book, book_id))

-    def deleteBook(self, book_id):
+    def deleteBook(self, book_id: int):
         """
         Delete a book from the database
@@ -1656,3 +1660,11 @@ class Database:
             return data[0][0]
         else:
             return None
+
+    def fetch_one(self, query: str, args: tuple[Any, ...] = ()) -> tuple[Any, ...]:
+        connection = self.connect()
+        cursor = connection.cursor()
+        cursor.execute(query, args)
+        result = cursor.fetchone()
+        connection.close()
+        return result
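The new fetch_one helper takes a parameterized query like the existing query_db calls. A hypothetical use, with a made-up media id and db standing for an existing Database instance:

# 42 is an illustrative id; fetch_one returns the first matching row (or None if nothing matches).
row = db.fetch_one("SELECT bookdata FROM media WHERE id=?", (42,))
if row is not None:
    book = BookData().from_string(row[0])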

View File

@@ -17,7 +17,7 @@ CREATE_TABLE_APPARAT = """CREATE TABLE semesterapparat (
)""" )"""
CREATE_TABLE_MEDIA = """CREATE TABLE media ( CREATE_TABLE_MEDIA = """CREATE TABLE media (
id INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL, id INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
bookdata BLOB, bookdata TEXT,
app_id INTEGER, app_id INTEGER,
prof_id INTEGER, prof_id INTEGER,
deleted INTEGER DEFAULT (0), deleted INTEGER DEFAULT (0),
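With bookdata stored as TEXT, the "searchable" part of the commit message can be exercised directly in SQLite, whose built-in JSON functions can reach into the stored string. A hedged sketch (the app.db path and signature value are placeholders; json_extract requires a SQLite build with JSON support, compiled in by default since 3.38):

import sqlite3

# "app.db" stands in for the application's actual database path.
conn = sqlite3.connect("app.db")
rows = conn.execute(
    # json_extract reads a key out of the serialized bookdata column.
    "SELECT id, app_id, prof_id FROM media "
    "WHERE json_extract(bookdata, '$.signature') = ?",
    ("XYZ 123",),
).fetchall()
conn.close()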