more AI optimizations, reworked logger
@@ -1,28 +1,17 @@
 import re
 import sys
 import xml.etree.ElementTree as ET
 from dataclasses import dataclass, field
 from datetime import datetime
 from enum import Enum
-from typing import Dict, Iterable, List, Optional, Tuple
+from typing import Dict, Iterable, List, Optional, Tuple, Union

-import loguru
 import requests
 from requests.adapters import HTTPAdapter

-from src import LOG_DIR
+# centralized logging used via src.shared.logging
 from src.logic.dataclass import BookData
+from src.shared.logging import log

-log = loguru.logger
-log.remove()
-log.add(sys.stdout, level="INFO")
-log.add(f"{LOG_DIR}/application.log", rotation="1 MB", retention="10 days")
-log.add(
-    f"{LOG_DIR}/{datetime.now().strftime('%Y-%m-%d')}.log",
-    rotation="1 day",
-    retention="1 month",
-)
+log  # ensure imported logger is referenced

 # -----------------------
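
Note: the loguru bootstrap deleted above has presumably moved into src.shared.logging, which this file now imports from. That module is not part of this diff, so the following is only a minimal sketch of what it might contain, reusing the sink settings and the LOG_DIR constant from the removed code; the module path is the one named in the new import.

# src/shared/logging.py -- hypothetical reconstruction, not shown in this commit
import sys
from datetime import datetime

import loguru

from src import LOG_DIR

log = loguru.logger
log.remove()                       # drop loguru's default stderr sink
log.add(sys.stdout, level="INFO")  # console sink
log.add(f"{LOG_DIR}/application.log", rotation="1 MB", retention="10 days")
log.add(
    f"{LOG_DIR}/{datetime.now().strftime('%Y-%m-%d')}.log",
    rotation="1 day",
    retention="1 month",
)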
@@ -186,7 +175,9 @@ def parse_echoed_request(root: ET.Element) -> Optional[EchoedSearchRequest]:
     )


-def parse_search_retrieve_response(xml_str: str) -> SearchRetrieveResponse:
+def parse_search_retrieve_response(
+    xml_str: Union[str, bytes],
+) -> SearchRetrieveResponse:
     root = ET.fromstring(xml_str)

     # Root is zs:searchRetrieveResponse
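
Note: widening the parameter to Union[str, bytes] matters because ET.fromstring accepts both, but only bytes input lets the parser honor the encoding declaration in the XML prolog; a str has already been decoded, so the declaration is ignored. A small self-contained demonstration (the payload here is made up):

import xml.etree.ElementTree as ET

# UTF-8 document that declares its own encoding.
raw = '<?xml version="1.0" encoding="utf-8"?><name>Müller</name>'.encode("utf-8")

# Bytes input: the parser reads the declaration and decodes correctly.
assert ET.fromstring(raw).text == "Müller"

# Pre-decoding with the wrong charset (as resp.text can when the server
# omits a charset header) bakes in mojibake the parser cannot undo.
text = raw.decode("latin-1")
assert ET.fromstring(text).text == "MÃ¼ller"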
@@ -598,12 +589,12 @@ class Api:
             "Accept-Charset": "latin1,utf-8;q=0.7,*;q=0.3",
         }
-        # Use persistent session and set timeouts to avoid hanging
-        response = self._session.get(url, headers=headers, timeout=(3.05, 20))
-        if response.status_code != 200:
-            raise Exception(f"Error fetching data from SWB: {response.status_code}")
-        # extract top-level response (decode to text for the XML parser)
-        response = parse_search_retrieve_response(response.text)
-        return response.records
+        resp = self._session.get(url, headers=headers, timeout=(3.05, 60))
+        if resp.status_code != 200:
+            raise Exception(f"Error fetching data from SWB: {resp.status_code}")
+        # Parse using raw bytes (original behavior) to preserve encoding edge cases
+        sr = parse_search_retrieve_response(resp.content)
+        return sr.records

     def getBooks(self, query_args: Iterable[str]) -> List[BookData]:
         records: List[Record] = self.get(query_args)
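
Note: self._session and the HTTPAdapter import suggest the class keeps one pooled session, but its construction is outside this hunk, so the builder below is an assumption about what it might look like, not the project's actual code; the URL is a placeholder. The timeout tuple is requests' (connect, read) form: 3.05 s to establish the connection, 60 s for each read.

import requests
from requests.adapters import HTTPAdapter
from urllib3.util.retry import Retry

def make_session() -> requests.Session:
    """Hypothetical builder for Api._session: pooled connections plus
    bounded retries on transient server errors."""
    session = requests.Session()
    retry = Retry(total=3, backoff_factor=0.5,
                  status_forcelist=(500, 502, 503, 504))
    adapter = HTTPAdapter(max_retries=retry)
    session.mount("https://", adapter)
    session.mount("http://", adapter)
    return session

# (3.05, 60) = (connect timeout, read timeout), matching the call in get().
resp = make_session().get("https://example.org/sru", timeout=(3.05, 60))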