Replaced serie.name with serie.metadata.title to use the actual series name instead of the local folder name; various smaller changes
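The central rename: search strings and log output are now built from serie.metadata.title instead of serie.name, because name mirrors the library folder on disk while metadata.title carries the title stored in Komga. A minimal sketch of the distinction, using stand-in types and example values rather than the real komgapi schema:

    # Sketch only: _Serie/_Meta and the values below are illustrative assumptions,
    # not the komgapi Series schema.
    from dataclasses import dataclass

    @dataclass
    class _Meta:
        title: str

    @dataclass
    class _Serie:
        name: str        # derived from the library folder name on disk
        metadata: _Meta  # curated metadata as shown in Komga

    serie = _Serie(
        name="frieren_beyond_journeys_end",
        metadata=_Meta(title="Frieren: Beyond Journey's End"),
    )
    print(serie.name)            # folder-style string, a poor feed search term
    print(serie.metadata.title)  # actual series title, now used for searching and logging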
@@ -4,7 +4,6 @@ import shutil
 import time
 import jaro
 from src.data.komga import KomgaAPI
-from komgapi import komgapi
 from komgapi.schemas.Series import Series
 from src.data.Feeds.nyaasi import NyaaFeed
 from komsuite_nyaapy import Torrent
@@ -31,10 +30,14 @@ from src.logic.db_schemas import (
     LASTCHECKED_KOMGRABBER,
     GET_LASTCHECKED_KOMGRABBER,
 )
+from src.logic.db_schemas_mariadb import (
+    MARIADB_KOMGRABBER_TABLE
+)
 import loguru
 from pathlib import Path
 from alive_progress import alive_it
-from typing import Any
+from typing import Any, Optional
+from anilistapi import AnilistAPI
 
 config = KomConfig()
 
@@ -49,6 +52,7 @@ Komga = KomgaAPI()
 
 LINE_CLEAR = "\x1b[2K"
 failed_items: list[str] = []
+incomplete: list[str] = []
 
 
 class mangaCli:
@@ -68,11 +72,9 @@ class mangaCli:
         self.series_data: Series
         self.volumes = []
         self.download_path = config.komgrabber.download_location
-        self.cache = KomCache | None
-        if config.komgrabber.use_cache:
-            self.cache = KomCache()
-            self.cache.create_table(KOMGRABBER_TABLE)
-        # self.allSeries = Komga.getAllSeries()
+        self.library_id = library_id
+        self.cache = KomCache()
         pass
 
     def download(self, feed_url: str):
@@ -110,6 +112,7 @@ class mangaCli:
 
             # print(f"Filename: {file}")
             file_move = False
+            new_folder = None
             if file.endswith(".cbz") or file.endswith(".cbr"):
                 new_folder = Path(self.download_path, self.serie)
                 os.makedirs(new_folder, exist_ok=True)
@@ -134,9 +137,14 @@ class mangaCli:
             # if progress remains the same for 30 seconds, stop the download
             progress = self.dl.check_progress()
 
-            time.sleep(45)
+            time.sleep(90)
             n_progress = self.dl.check_progress()
-            dl_name = self.dl.api.get_downloads()[0].name
+            try:
+                dl_name = self.dl.api.get_downloads()[0].name
+            except IndexError:
+                log.error("No downloads found, skipping...")
+                return False
+
             if not folder_similarity(self.serie.lower(), dl_name.lower()) > 0.8:
                 log.error(
                     f"Folder name {dl_name} does not match {self.serie}, skipping download"
@@ -144,63 +152,14 @@ class mangaCli:
                 self.dl.api.get_downloads()[0].remove(force=True)
                 dl_complete = False
                 break
-            if not check_done:
-                local_files = os.listdir(f"{self.download_path}")
-                for f in local_files:
-                    # print(f)
-                    if os.path.isdir(f"{self.download_path}/{f}"):
-                        local_files.extend(
-                            [
-                                f"{self.download_path}/{f}/{file}"
-                                for file in os.listdir(f"{self.download_path}/{f}")
-                            ]
-                        )
-                local_files = [
-                    file
-                    for file in local_files
-                    if file.endswith(".cbz") or file.endswith(".cbr")
-                ]
-                local_volumes = Komga.getVolumes(self.series_data.id)
-                # if not local_files:
-                #     dl_complete=False
-                #     break
-                local_files_volumes = []
-                for file in local_files:
-                    vol_regex = r"(v\d{1,3}(-\d{1,3})?)|(Vol\. \d{1,3})"
-                    # if the file does not match the naming convention, skip it
-                    if re.search(vol_regex, file):
-                        match = re.search(vol_regex, file)
-                        if match:
-                            vol = match.group(0).replace("v", "").replace("Vol. ", "")
-                            if "-" in vol:
-                                local_files_volumes.extend(
-                                    [int(volume) for volume in vol.split("-")]
-                                )
-                                continue
-                            vol = int(vol)
-                            local_files_volumes.append(vol)
 
-                log.info(
-                    "Grabbed volumes: {}, Komga volumes: {}".format(
-                        sorted(local_files_volumes), local_volumes
-                    )
-                )
-                if local_files_volumes == []:
-                    pass
-                # check íf any local_file_volumes are not in local_volumes
-                if all([vol in local_volumes for vol in local_files_volumes]):
-                    log.info("all volumes downloaded, stopping...")
-                    dl_complete = False
-                    break
-                else:
-                    log.info("not all volumes downloaded, continuing...")
-                    check_done = True
             if progress == n_progress:
                 log.debug(
                     "Progress has not changed for 30 seconds, stopping the download"
                 )
                 self.dl.api.get_downloads()[0].remove(force=True)
                 dl_complete = False
+                incomplete.append(dl_name)
                 break
             else:
                 pass
@@ -244,14 +203,28 @@ class mangaCli:
             - list[dict]: a list of dictionaries containing the entries to download
         """
         serie = data.name
 
         series_id = data.id
-        vols = Komga.getVolumes(series_id=series_id, unpaged=True)
+        vols = (
+            Komga.getVolumes(series_id=series_id, unpaged=True)
+            if series_id is not None
+            else []
+        )
         feed_titles = NyaaFeed().search(serie)
         f_d = []
         if feed_titles == []:
             failed_items.append(serie)
         added_max_vols = vols if vols else [0]
         for entry in feed_titles:
+            if not any(
+                filetype in entry.filetypes
+                for filetype in config.komgrabber.manga.valid_file_extensions
+            ):
+                log.info(
+                    f"Skipping {entry.name}, Reason: Filetype not in valid filetypes, wanted: {config.komgrabber.manga.valid_file_extensions}, found: {entry.filetypes}"
+                )
+                continue
             if entry.seeders > 0:
                 if (
                     serie.lower() in entry.name.lower()
@@ -275,16 +248,22 @@ class mangaCli:
         # return entry with the most volumes
         return f_d
 
-    def media_grabber(self, serie: Series, bar: Any = None) -> bool:
+    def media_grabber(self, serie: Series, bar: Optional[Any] = None) -> bool:
         result = self.process_serie(serie)
 
         total_new_volumes: list[tuple[Torrent, list[int]]] = []
         fs_per_volume = config.komgrabber.manga.min_filesize
-        series_volumes = Komga.getVolumes(series_id=serie.id, unpaged=True)
+        series_volumes = (
+            Komga.getVolumes(series_id=serie.id, unpaged=True)
+            if serie.id is not None
+            else [0]
+        )
+        max_new_volume: int = 0
         if result is None or result == []:
-            log.info(f"Could not find any new volumes for {serie.name}")
+            log.info(f"Could not find any new volumes for {serie.metadata.title}")
             return False
-        bar.text(f"Downloading new volumes for {serie.name}...")
+        if bar:
+            bar.text(f"Downloading new volumes for {serie.metadata.title}...")
         for res in result:
             log.info(f"{res.name}, Volumes: {res.volumes}")
             if res.volumes != [0]:
@@ -308,78 +287,127 @@ class mangaCli:
         total_new_volumes = sorted(
             total_new_volumes, key=lambda x: len(x[1]), reverse=True
         )
-        res = total_new_volumes[0][0]
-
-        log.info(f"Found {len(total_new_volumes[0][1])} new entries for {serie.name}")
+        log.info(f"Found {len(total_new_volumes)} new results for {serie.name}")
+        for res, new_volumes in total_new_volumes:
+            if "epub" in res.filetypes and len(res.filetypes) == 1:
+                log.info(
+                    f"Skipping {res.name}, Reason: Epub file, no other filetypes present"
+                )
+                continue
+            if (
+                max(new_volumes) > max(series_volumes)
+                and max(new_volumes) > max_new_volume
+            ):
+                max_new_volume = max(new_volumes)
+                log.info(
+                    "Found new volumes: {} for series: {}, downloading".format(
+                        new_volumes, serie.name
+                    )
+                )
 
         # log.info(
         #     f"Found {len(new_volumes)} new {'volume' if len(new_volumes) == 1 else 'volumes'} for {serie.name}"
         # )
         # # check if the new volumes were aleady downloaded
         # log.info(f"current volumes: {series_volumes}, new volumes: {new_volumes}")
         # # print(result)
 
         if self.download(res.download_url) is True:
             log.success(f"Downloaded {res.name}")
             # self.rename_folder_and_files(self.file, komga_data=serie, remove=True)
             # self.move_to_komga(serie=entry)
             log.info("Renaming and tagging files")
             rename()
             if not config.komgrabber.get_chapters:
                 detect_chapters()
             tag_folder()
             if rename_folder(series=serie):
                 move(self.download_path, config.komga.media_path)
             else:
                 log.info("Seems like we grabbed the wrong series, oops")
-                failed_items.append(serie.name)
+                failed_items.append(serie.metadata.title)
         # clear folder
         # remove the download dir and create it anew
         remove_empty_folders(self.download_path)
         safe_remove_directory(self.download_path)
         return True
 
-    def search_for_new_volumes(self):
-        cache_present = False
-        if self.cache:
-            cache_present = True
-        series = Komga.series_controller.getAllSeries(
-            body={
-                "condition": {
-                    "anyOf": [
-                        {"seriesStatus": {"operator": "is", "value": "ONGOING"}},
-                        {"seriesStatus": {"operator": "is", "value": "HIATUS"}},
-                        {"seriesStatus": {"operator": "is", "value": "ENDED"}},
-                    ]
+    def search_for_new_volumes(self, all: bool = False):
+        query = {
+            "condition": {
+                "allOf": [
+                    {
+                        "anyOf": [
+                            {
+                                "seriesStatus": {
+                                    "operator": "is",
+                                    "value": "ONGOING"
+                                }
+                            },
+                            {
+                                "seriesStatus": {
+                                    "operator": "is",
+                                    "value": "HIATUS"
+                                }
+                            },
+                            {
+                                "allOf": [
+                                    {
+                                        "seriesStatus": {
+                                            "operator": "is",
+                                            "value": "ENDED"
+                                        }
+                                    },
+                                    {
+                                        "complete": {
+                                            "operator": "isFalse"
+                                        }
+                                    }
+                                ]
+                            }
+                        ]
+                    }
+                ]
+            }
+        }
+        if self.library_id != "":
+            query["condition"]["allOf"].append(
+                {
+                    "libraryId": {
+                        "operator": "is",
+                        "value": self.library_id,
+                    }
                 }
-            }
+            )
+        series = Komga.series_controller.getAllSeries(
+            body= query
         )
 
         komga_series: list[Series] = []
         shutil.rmtree(self.download_path, ignore_errors=True)
 
         os.mkdir(self.download_path)
-        log.info(f"{len(series)} series found")
+        # log.debug(f"Series: {series}")
         today = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime())
         today = time.mktime(time.strptime(today, "%Y-%m-%d %H:%M:%S"))
 
-        def cache_ending(bar):
-            bar.title("Updating cache entries")
-            bar.text("Cache updated, continuing...")
+        def cache_ending(bar): # type:ignore
+            bar.title("Updating cache entries") # type:ignore
+            bar.text("Cache updated, continuing...") # type:ignore
 
-        def series_ending(bar):
-            bar.title("Completed searching for new volumes")
-            bar.text("All series checked, exiting...")
+        def series_ending(bar): # type:ignore
+            bar.title("Completed searching for new volumes") # type:ignore
+            bar.text("All series checked, exiting...") # type:ignore
 
-        def skip_ending(bar):
-            bar.title("Skipping series")
-            bar.text("Skipped series, continuing...")
+        def skip_ending(bar): # type:ignore
+            bar.title("Skipping series") # type:ignore
+            bar.text("Skipped series, continuing...") # type:ignore
 
-        def ended_ending(bar):
-            bar.title("Skipping finished series")
-            bar.text("Finished check, continuing to search new volumes...")
+        def ended_ending(bar): # type:ignore
+            bar.title("Skipping finished series") # type:ignore
+            bar.text("Finished check, continuing to search new volumes...") # type:ignore
 
-        if cache_present:
+        if config.komgrabber.use_cache:
             log.info("Cache present, checking for missing entries")
             cacheBar = alive_it(
                 series,
@@ -389,9 +417,9 @@ class mangaCli:
                 receipt_text=True,
             )
             for serie in cacheBar:
-                data = self.cache.query(SELECT_KOMGRABBER, (serie.id,))
+                data = self.cache.fetch_one(SELECT_KOMGRABBER, {"series_id": serie.id})
                 log.debug(
-                    f"Cache data: {data}, Serie: {serie.name}, Status: {serie.metadata.status}"
+                    f"Cache data: {data}, Serie: {serie.metadata.title}, Status: {serie.metadata.status}"
                 )
                 if data:
                     if data[3] == serie.metadata.status:
@@ -399,52 +427,69 @@ class mangaCli:
                 elif data and data[3] != serie.metadata.status:
                     self.cache.update(
                         UPDATE_KOMGRABBER,
-                        (serie.name, serie.metadata.status, serie.id),
+                        {
+                            "name": serie.metadata.title,
+                            "status": serie.metadata.status,
+                            "series_id": serie.id,
+                        },
                     )
                     log.info(f"Serie {serie.name} updated")
                     time.sleep(0.05)
                 else:
                     self.cache.insert(
-                        INSERT_KOMGRABBER, (serie.name, serie.id, serie.metadata.status)
+                        INSERT_KOMGRABBER,
+                        {
+                            "name": serie.name,
+                            "series_id": serie.id,
+                            "status": serie.metadata.status,
+                        }
                     )
-                    log.info(f"Serie {serie.name} added to cache")
+                    log.info(f"Serie {serie.metadata.title} added to cache")
 
             log.debug("Cache created, added missing entries")
             time.sleep(0.5)
-        if cache_present:
-            skipBar = alive_it(
-                series,
-                bar="smooth",
-                spinner="dots",
-                receipt_text=True,
-                finalize=skip_ending,
-            )
-            for serie in skipBar:
-                last_checked = self.cache.query(
-                    GET_LASTCHECKED_KOMGRABBER, (serie.id,)
-                )[0]
-                # convert timestamp to epoch float for comparison
-                if last_checked:
+        if all is True:
+            log.info("Searching for all series in the database")
+            komga_series = series
+        else:
+            if config.komgrabber.use_cache:
+                skipBar = alive_it(
+                    series,
+                    bar="smooth",
+                    spinner="dots",
+                    receipt_text=True,
+                    finalize=skip_ending,
+                )
+                for serie in skipBar:
+                    last_checked = self.cache.fetch_one(
+                        GET_LASTCHECKED_KOMGRABBER, {"series_id": serie.id}
+                    )[0]
+                    log.debug(
+                        f"Last checked: {last_checked}, Serie: {serie.name}, Status: {serie.metadata.status}"
+                    )
+                    # convert timestamp to epoch float for comparison
+
+                    if last_checked == 0 or last_checked is None:
+                        last_checked = "2024-01-01 00:00:00"
                     last_checked = time.mktime(
-                        time.strptime(last_checked, "%Y-%m-%d %H:%M:%S")
+                        time.strptime(str(last_checked), "%Y-%m-%d %H:%M:%S")
                     )
                     # if difference between last_checked and today is less than config.komgrabber.cache_check_interval, skip entry
                     time_difference = time_checker(last_checked, today)
-                    # if time difference is less than set in the settings and the series status is not ended and the book count is not the same as the total book count, skip the entry
-                    if time_difference < config.komgrabber.cache_check_interval:
-                        komga_series.append(serie)
-                        log.debug(f"Added {serie.name} to the list")
-                    if (
-                        serie.metadata.status == "ENDED"
-                        and serie.booksCount == serie.metadata.totalBookCount
-                    ):
+
+                    # if time difference is less than set in the settings and the series status is not ended and the book count is not the same as the total book count, skip the entry
+                    if time_difference >= config.komgrabber.cache_check_interval:
+                        komga_series.append(serie)
                         log.debug(
-                            f"Serie {serie.name} if finished and has all volumes present, skipping..."
+                            f"Added {serie.name} to the checking list, as the last check was {time_difference} days ago"
                         )
                     else:
-                        komga_series.append(serie)
-                    time.sleep(0.005)
+                        log.debug(
+                            f"Skipped {serie.name} as the last check was {time_difference} days ago, whereas the set interval is {config.komgrabber.cache_check_interval} days"
+                        )
+
+                    time.sleep(0.005)
 
         log.debug(len(komga_series))
         log.info("Finished checking cache, continuing...")
         log.info("There are {} series to check".format(len(komga_series)))
@@ -455,39 +500,104 @@ class mangaCli:
             title="Searching for new volumes",
         )
         for serie in pBar:
-            pBar.text(f"Searching for new volumes for {serie.name}")
+            pBar.text(f"Searching for new volumes for {serie.metadata.title}")
             log.info(
-                f"searching for new volumes for {serie.name}, currently at {serie.booksCount} volumes"
+                f"searching for new volumes for {serie.metadata.title}, currently at {serie.booksCount} volumes"
             )
             self.series_data = serie
-            self.serie = serie.name
+            self.serie = (
+                serie.metadata.title
+            ) # replaced serie.name with serie.metadata.title
             self.serie_id = serie.id
-            self.media_grabber(serie, bar=pBar)
-            if cache_present:
-                self.cache.update(LASTCHECKED_KOMGRABBER, (serie.id,))
-            time.sleep(5)
-            # print("done", serie.name)
+            found = self.media_grabber(serie, bar=pBar)
+            if config.komgrabber.use_cache:
+                if found:
+                    self.cache.update(LASTCHECKED_KOMGRABBER, {"series_id": serie.id})
+                    log.info(
+                        f"Cache updated for {serie.metadata.title}, new volumes found"
+                    ) # updated to use serie.metadata.title
+                else:
+                    self.cache.update(LASTCHECKED_KOMGRABBER, {"series_id": serie.id})
+
+                    log.info(
+                        f"Cache updated for {serie.metadata.title}, no new volumes found"
+                    ) # updated to use serie.metadata.title
+            # self.cache.update(LASTCHECKED_KOMGRABBER, {"series_id": serie.id})
+            # log.info("Cache updated")
         return self
 
-    def search_for_series(self, series: list[str]):
-        cache_present = False
-        if self.cache:
-            cache_present = True
+    def search_for_new_series(self, series_id: str) -> bool:
+        anilist = AnilistAPI()
+        series = anilist.get_manga(series_id)
+        if series is None:
+            log.error(f"Could not find series with id {series_id}")
+            return False
+        komga_results = Komga.series_controller.getAllSeries(
+            body={
+                "condition": {
+                    "anyOf": [
+                        {
+                            "title": {
+                                "operator": "contains",
+                                "value": series.title.english
+                                if series.title.english
+                                else series.title.romaji,
+                            }
+                        },
+                    ]
+                }
+            }
+        )
+        if not komga_results:
+            log.error(f"Could not find series with title {series.title.english}")
+            Komga_fake = Series(
+                name=series.title.english
+                if series.title.english
+                else series.title.romaji,
+                booksCount=0,
+                metadata={},
+                booksMetadata={},
+            )
+            rbar = alive_it(
+                [Komga_fake],
+                title="Searching for new volumes",
+                bar="smooth",
+                spinner="dots",
+                receipt_text=True,
+            )
+            for Komga_fake in rbar:
+                rbar.text(f"Searching for new volumes for {Komga_fake.name}")
+                log.info(
+                    f"searching for new volumes for {Komga_fake.name}, currently at {Komga_fake.booksCount} volumes"
+                )
+                self.serie = Komga_fake.name
+                result = self.media_grabber(Komga_fake, bar=rbar)
+                if result is False:
+                    rbar.title("No new volumes found")
+                    log.error(f"Could not find any new volumes for {Komga_fake.name}")
+                    return False
+
+            return False
+        else:
+            return True
+
+    def search_for_series(self, serie: list[str]):
+        cache_present = config.komgrabber.use_cache
         shutil.rmtree(self.download_path, ignore_errors=True)
         os.mkdir(self.download_path)
         series_request = []
-        for serie in series:
+        for series in serie:
             series_request.append(
-                {"title": {"operator": "is", "value": serie}},
+                {"title": {"operator": "is", "value": series}},
             )
         request_body = {"condition": {"anyOf": series_request}}
-        series = Komga.series_controller.getAllSeries(body=request_body)
+        komga_series = Komga.series_controller.getAllSeries(body=request_body)
 
         def series_ending(bar):
             bar.title("Completed searching for new volumes")
             bar.text("All series checked, exiting...")
 
-        pBar = alive_it(series, finalize=series_ending)
+        pBar = alive_it(komga_series, finalize=series_ending)
         for serie in pBar:
             pBar.text(f"Searching for new volumes for {serie.name}")
             log.info(
@@ -497,13 +607,12 @@ class mangaCli:
             self.serie = serie.name
             self.serie_id = serie.id
 
-            self.media_grabber(serie)
+            self.media_grabber(serie, bar=pBar)
             if cache_present:
-                self.cache.update(LASTCHECKED_KOMGRABBER, (serie.id,))
+                self.cache.update(LASTCHECKED_KOMGRABBER, {"series_id": serie.id})
 
             time.sleep(5)
             # print("done", serie.name)
-        return self
 
         pass
 
@@ -530,15 +639,13 @@ def avail_check():
     return (True, komga_avail)
 
 
-def search_all():
-    mangaCli().search_for_new_volumes()
-    komga = komgapi(config.komga.user, config.komga.password, config.komga.url)
-    libraries = komga.library_controller.getLibraries()
-    for library in libraries:
-        komga.library_controller.scanLibrary(library.id)
-        print(f"Initialized scan for library {library.name}")
-    # update_state()
+def search_all(libraryName: str ="", all: bool = False):
+    libid = config.komga.libraries.get(libraryName)
+    mangaCli(library_id=libid).search_for_new_volumes(all)
+    Komga.library_controller.scanLibrary(libid)
+    print(f"Initialized scan for library {libraryName}")
     print("Failed series:\n", failed_items)
+    print("Incomplete series:\n", incomplete)
 
 
 def search_series(series: list[str]):
@@ -547,5 +654,20 @@ def search_series(series: list[str]):
     print("Failed series:\n", failed_items)
 
 
+def search_requested():
+    cache = KomCache()
+    series = cache.query("SELECT manga_id from manga_requests WHERE grabbed = 0")
+    if series:
+        for serie in series:
+            result = mangaCli().search_for_new_series(int(serie[0]))
+            if result:
+                cache.update(
+                    "UPDATE manga_requests SET grabbed = 1 WHERE manga_id = :manga_id",
+                    {"manga_id": serie[0]},
+                )
+    else:
+        print("No series found to grab")
+
+
 if __name__ == "__main__":
     search_all()
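For reference, a hypothetical invocation of the two entry points this diff reworks or introduces; the library name "Manga" is an assumption, not something defined here:

    # Hypothetical usage sketch of the new entry points.
    search_all(libraryName="Manga", all=False)  # check series (cached or all) and trigger a scan of that library
    search_requested()  # grab series queued in the manga_requests table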