Compare commits

5 Commits

| SHA1 |
|---|
| 4d0d412d38 |
| f455918ef4 |
| 03d1d50a93 |
| 85e2c85aca |
| 4f549920a0 |
cli.py (128 lines changed)
@@ -1,40 +1,71 @@
-from src.logic.cli import avail_check, search_all
-import os
 import argparse
-from src.logic.utils import move, tag_folder, rename, detect_chapters
-from komconfig import KomConfig
-from src.aria import launch_aria2c, kill_aria2c
-from src.data.komga import scan_komga
+import os
+import shutil
 from pathlib import Path

+from komconfig import KomConfig
+
+from src.data.komga import scan_komga
+from src.logic.search import avail_check, search_all, search_requested, search_series
+from src.logic.utils import detect_chapters, move, rename, tag_folder
+
 cfg: KomConfig = KomConfig()


 def grabber(args):
     nyaa, komga = avail_check()
-    os.system(f"rm -rf {cfg.komgrabber.download_location}")
-    os.mkdir(cfg.komgrabber.download_location)
+    # os.system(f"rm -rf {cfg.komgrabber.download_location}")
+    # os.mkdir(cfg.komgrabber.download_location)
+    if not os.path.exists(cfg.komgrabber.tag_location):
+        os.mkdir(cfg.komgrabber.tag_location)
+    if not os.path.exists(cfg.komgrabber.tag_location):
+        os.mkdir(cfg.komgrabber.tag_location)

     if nyaa is True and komga is True:
-        search_all()
-        if args.scan:
-            scan_komga()
-        if cfg.komgrabber.aria2.kill_after_completion:
+        if args.request:
+            search_requested()
+        if args.library is None:
+            libraries = cfg.komga.libraries
+            if not libraries:
+                print("No libraries found in Komga, please check your configuration")
+                return
+            for library in libraries:
+                if not library.id:
+                    print(
+                        f"Library {library} has no id, please check your configuration"
+                    )
+                    continue
+                search_all(library, args.all)
+        else:
+            library = cfg.komga.getLibraryByName(args.library)
+            search_all(library, args.all)
+
+        if (
+            cfg.komgrabber.downloader == "aria2"
+            and cfg.komgrabber.downloader_settings.kill_after_completion
+        ):
             # kill aria2c
             os.system("killall aria2c")
     else:
         print("No connection established, quitting")


-def grab_series(series: list[str]):
+def grab_series(args):
     # nyaa, komga = avail_check()
-    os.system(f"rm -rf {cfg.komgrabber.download_location}")
-    os.mkdir(cfg.komgrabber.download_location)
+    #
+    if not args.series:
+        print("No series provided to tag")
+        return
+    series = [series.strip() for series in args.series]
+    library = cfg.komga.getLibraryByName(args.library)
+    if not library:
+        print(f"Library {args.library} not found, please check your configuration")
+        return
+    search_series(library, series)


 def file_operations(args):
-    path = cfg.komgrabber.download_location
-    if args.path: # type: ignore
-        path = Path(args.path) # type: ignore
+    path = Path(args.path) # type: ignore
     if args.rename:
         rename(path)
     if args.detect_chapters:
@@ -42,24 +73,55 @@ def file_operations(args):
     if args.tag:
         tag_folder(path)
     if args.move:
-        move(path)
+        move(path, args.library)
     if args.scan:
         scan_komga()

+    # remove all folders and files in path
+    for folder in os.listdir(path):
+        folder_path = os.path.join(path, folder)
+        if os.path.isfile(folder_path):
+            os.remove(folder_path)
+        elif os.path.isdir(folder_path):
+            shutil.rmtree(folder_path)
+
+    # os.rmdir(path)


 def main():
     parser = argparse.ArgumentParser(description="KomGrabber CLI")
     subparsers = parser.add_subparsers(dest="command", required=True)

     # tag subcommand
-    tag_parser = subparsers.add_parser("search", help="Run search operation")
+    tag_parser = subparsers.add_parser(
+        "search",
+        help="Run search operation. After the search is completed, the library will be scanned to detect new or updated series.",
+    )
+    tag_parser.add_argument(
+        "library",
+        nargs="?", # makes it optional
+        default=None, # or "" if you prefer an empty string
+        metavar="[library]", # nicer usage display
+        help="Library to search in (e.g. 'manga', 'anime', leave empty for all)",
+    )
+
     tag_parser.add_argument(
         "-v", "--verbose", action="store_true", help="Enable verbose output"
     )

     tag_parser.add_argument(
-        "--scan",
+        "-a",
+        "--all",
         action="store_true",
-        help="Scan the library after downloading",
+        default=False,
+        help="Search for all series in the database",
+    )
+    tag_parser.add_argument(
+        "-r",
+        "--request",
+        action="store_true",
+        default=False,
+        help="Search for the requested series in the database",
     )

     tag_parser.set_defaults(func=grabber)
@@ -70,7 +132,7 @@ def main():
         "-p",
         "--path",
         type=str,
-        default=cfg.komgrabber.download_location,
+        default=cfg.komgrabber.tag_location,
         help="Path to use for actions (overwrites default path).",
     )
     file_ops.add_argument(
@@ -110,8 +172,28 @@ def main():
         action="store_true",
         help="Scan the library after downloading",
     )
+    file_ops.add_argument(
+        "--library",
+        type=str,
+        help="Specify the library to use for operations",
+        default="Manga",
+    )
     file_ops.set_defaults(func=file_operations)

+    series_tagger = subparsers.add_parser(
+        "search-series", help="Search series in the library"
+    )
+    series_tagger.add_argument(
+        "series",
+        type=str,
+        nargs="+",
+        help="Series to search (e.g. 'One Piece', 'Naruto')",
+    )
+    series_tagger.add_argument(
+        "--library", type=str, help="Library to use for tagging", default="Manga"
+    )
+    series_tagger.set_defaults(func=grab_series)
+
     args = parser.parse_args()
     args.func(args)
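The subcommand wiring above follows the usual argparse pattern: one sub-parser per command, an optional positional argument via `nargs="?"`, and dispatch through `set_defaults(func=...)`. A minimal self-contained sketch of that pattern (the command and option names here are illustrative, not the full KomGrabber CLI):

```python
import argparse


def run_search(args):
    # args.library is None when the optional positional was omitted
    print(f"searching library={args.library!r}, all={args.all}")


def main():
    parser = argparse.ArgumentParser(description="example CLI")
    subparsers = parser.add_subparsers(dest="command", required=True)

    search = subparsers.add_parser("search", help="run a search")
    search.add_argument("library", nargs="?", default=None, metavar="[library]")
    search.add_argument("-a", "--all", action="store_true", default=False)
    # each sub-parser stores its handler; main() just calls args.func(args)
    search.set_defaults(func=run_search)

    args = parser.parse_args()
    args.func(args)


if __name__ == "__main__":
    main()
```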
pyproject.toml

@@ -5,10 +5,14 @@ description = "Add your description here"
 readme = "README.md"
 requires-python = ">=3.13"
 dependencies = [
+    "alive-progress>=3.2.0",
+    "anilistapi",
     "aria2p>=0.12.1",
     "bencodepy>=0.9.5",
     "feedparser>=6.0.11",
+    "httpx-retries>=0.3.2",
     "jaro-winkler>=2.0.3",
+    "komcache",
     "komconfig",
     "komgapi",
     "komsuite-nyaapy",
@@ -16,9 +20,18 @@ dependencies = [
     "loguru>=0.7.3",
     "natsort>=8.4.0",
     "omegaconf>=2.3.0",
+    "qbittorrent-api>=2025.7.0",
+    "regex>=2024.11.6",
 ]

 [tool.uv.sources]
 komgapi = { workspace = true }
 komsuite-nyaapy = { workspace = true }
+komcache = { workspace = true }
 komconfig = { workspace = true }
+anilistapi = { workspace = true }
+
+[dependency-groups]
+test = [
+    "click>=8.1.8",
+]
src/data/Feeds/nyaasi.py

@@ -1,9 +1,8 @@
-import time
+import loguru
+import regex
+from komconfig import KomConfig
 from komsuite_nyaapy import Nyaa, Torrent
 from natsort import natsorted
-import re
-from komconfig import KomConfig
-import loguru

 log = loguru.logger
 log.add("logs/nyaasi.log", rotation="1 week")
@@ -27,22 +26,52 @@ class NyaaFeed:
         return natsorted(volumes)

     def search(self, title: str) -> list[Torrent]:
-        def __search(title: str):
-            return Nyaa().search(title, 3, 1)
+        if "#" in title:
+            # replace # with whitespace
+            title = title.replace("#", " ")

-        regex = r"\b(v\d{2,3}-\d{2,3}|v\d{2,3}-v\d{2,3}|\d{2}|\d{2}-\d{2}|v\d{2,3})\b"
+        def __search(title: str):
+            result = Nyaa().search(title, category=3, subcategory=1)
+            return result
+
+        matchregex = (
+            r"\b(v\d{2,3}-\d{2,3}|v\d{2,3}-v\d{2,3}|\d{2}|\d{2}-\d{2}|v\d{2,3})\b"
+        )
         dataset = None
         while dataset is None:
             try:
+                log.debug("Searching for {}".format(title))
                 dataset = __search(title)
-            except:
-                time.sleep(5)
-        datalist = []
+                dataset = [
+                    entry
+                    for entry in dataset
+                    if not entry.download_url.startswith("magnet:")
+                ]
+
+                log.debug("Found {} results".format(len(dataset)))
+                return dataset if len(dataset) < 5 else dataset[:5]
+                # take first 5 results
+                # if dataset and len(dataset) > 5:
+                # dataset = dataset[:5]
+
+            except Exception as e:
+                log.error("Error: {}".format(e))
+                return []
+
+        datalist: list[Torrent] = []
         if dataset is None:
             return datalist

         for entry in dataset:
             if config.komgrabber.get_chapters is False:
+                # check if the title has a ch## in it, if so skip
+                chapter_regex = r"\bch\d+\b"
+                match = regex.search(chapter_regex, entry.name.lower())
+                if match:
+                    log.info(
+                        "Skipping {}, Reason: Chapters disabled".format(entry.name)
+                    )
+                    continue
                 if "chapter" in entry.name.lower():
                     log.info(
                         "Skipping {}, Reason: Chapters disabled".format(entry.name)
@@ -54,35 +83,38 @@ class NyaaFeed:
             if any(x in name.lower() for x in skip_parameters):
                 log.info("Skipping {}".format(name))
                 continue
-            volumes = re.findall(regex, name)
+            volumes = regex.findall(matchregex, name)
             try:
-                match = re.match(r"^(.*?)\s(vol\.\s\d{2})|(v\d{2,3})", name.lower())
+                match = regex.match(r"^(.*?)\s(vol\.\s\d{2})|(v\d{2,3})", name.lower())
                 if match:
                     name = match.group(1)
             except AttributeError:
                 # chapter check
                 try:
-                    match = re.findall(r"(?<!\d)\d{2,3}(?!\d)", name)
+                    match = regex.findall(r"(?<!\d)\d{2,3}(?!\d)", name)
                     print("Matched chapter: {}".format(match))
                     print("Found Chapters only, skipping")
                     continue
                 except AttributeError:
                     pass
-            volumes = self.list_volumes(volumes) if volumes else [0]
-            download_url = entry.download_url
-            seeders = entry.seeders
-            size = entry.size
-            data = Torrent(
-                name=name,
-                download_url=download_url,
-                seeders=seeders,
-                size=size,
-                volumes=volumes,
-            )
+            # volumes = self.list_volumes(volumes) if volumes else []
+            # download_url = entry.download_url
+            # seeders = entry.seeders
+            # size = entry.size
+            # data = Torrent(
+            # name=name,
+            # download_url=download_url,
+            # seeders=seeders,
+            # size=size,
+            # volumes=volumes,
+            # filetypes=entry.filetypes,
+            # contents=entry.contents,
+            # )

-            # print(data)
-            datalist.append(data)
-            log.debug("Found: {}, volumes: {}".format(data.name, data.volumes))
+            if entry.volumes == []:
+                continue
+            datalist.append(entry)
+            log.debug("Found: {}, volumes: {}".format(entry.name, entry.volumes))
         log.success("Found {} entries".format(len(datalist)))

         return datalist
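This file swaps the standard-library `re` module for the third-party `regex` package added to pyproject.toml above; `regex` is designed as a backwards-compatible replacement, so `match`, `search`, and `findall` keep the same call shape. A small sketch of the volume pattern in isolation (the sample name is made up):

```python
import regex  # third-party PyPI package, drop-in compatible with re for these calls

# same volume pattern as in NyaaFeed.search
matchregex = r"\b(v\d{2,3}-\d{2,3}|v\d{2,3}-v\d{2,3}|\d{2}|\d{2}-\d{2}|v\d{2,3})\b"

name = "Some Manga v01-03"          # illustrative torrent title
volumes = regex.findall(matchregex, name)
print(volumes)  # -> ['v01-03']
```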
src/data/komga.py

@@ -1,35 +1,44 @@
-import json

 import requests

-from komgapi import komgapi as KOMGAPI_REST
-from src.schema.series import SeriesMetadata
-from src.logs.log import Log
 from komconfig import KomConfig
+from komgapi import komgapi as KOMGAPI_REST
+
+from src.logs.log import Log
+from src.schema.series import SeriesMetadata

 config = KomConfig()


+class KomgaError(Exception):
+    """Custom exception for Komga API errors."""
+
+    pass
+
+
 class KomgaAPI(KOMGAPI_REST):
     def __init__(self) -> None:
         self.logger = Log("KomgaAPI")
         url = config.komga.url
-        self.auth = config.komga_auth
+        self.auth = config.komga.api_key

         super().__init__(
             url=url,
-            username=self.auth[0],
-            password=self.auth[1],
+            api_key=self.auth,
             timeout=100,
         )

         self.connected = self.test_connection(url)
         if not self.connected:
-            print("Komga API not connected")
+            raise KomgaError("Komga API not connected")
             exit(1)

     def test_connection(self, url) -> bool:
         try:
-            response = requests.get(f"{url}/api/v1/series", auth=self.auth)
+            if isinstance(self.auth, tuple):
+                response = requests.get(f"{url}/api/v1/series", auth=self.auth)
+            else:
+                response = requests.get(
+                    f"{url}/api/v1/series", headers={"X-Api-Key": self.auth}
+                )
             if response.status_code == 200:
                 return True
             return False
@@ -87,14 +96,7 @@ class KomgaAPI(KOMGAPI_REST):
     def getVolumes(
         self,
         series_id: str,
-        media_status: list[str] = None,
-        read_status: list[str] = None,
-        tag: list[str] = None,
         unpaged: bool = True,
-        page_integer: int = None,
-        size: int = None,
-        sort: list[str] = None,
-        author: list[str] = None,
     ) -> list[int]:
         """Get a list of all volumes matching the given criteria.

@@ -111,20 +113,14 @@ class KomgaAPI(KOMGAPI_REST):
             author (list[str], optional): name,role. Defaults to None.

         Returns:
-            list[int]: _description_
+            list[int]: The list of volumes for the given series_id
         """

         volumes = []
         # api_url=f'{self.url}/api/v1/series/{self._get_series_id(title)}/books'
-        result = self.series_controller.getSeriesBooks(
-            series_id,
-            media_status=media_status,
-            read_status=read_status,
-            tag=tag,
+        result = self.book_controller.listBooks(
             unpaged=unpaged,
-            page=page_integer,
-            size=size,
-            sort=sort,
+            query={"condition": {"seriesId": {"operator": "is", "value": series_id}}},
         )
         if result is None:
             return volumes
@@ -140,7 +136,14 @@ class KomgaAPI(KOMGAPI_REST):
             m_numba = max(numba)
             vol = m_numba
             volumes.append(int(float(vol)))
-        return volumes
+        if volumes == []:
+            try:
+                series = self.series_controller.getSeries(series_id)
+                volumes = [i for i in range(1, series.booksCount + 1)]
+            except Exception as e:
+                self.logger.log_error(f"Error getting volumes: {e}")
+                return []
+        return sorted(volumes)

     def getReadCount(self, series_id: str) -> int:
         """Get the number of read volumes of a series."""
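The updated `test_connection` accepts either a `(username, password)` tuple or an API-key string. A standalone sketch of the two request styles it switches between, using plain `requests`; the URL and credentials below are placeholders, not values from the project:

```python
import requests

url = "http://localhost:25600"  # placeholder Komga base URL
auth = "my-api-key"             # or a ("user", "password") tuple

if isinstance(auth, tuple):
    # HTTP basic auth with a username/password pair
    response = requests.get(f"{url}/api/v1/series", auth=auth)
else:
    # API key sent via the X-Api-Key header, as in the diff above
    response = requests.get(f"{url}/api/v1/series", headers={"X-Api-Key": auth})

print(response.status_code == 200)
```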
Exception classes:

@@ -1,23 +1,28 @@
 class ConnectError(Exception):
     def __init__(self, message):
         super().__init__(message)


 class LoginError(Exception):
     def __init__(self, message):
         super().__init__(message)


 class AccessError(Exception):
     def __init__(self, message):
         super().__init__(message)


 class JSONError(Exception):
     def __init__(self, message):
         super().__init__(message)


 class ResponseError(Exception):
     def __init__(self, message):
         super().__init__(message)


 class ResultError(Exception):
     def __init__(self, message):
         super().__init__(message)
src/logic/cli.py (551 lines removed; the file was deleted in favor of the new src/logic/search.py below)

@@ -1,551 +0,0 @@
import os
|
|
||||||
import re
|
|
||||||
import shutil
|
|
||||||
import time
|
|
||||||
import jaro
|
|
||||||
from src.data.komga import KomgaAPI
|
|
||||||
from komgapi import komgapi
|
|
||||||
from komgapi.schemas.Series import Series
|
|
||||||
from src.data.Feeds.nyaasi import NyaaFeed
|
|
||||||
from komsuite_nyaapy import Torrent
|
|
||||||
from src.logic.download import Download
|
|
||||||
from komconfig import KomConfig
|
|
||||||
from komcache import KomCache
|
|
||||||
from src.logic.utils import (
|
|
||||||
detect_chapters,
|
|
||||||
rename,
|
|
||||||
tag_folder,
|
|
||||||
move,
|
|
||||||
rename_folder,
|
|
||||||
remove_empty_folders,
|
|
||||||
time_checker,
|
|
||||||
folder_similarity,
|
|
||||||
calculate_new_volumes,
|
|
||||||
safe_remove_directory,
|
|
||||||
)
|
|
||||||
from src.logic.db_schemas import (
|
|
||||||
KOMGRABBER_TABLE,
|
|
||||||
INSERT_KOMGRABBER,
|
|
||||||
SELECT_KOMGRABBER,
|
|
||||||
UPDATE_KOMGRABBER,
|
|
||||||
LASTCHECKED_KOMGRABBER,
|
|
||||||
GET_LASTCHECKED_KOMGRABBER,
|
|
||||||
)
|
|
||||||
import loguru
|
|
||||||
from pathlib import Path
|
|
||||||
from alive_progress import alive_it
|
|
||||||
from typing import Any
|
|
||||||
|
|
||||||
config = KomConfig()
|
|
||||||
|
|
||||||
|
|
||||||
log = loguru.logger
|
|
||||||
log.remove()
|
|
||||||
log.add("logs/application.log", level="INFO", rotation="15MB", retention="1 week")
|
|
||||||
log.add("logs/cli.log", rotation="15MB", retention="1 week") # type:ignore
|
|
||||||
# log.add(sys.stdout)
|
|
||||||
Komga = KomgaAPI()
|
|
||||||
|
|
||||||
|
|
||||||
LINE_CLEAR = "\x1b[2K"
|
|
||||||
failed_items: list[str] = []
|
|
||||||
|
|
||||||
|
|
||||||
class mangaCli:
|
|
||||||
def __init__(self, library_id: str = "") -> None:
|
|
||||||
self.dl = Download(config.komgrabber.download_location)
|
|
||||||
if os.path.exists(config.komgrabber.download_location):
|
|
||||||
for file in os.listdir(config.komgrabber.download_location):
|
|
||||||
try:
|
|
||||||
os.remove(f"{config.komgrabber.download_location}/{file}")
|
|
||||||
except:
|
|
||||||
shutil.rmtree(f"{config.komgrabber.download_location}/{file}")
|
|
||||||
else:
|
|
||||||
os.mkdir(config.komgrabber.download_location)
|
|
||||||
self.file = None
|
|
||||||
self.serie = ""
|
|
||||||
self.serie_id = ""
|
|
||||||
self.series_data: Series
|
|
||||||
self.volumes = []
|
|
||||||
self.download_path = config.komgrabber.download_location
|
|
||||||
self.cache = KomCache | None
|
|
||||||
if config.komgrabber.use_cache:
|
|
||||||
self.cache = KomCache()
|
|
||||||
self.cache.create_table(KOMGRABBER_TABLE)
|
|
||||||
# self.allSeries = Komga.getAllSeries()
|
|
||||||
pass
|
|
||||||
|
|
||||||
def download(self, feed_url: str):
|
|
||||||
def __chapter_check(title: str) -> bool:
|
|
||||||
if title.endswith(".cbz") or title.endswith(".cbr"):
|
|
||||||
if not re.search(r"(v\d{1,3}(-\d{1,3})?)|(Vol\. \d{1,3})", title):
|
|
||||||
return True
|
|
||||||
else:
|
|
||||||
return False
|
|
||||||
else:
|
|
||||||
return False
|
|
||||||
|
|
||||||
def __epub_check(title: str) -> bool:
|
|
||||||
if title.endswith(".epub"):
|
|
||||||
return True
|
|
||||||
else:
|
|
||||||
return False
|
|
||||||
|
|
||||||
# check if download location is empty, if not, remove everything in it
|
|
||||||
if os.path.exists(self.download_path):
|
|
||||||
# force stop the download
|
|
||||||
if len(self.dl.api.get_downloads()) > 0:
|
|
||||||
self.dl.api.get_downloads()[0].remove(force=True)
|
|
||||||
time.sleep(5)
|
|
||||||
file: str
|
|
||||||
file = self.dl.get_file(feed_url)
|
|
||||||
if __chapter_check(file):
|
|
||||||
# print(f"Skipping {file}, reason: no volume number, likely a chapter")
|
|
||||||
return False
|
|
||||||
if __epub_check(file):
|
|
||||||
# print(f"Skipping {file}, reason: epub file")
|
|
||||||
return False
|
|
||||||
|
|
||||||
self.file = file
|
|
||||||
|
|
||||||
# print(f"Filename: {file}")
|
|
||||||
file_move = False
|
|
||||||
if file.endswith(".cbz") or file.endswith(".cbr"):
|
|
||||||
new_folder = Path(self.download_path, self.serie)
|
|
||||||
os.makedirs(new_folder, exist_ok=True)
|
|
||||||
file_move = True
|
|
||||||
|
|
||||||
state = self.dl.add_torrent(feed_url.split("/")[-1])
|
|
||||||
if state is False:
|
|
||||||
# print("Error adding torrent")
|
|
||||||
return False
|
|
||||||
|
|
||||||
gid = self.dl.api.get_downloads()[0].gid
|
|
||||||
# check if the download is complete using the gid
|
|
||||||
dl_complete = True
|
|
||||||
check_done = False
|
|
||||||
while not self.dl.api.get_downloads(gids=[gid])[0].seeder:
|
|
||||||
# while not self.dl.api.get_downloads()[0].seeder:
|
|
||||||
progress = self.dl.check_progress()
|
|
||||||
progress = "{:.2f}".format(progress)
|
|
||||||
# eta = self.dl.api.get_downloads()[0].eta_string() #
|
|
||||||
# print(end=LINE_CLEAR)
|
|
||||||
# print("Progress: ", progress, "ETA: ", eta, end="\r")
|
|
||||||
# if progress remains the same for 30 seconds, stop the download
|
|
||||||
progress = self.dl.check_progress()
|
|
||||||
|
|
||||||
time.sleep(45)
|
|
||||||
n_progress = self.dl.check_progress()
|
|
||||||
dl_name = self.dl.api.get_downloads()[0].name
|
|
||||||
if not folder_similarity(self.serie.lower(), dl_name.lower()) > 0.8:
|
|
||||||
log.error(
|
|
||||||
f"Folder name {dl_name} does not match {self.serie}, skipping download"
|
|
||||||
)
|
|
||||||
self.dl.api.get_downloads()[0].remove(force=True)
|
|
||||||
dl_complete = False
|
|
||||||
break
|
|
||||||
if not check_done:
|
|
||||||
local_files = os.listdir(f"{self.download_path}")
|
|
||||||
for f in local_files:
|
|
||||||
# print(f)
|
|
||||||
if os.path.isdir(f"{self.download_path}/{f}"):
|
|
||||||
local_files.extend(
|
|
||||||
[
|
|
||||||
f"{self.download_path}/{f}/{file}"
|
|
||||||
for file in os.listdir(f"{self.download_path}/{f}")
|
|
||||||
]
|
|
||||||
)
|
|
||||||
local_files = [
|
|
||||||
file
|
|
||||||
for file in local_files
|
|
||||||
if file.endswith(".cbz") or file.endswith(".cbr")
|
|
||||||
]
|
|
||||||
local_volumes = Komga.getVolumes(self.series_data.id)
|
|
||||||
# if not local_files:
|
|
||||||
# dl_complete=False
|
|
||||||
# break
|
|
||||||
local_files_volumes = []
|
|
||||||
for file in local_files:
|
|
||||||
vol_regex = r"(v\d{1,3}(-\d{1,3})?)|(Vol\. \d{1,3})"
|
|
||||||
# if the file does not match the naming convention, skip it
|
|
||||||
if re.search(vol_regex, file):
|
|
||||||
match = re.search(vol_regex, file)
|
|
||||||
if match:
|
|
||||||
vol = match.group(0).replace("v", "").replace("Vol. ", "")
|
|
||||||
if "-" in vol:
|
|
||||||
local_files_volumes.extend(
|
|
||||||
[int(volume) for volume in vol.split("-")]
|
|
||||||
)
|
|
||||||
continue
|
|
||||||
vol = int(vol)
|
|
||||||
local_files_volumes.append(vol)
|
|
||||||
|
|
||||||
log.info(
|
|
||||||
"Grabbed volumes: {}, Komga volumes: {}".format(
|
|
||||||
sorted(local_files_volumes), local_volumes
|
|
||||||
)
|
|
||||||
)
|
|
||||||
if local_files_volumes == []:
|
|
||||||
pass
|
|
||||||
# check íf any local_file_volumes are not in local_volumes
|
|
||||||
if all([vol in local_volumes for vol in local_files_volumes]):
|
|
||||||
log.info("all volumes downloaded, stopping...")
|
|
||||||
dl_complete = False
|
|
||||||
break
|
|
||||||
else:
|
|
||||||
log.info("not all volumes downloaded, continuing...")
|
|
||||||
check_done = True
|
|
||||||
if progress == n_progress:
|
|
||||||
log.debug(
|
|
||||||
"Progress has not changed for 30 seconds, stopping the download"
|
|
||||||
)
|
|
||||||
self.dl.api.get_downloads()[0].remove(force=True)
|
|
||||||
dl_complete = False
|
|
||||||
break
|
|
||||||
else:
|
|
||||||
pass
|
|
||||||
# stop the download, remove the torrent files
|
|
||||||
try:
|
|
||||||
self.dl.api.get_downloads()[0].remove(force=True)
|
|
||||||
except:
|
|
||||||
pass
|
|
||||||
self.dl.remove_torrents()
|
|
||||||
# print(end=LINE_CLEAR)
|
|
||||||
# print("Download complete")
|
|
||||||
# self.dl.download(feed_url, file_rename=True)
|
|
||||||
if not dl_complete:
|
|
||||||
return False
|
|
||||||
if dl_complete is True:
|
|
||||||
try:
|
|
||||||
if file_move is True:
|
|
||||||
shutil.move(
|
|
||||||
Path(self.download_path, file),
|
|
||||||
f"{new_folder}/{file}",
|
|
||||||
)
|
|
||||||
except Exception as e:
|
|
||||||
print(e)
|
|
||||||
return False
|
|
||||||
return True
|
|
||||||
return False
|
|
||||||
|
|
||||||
def process_serie(self, data: Series) -> list[Torrent]:
|
|
||||||
"""Process a single serie based on its title.
|
|
||||||
The process is as follows:
|
|
||||||
1. get all volumes of the serie from komga using the api
|
|
||||||
2. get all feed entries from nyaa.si using the api
|
|
||||||
3. compare the volumes from komga with the volumes from nyaa.si
|
|
||||||
4. if the volumes from nyaa.si are greater than the volumes from komga, add the entry to the download list.
|
|
||||||
|
|
||||||
Args:
|
|
||||||
- data (dict): a dict containing the title of the serie at ["title"] and the id of the serie at ["id"]
|
|
||||||
|
|
||||||
|
|
||||||
Returns:
|
|
||||||
- list[dict]: a list of dictionaries containing the entries to download
|
|
||||||
"""
|
|
||||||
serie = data.name
|
|
||||||
series_id = data.id
|
|
||||||
vols = Komga.getVolumes(series_id=series_id, unpaged=True)
|
|
||||||
feed_titles = NyaaFeed().search(serie)
|
|
||||||
f_d = []
|
|
||||||
if feed_titles == []:
|
|
||||||
failed_items.append(serie)
|
|
||||||
added_max_vols = vols if vols else [0]
|
|
||||||
for entry in feed_titles:
|
|
||||||
if entry.seeders > 0:
|
|
||||||
if (
|
|
||||||
serie.lower() in entry.name.lower()
|
|
||||||
or jaro.jaro_metric(entry.name.lower(), serie.lower()) > 0.7
|
|
||||||
):
|
|
||||||
# get the entry with the most volumes
|
|
||||||
filesizes = entry.filesizes
|
|
||||||
volumes = entry.volumes
|
|
||||||
min_size = len(volumes) * config.komgrabber.manga.min_filesize
|
|
||||||
if filesizes < min_size:
|
|
||||||
log.info(
|
|
||||||
f"Skipping {entry.name}, Reason: Filesize is too small"
|
|
||||||
)
|
|
||||||
continue
|
|
||||||
if max(volumes) > max(added_max_vols):
|
|
||||||
f_d.append(entry) # = entry
|
|
||||||
# added_max_vols = volumes
|
|
||||||
|
|
||||||
else:
|
|
||||||
continue
|
|
||||||
# return entry with the most volumes
|
|
||||||
return f_d
|
|
||||||
|
|
||||||
def media_grabber(self, serie: Series, bar: Any = None) -> bool:
|
|
||||||
result = self.process_serie(serie)
|
|
||||||
|
|
||||||
total_new_volumes: list[tuple[Torrent, list[int]]] = []
|
|
||||||
fs_per_volume = config.komgrabber.manga.min_filesize
|
|
||||||
series_volumes = Komga.getVolumes(series_id=serie.id, unpaged=True)
|
|
||||||
if result is None or result == []:
|
|
||||||
log.info(f"Could not find any new volumes for {serie.name}")
|
|
||||||
return False
|
|
||||||
bar.text(f"Downloading new volumes for {serie.name}...")
|
|
||||||
for res in result:
|
|
||||||
log.info(f"{res.name}, Volumes: {res.volumes}")
|
|
||||||
if res.volumes != [0]:
|
|
||||||
min_size = len(res.volumes) * fs_per_volume
|
|
||||||
if res.filesizes < min_size:
|
|
||||||
log.info(f"Skipping {res.name}, Reason: Filesize is too small")
|
|
||||||
result.remove(res)
|
|
||||||
continue
|
|
||||||
|
|
||||||
for res in result:
|
|
||||||
log.debug("present: {}, new: {}".format(series_volumes, res.volumes))
|
|
||||||
|
|
||||||
new_volumes = calculate_new_volumes(series_volumes, res.volumes)
|
|
||||||
if len(new_volumes) == 0:
|
|
||||||
log.info(f"Skipping {res.name}, Reason: No new Volumes found")
|
|
||||||
continue
|
|
||||||
total_new_volumes.append((res, new_volumes))
|
|
||||||
if len(total_new_volumes) == 0:
|
|
||||||
log.info(f"Could not find any new volumes for {serie.name}")
|
|
||||||
return False
|
|
||||||
total_new_volumes = sorted(
|
|
||||||
total_new_volumes, key=lambda x: len(x[1]), reverse=True
|
|
||||||
)
|
|
||||||
res = total_new_volumes[0][0]
|
|
||||||
|
|
||||||
log.info(f"Found {len(total_new_volumes[0][1])} new entries for {serie.name}")
|
|
||||||
|
|
||||||
# log.info(
|
|
||||||
# f"Found {len(new_volumes)} new {'volume' if len(new_volumes) == 1 else 'volumes'} for {serie.name}"
|
|
||||||
# )
|
|
||||||
# # check if the new volumes were aleady downloaded
|
|
||||||
# log.info(f"current volumes: {series_volumes}, new volumes: {new_volumes}")
|
|
||||||
# # print(result)
|
|
||||||
|
|
||||||
if self.download(res.download_url) is True:
|
|
||||||
log.success(f"Downloaded {res.name}")
|
|
||||||
# self.rename_folder_and_files(self.file, komga_data=serie, remove=True)
|
|
||||||
# self.move_to_komga(serie=entry)
|
|
||||||
log.info("Renaming and tagging files")
|
|
||||||
rename()
|
|
||||||
if not config.komgrabber.get_chapters:
|
|
||||||
detect_chapters()
|
|
||||||
tag_folder()
|
|
||||||
if rename_folder(series=serie):
|
|
||||||
move(self.download_path, config.komga.media_path)
|
|
||||||
else:
|
|
||||||
log.info("Seems like we grabbed the wrong series, oops")
|
|
||||||
failed_items.append(serie.name)
|
|
||||||
# clear folder
|
|
||||||
# remove the download dir and create it anew
|
|
||||||
remove_empty_folders(self.download_path)
|
|
||||||
safe_remove_directory(self.download_path)
|
|
||||||
return True
|
|
||||||
|
|
||||||
def search_for_new_volumes(self):
|
|
||||||
cache_present = False
|
|
||||||
if self.cache:
|
|
||||||
cache_present = True
|
|
||||||
series = Komga.series_controller.getAllSeries(
|
|
||||||
body={
|
|
||||||
"condition": {
|
|
||||||
"anyOf": [
|
|
||||||
{"seriesStatus": {"operator": "is", "value": "ONGOING"}},
|
|
||||||
{"seriesStatus": {"operator": "is", "value": "HIATUS"}},
|
|
||||||
{"seriesStatus": {"operator": "is", "value": "ENDED"}},
|
|
||||||
]
|
|
||||||
}
|
|
||||||
}
|
|
||||||
)
|
|
||||||
komga_series: list[Series] = []
|
|
||||||
shutil.rmtree(self.download_path, ignore_errors=True)
|
|
||||||
|
|
||||||
os.mkdir(self.download_path)
|
|
||||||
|
|
||||||
log.info(f"{len(series)} series found")
|
|
||||||
today = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime())
|
|
||||||
today = time.mktime(time.strptime(today, "%Y-%m-%d %H:%M:%S"))
|
|
||||||
|
|
||||||
def cache_ending(bar):
|
|
||||||
bar.title("Updating cache entries")
|
|
||||||
bar.text("Cache updated, continuing...")
|
|
||||||
|
|
||||||
def series_ending(bar):
|
|
||||||
bar.title("Completed searching for new volumes")
|
|
||||||
bar.text("All series checked, exiting...")
|
|
||||||
|
|
||||||
def skip_ending(bar):
|
|
||||||
bar.title("Skipping series")
|
|
||||||
bar.text("Skipped series, continuing...")
|
|
||||||
|
|
||||||
def ended_ending(bar):
|
|
||||||
bar.title("Skipping finished series")
|
|
||||||
bar.text("Finished check, continuing to search new volumes...")
|
|
||||||
|
|
||||||
if cache_present:
|
|
||||||
log.info("Cache present, checking for missing entries")
|
|
||||||
cacheBar = alive_it(
|
|
||||||
series,
|
|
||||||
finalize=cache_ending, # type:ignore
|
|
||||||
bar="smooth",
|
|
||||||
spinner="dots",
|
|
||||||
receipt_text=True,
|
|
||||||
)
|
|
||||||
for serie in cacheBar:
|
|
||||||
data = self.cache.query(SELECT_KOMGRABBER, (serie.id,))
|
|
||||||
log.debug(
|
|
||||||
f"Cache data: {data}, Serie: {serie.name}, Status: {serie.metadata.status}"
|
|
||||||
)
|
|
||||||
if data:
|
|
||||||
if data[3] == serie.metadata.status:
|
|
||||||
continue
|
|
||||||
elif data and data[3] != serie.metadata.status:
|
|
||||||
self.cache.update(
|
|
||||||
UPDATE_KOMGRABBER,
|
|
||||||
(serie.name, serie.metadata.status, serie.id),
|
|
||||||
)
|
|
||||||
log.info(f"Serie {serie.name} updated")
|
|
||||||
time.sleep(0.05)
|
|
||||||
else:
|
|
||||||
self.cache.insert(
|
|
||||||
INSERT_KOMGRABBER, (serie.name, serie.id, serie.metadata.status)
|
|
||||||
)
|
|
||||||
log.info(f"Serie {serie.name} added to cache")
|
|
||||||
|
|
||||||
log.debug("Cache created, added missing entries")
|
|
||||||
time.sleep(0.5)
|
|
||||||
if cache_present:
|
|
||||||
skipBar = alive_it(
|
|
||||||
series,
|
|
||||||
bar="smooth",
|
|
||||||
spinner="dots",
|
|
||||||
receipt_text=True,
|
|
||||||
finalize=skip_ending,
|
|
||||||
)
|
|
||||||
for serie in skipBar:
|
|
||||||
last_checked = self.cache.query(
|
|
||||||
GET_LASTCHECKED_KOMGRABBER, (serie.id,)
|
|
||||||
)[0]
|
|
||||||
# convert timestamp to epoch float for comparison
|
|
||||||
if last_checked:
|
|
||||||
last_checked = time.mktime(
|
|
||||||
time.strptime(last_checked, "%Y-%m-%d %H:%M:%S")
|
|
||||||
)
|
|
||||||
# if difference between last_checked and today is less than config.komgrabber.cache_check_interval, skip entry
|
|
||||||
time_difference = time_checker(last_checked, today)
|
|
||||||
# if time difference is less than set in the settings and the series status is not ended and the book count is not the same as the total book count, skip the entry
|
|
||||||
if time_difference < config.komgrabber.cache_check_interval:
|
|
||||||
komga_series.append(serie)
|
|
||||||
log.debug(f"Added {serie.name} to the list")
|
|
||||||
|
|
||||||
if (
|
|
||||||
serie.metadata.status == "ENDED"
|
|
||||||
and serie.booksCount == serie.metadata.totalBookCount
|
|
||||||
):
|
|
||||||
log.debug(
|
|
||||||
f"Serie {serie.name} if finished and has all volumes present, skipping..."
|
|
||||||
)
|
|
||||||
else:
|
|
||||||
komga_series.append(serie)
|
|
||||||
time.sleep(0.005)
|
|
||||||
log.debug(len(komga_series))
|
|
||||||
log.info("Finished checking cache, continuing...")
|
|
||||||
log.info("There are {} series to check".format(len(komga_series)))
|
|
||||||
time.sleep(0.05)
|
|
||||||
pBar = alive_it(
|
|
||||||
komga_series,
|
|
||||||
finalize=series_ending,
|
|
||||||
title="Searching for new volumes",
|
|
||||||
)
|
|
||||||
for serie in pBar:
|
|
||||||
pBar.text(f"Searching for new volumes for {serie.name}")
|
|
||||||
log.info(
|
|
||||||
f"searching for new volumes for {serie.name}, currently at {serie.booksCount} volumes"
|
|
||||||
)
|
|
||||||
self.series_data = serie
|
|
||||||
self.serie = serie.name
|
|
||||||
self.serie_id = serie.id
|
|
||||||
self.media_grabber(serie, bar=pBar)
|
|
||||||
if cache_present:
|
|
||||||
self.cache.update(LASTCHECKED_KOMGRABBER, (serie.id,))
|
|
||||||
time.sleep(5)
|
|
||||||
# print("done", serie.name)
|
|
||||||
return self
|
|
||||||
|
|
||||||
def search_for_series(self, series: list[str]):
|
|
||||||
cache_present = False
|
|
||||||
if self.cache:
|
|
||||||
cache_present = True
|
|
||||||
shutil.rmtree(self.download_path, ignore_errors=True)
|
|
||||||
os.mkdir(self.download_path)
|
|
||||||
series_request = []
|
|
||||||
for serie in series:
|
|
||||||
series_request.append(
|
|
||||||
{"title": {"operator": "is", "value": serie}},
|
|
||||||
)
|
|
||||||
request_body = {"condition": {"anyOf": series_request}}
|
|
||||||
series = Komga.series_controller.getAllSeries(body=request_body)
|
|
||||||
|
|
||||||
def series_ending(bar):
|
|
||||||
bar.title("Completed searching for new volumes")
|
|
||||||
bar.text("All series checked, exiting...")
|
|
||||||
|
|
||||||
pBar = alive_it(series, finalize=series_ending)
|
|
||||||
for serie in pBar:
|
|
||||||
pBar.text(f"Searching for new volumes for {serie.name}")
|
|
||||||
log.info(
|
|
||||||
f"searching for new volumes for {serie.name}, currently at {serie.booksCount} volumes"
|
|
||||||
)
|
|
||||||
self.series_data = serie
|
|
||||||
self.serie = serie.name
|
|
||||||
self.serie_id = serie.id
|
|
||||||
|
|
||||||
self.media_grabber(serie)
|
|
||||||
if cache_present:
|
|
||||||
self.cache.update(LASTCHECKED_KOMGRABBER, (serie.id,))
|
|
||||||
|
|
||||||
time.sleep(5)
|
|
||||||
# print("done", serie.name)
|
|
||||||
return self
|
|
||||||
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
class ebookCli:
|
|
||||||
def __init__(self) -> None:
|
|
||||||
self.dl = Download(config.komgrabber.download_location)
|
|
||||||
self.file = None
|
|
||||||
self.serie = ""
|
|
||||||
self.serie_id = ""
|
|
||||||
self.series_data: Series
|
|
||||||
self.volumes = []
|
|
||||||
self.download_path = config.komgrabber.download_location
|
|
||||||
# self.allSeries = Komga.getAllSeries()
|
|
||||||
pass
|
|
||||||
|
|
||||||
def search_for_new_volumes(self):
|
|
||||||
folder = config.komgrabber.ebook.data_directory
|
|
||||||
series = os.listdir(folder)
|
|
||||||
|
|
||||||
|
|
||||||
def avail_check():
|
|
||||||
komga_avail = True
|
|
||||||
return (True, komga_avail)
|
|
||||||
|
|
||||||
|
|
||||||
def search_all():
|
|
||||||
mangaCli().search_for_new_volumes()
|
|
||||||
komga = komgapi(config.komga.user, config.komga.password, config.komga.url)
|
|
||||||
libraries = komga.library_controller.getLibraries()
|
|
||||||
for library in libraries:
|
|
||||||
komga.library_controller.scanLibrary(library.id)
|
|
||||||
print(f"Initialized scan for library {library.name}")
|
|
||||||
# update_state()
|
|
||||||
print("Failed series:\n", failed_items)
|
|
||||||
|
|
||||||
|
|
||||||
def search_series(series: list[str]):
|
|
||||||
mangaCli().search_for_series(series)
|
|
||||||
# update_state()
|
|
||||||
print("Failed series:\n", failed_items)
|
|
||||||
|
|
||||||
|
|
||||||
if __name__ == "__main__":
|
|
||||||
search_all()
|
|
||||||
src/logic/db_schemas.py

@@ -6,27 +6,23 @@ CREATE TABLE IF NOT EXISTS komgrabber (
     status TEXT NOT NULL,
     created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
     updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
-    last_checked TIMESTAMP DEFAULT 0
-);
-"""
+    last_checked TIMESTAMP DEFAULT 0,
+    complete INTEGER DEFAULT 0
+);"""

 INSERT_KOMGRABBER = """
 INSERT INTO komgrabber (name, series_id, status)
-VALUES (?, ?, ?);
+VALUES (:name, :series_id, :status);
 """
 SELECT_KOMGRABBER = """
-SELECT * FROM komgrabber WHERE series_id = ?;
-"""
+SELECT * FROM komgrabber WHERE series_id = :series_id;"""
 UPDATE_KOMGRABBER = """
 UPDATE komgrabber
-SET name = ?, status = ?, updated_at = CURRENT_TIMESTAMP
-WHERE series_id = ?;
-"""
+SET name = :name, status = :status, updated_at = CURRENT_TIMESTAMP
+WHERE series_id = :series_id;"""
 LASTCHECKED_KOMGRABBER = """
 UPDATE komgrabber
 SET last_checked = CURRENT_TIMESTAMP
-WHERE series_id = ?;
-"""
+WHERE series_id = :series_id;"""
 GET_LASTCHECKED_KOMGRABBER = """
-SELECT last_checked FROM komgrabber WHERE series_id = ?;
-"""
+SELECT last_checked FROM komgrabber WHERE series_id = :series_id;"""
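The SQL statements above move from positional `?` placeholders to named `:param` placeholders, so callers can pass a dict instead of an ordered tuple. These queries run through the workspace `komcache` wrapper, whose exact call signature isn't shown in this diff; the sketch below uses plain `sqlite3` and a simplified table purely to illustrate the placeholder style:

```python
import sqlite3

conn = sqlite3.connect(":memory:")
# simplified schema for illustration only
conn.execute("CREATE TABLE komgrabber (name TEXT, series_id TEXT, status TEXT)")

INSERT_KOMGRABBER = """
INSERT INTO komgrabber (name, series_id, status)
VALUES (:name, :series_id, :status);
"""

# named placeholders are filled from a dict, so argument order no longer matters
conn.execute(
    INSERT_KOMGRABBER,
    {"name": "One Piece", "series_id": "abc123", "status": "ONGOING"},
)

row = conn.execute(
    "SELECT * FROM komgrabber WHERE series_id = :series_id;",
    {"series_id": "abc123"},
).fetchone()
print(row)  # ('One Piece', 'abc123', 'ONGOING')
```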
src/logic/download.py

@@ -1,36 +1,42 @@
-import sys
 import os
-import time
-import bencodepy
-from .utils import rename
-from aria2p import Client, API
-from pathlib import Path
-import loguru
 import sys
+import time
+from pathlib import Path
+
+import bencodepy
+import loguru
+import qbittorrentapi
+from aria2p import API, Client
+from komconfig import KomConfig
+
+from .utils import rename

 log = loguru.logger
 log.remove()
 log.add("application.log", rotation="1 week", retention="1 month")
 # log.add(sys.stdout, level='INFO')
+config = KomConfig()


 class Download:
     """Download a file from a url and start the download using aria2"""

-    def __init__(self, download_location: Path) -> None:
+    def __init__(
+        self, download_location: Path = config.komgrabber.download_location
+    ) -> None:
         # self.download_location needs to be a string
         self.download_location = download_location.__str__()
         self.filename = None
         self.torrent_file = None
         self.progress = 0
         self.canceled = False
-        self.aria2_running = self.check_aria2()
+        self.aria2_running = self.check_online()
         self.api = API(
             client=Client(
-                host="http://localhost",
-                port=6800,
-                secret="",
-                timeout=60,
+                host=config.komgrabber.downloader_settings.host,
+                port=config.komgrabber.downloader_settings.port,
+                secret=config.komgrabber.downloader_settings.secret,
+                timeout=config.komgrabber.downloader_settings.timeout,
             )
         )
         self.api.set_global_options({"dir": self.download_location})
@@ -38,7 +44,7 @@ class Download:
             log.error("Aria2 is not running")
             sys.exit()

-    def check_aria2(self):
+    def check_online(self):
        # check if aria2 is running
        if os.system("ps -A | grep aria2c > /dev/null 2>&1") == 0:
            return True
@@ -59,10 +65,6 @@ class Download:
         # use wget to download the file to the download location
         name = url.split("/")[-1]
         dl_url = self.download_location
-        # while self.get_filename(dl_url) is None:
-        # if not os.path.exists(dl_url):
-
-        # call os.system(f"wget -P {dl_url} {url}"), but suppress output
         os.system(f"wget -P {dl_url} {url} > /dev/null 2>&1")
         while not os.path.exists(dl_url):
             time.sleep(1)
@@ -111,3 +113,33 @@ class Download:
             return torrent[b"info"][b"name"].decode("utf-8")
         except FileNotFoundError:
             return None

+
+class QBitDownload:
+    def __init__(self) -> None:
+        self.category = config.komgrabber.downloader_settings.category
+        self.client = qbittorrentapi.Client(
+            host=config.komgrabber.downloader_settings.host,
+            port=config.komgrabber.downloader_settings.port,
+            username=config.komgrabber.downloader_settings.username,
+            password=config.komgrabber.downloader_settings.password,
+        )
+
+        if not self.check_online():
+            raise Exception("qBittorrent is not running or login failed")
+
+    def check_online(self):
+        try:
+            self.client.auth_log_in()
+            return True
+        except qbittorrentapi.LoginFailed as e:
+            log.error(f"Login failed: {e}")
+            return False
+
+    def add_torrent(self, url):
+        try:
+            self.client.torrents_add(urls=url, category=self.category)
+            log.info("Torrent added")
+        except Exception as e:
+            print(f"Error adding torrent: {e}")
+            return False
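The new `QBitDownload` class wraps the `qbittorrent-api` client added to the dependencies above. A minimal standalone sketch of the same client calls; the connection settings below are placeholders rather than the KomConfig values the project actually reads:

```python
import qbittorrentapi

# placeholder connection settings; the real project pulls these from KomConfig
client = qbittorrentapi.Client(
    host="localhost", port=8080, username="admin", password="adminadmin"
)

try:
    client.auth_log_in()  # raises LoginFailed on bad credentials
except qbittorrentapi.LoginFailed as e:
    raise SystemExit(f"Login failed: {e}")

# torrents_add accepts a torrent/magnet URL plus an optional category label
result = client.torrents_add(urls="https://example.org/some.torrent", category="manga")
print(result)  # "Ok." when qBittorrent accepted the torrent
```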
src/logic/search.py (new file, 747 lines added)

@@ -0,0 +1,747 @@
|
import os
|
||||||
|
import re
|
||||||
|
import shutil
|
||||||
|
import time
|
||||||
|
from pathlib import Path
|
||||||
|
from typing import Any, Optional
|
||||||
|
|
||||||
|
import jaro
|
||||||
|
import loguru
|
||||||
|
from alive_progress import alive_it
|
||||||
|
from anilistapi import AnilistAPI
|
||||||
|
from komcache import KomCache
|
||||||
|
from komconfig import KomConfig
|
||||||
|
from komconfig.config import Library
|
||||||
|
from komgapi.schemas.Series import Series
|
||||||
|
from komsuite_nyaapy import Torrent
|
||||||
|
|
||||||
|
from src.data.Feeds.nyaasi import NyaaFeed
|
||||||
|
from src.data.komga import KomgaAPI
|
||||||
|
from src.logic.db_schemas import (
|
||||||
|
GET_LASTCHECKED_KOMGRABBER,
|
||||||
|
INSERT_KOMGRABBER,
|
||||||
|
LASTCHECKED_KOMGRABBER,
|
||||||
|
SELECT_KOMGRABBER,
|
||||||
|
UPDATE_KOMGRABBER,
|
||||||
|
)
|
||||||
|
from src.logic.download import Download, QBitDownload
|
||||||
|
from src.logic.utils import (
|
||||||
|
calculate_new_volumes,
|
||||||
|
detect_chapters,
|
||||||
|
folder_similarity,
|
||||||
|
move,
|
||||||
|
process_manga,
|
||||||
|
process_novel,
|
||||||
|
remove_empty_folders,
|
||||||
|
rename,
|
||||||
|
safe_remove_directory,
|
||||||
|
tag_folder,
|
||||||
|
time_checker,
|
||||||
|
)
|
||||||
|
|
||||||
|
config = KomConfig()
|
||||||
|
|
||||||
|
|
||||||
|
log = loguru.logger
|
||||||
|
log.remove()
|
||||||
|
log.add("logs/application.log", level="INFO", rotation="3MB", retention="1 week")
|
||||||
|
log.add("logs/cli.log", rotation="3MB", retention="1 week") # type:ignore
|
||||||
|
# log.add(sys.stdout)
|
||||||
|
Komga = KomgaAPI()
|
||||||
|
|
||||||
|
|
||||||
|
LINE_CLEAR = "\x1b[2K"
|
||||||
|
failed_items: list[str] = []
|
||||||
|
incomplete: list[str] = []
|
||||||
|
|
||||||
|
|
||||||
|
class mangaCli:
|
||||||
|
def __init__(self, library: Library) -> None:
|
||||||
|
if config.komgrabber.downloader == "qbittorrent":
|
||||||
|
self.dl = QBitDownload()
|
||||||
|
else:
|
||||||
|
self.dl = Download()
|
||||||
|
if os.path.exists(config.komgrabber.download_location):
|
||||||
|
for file in os.listdir(config.komgrabber.download_location):
|
||||||
|
try:
|
||||||
|
os.remove(f"{config.komgrabber.download_location}/{file}")
|
||||||
|
except:
|
||||||
|
shutil.rmtree(f"{config.komgrabber.download_location}/{file}")
|
||||||
|
else:
|
||||||
|
os.mkdir(config.komgrabber.download_location)
|
||||||
|
self.file = None
|
||||||
|
self.serie = ""
|
||||||
|
self.serie_id = ""
|
||||||
|
self.series_data: Series
|
||||||
|
self.volumes = []
|
||||||
|
self.library = library
|
||||||
|
self.download_path = config.komgrabber.download_location
|
||||||
|
self.cache = KomCache()
|
||||||
|
|
||||||
|
self.downloaded_files: list[tuple[str, str, str]] = []
|
||||||
|
log.success("Initialized mangaCli")
|
||||||
|
log.success("Starting search for {} library".format(self.library.name))
|
||||||
|
log.debug(f"Library ID: {self.library.id}, Library Type: {self.library.type}")
|
||||||
|
log.info("Ready to process downloads...")
|
||||||
|
|
||||||
|
def _chapter_check(self, title: str) -> bool:
|
||||||
|
if title.endswith(".cbz") or title.endswith(".cbr"):
|
||||||
|
if not re.search(r"(v\d{1,3}(-\d{1,3})?)|(Vol\. \d{1,3})", title):
|
||||||
|
return True
|
||||||
|
else:
|
||||||
|
return False
|
||||||
|
else:
|
||||||
|
return False
|
||||||
|
|
||||||
|
def _epub_check(self, title: str) -> bool:
|
||||||
|
if title.endswith(".epub"):
|
||||||
|
return True
|
||||||
|
else:
|
||||||
|
return False
|
||||||
|
|
||||||
|
def aria2_download(self, feed_url: str):
|
||||||
|
# check if download location is empty, if not, remove everything in it
|
||||||
|
if os.path.exists(self.download_path):
|
||||||
|
# force stop the download
|
||||||
|
if len(self.dl.api.get_downloads()) > 0:
|
||||||
|
self.dl.api.get_downloads()[0].remove(force=True)
|
||||||
|
time.sleep(5)
|
||||||
|
file: str = ""
|
||||||
|
file = self.dl.get_file(feed_url)
|
||||||
|
if self._chapter_check(file):
|
||||||
|
# print(f"Skipping {file}, reason: no volume number, likely a chapter")
|
||||||
|
return False
|
||||||
|
if self._epub_check(file) and self.library.type == "MANGA":
|
||||||
|
log.error("Skipping epub file, library type is MANGA")
|
||||||
|
return False
|
||||||
|
|
||||||
|
self.file = file
|
||||||
|
|
||||||
|
# print(f"Filename: {file}")
|
||||||
|
file_move = False
|
||||||
|
new_folder = None
|
||||||
|
if file.endswith(".cbz") or file.endswith(".cbr") or file.endswith(".epub"):
|
||||||
|
new_folder = Path(self.download_path, self.serie)
|
||||||
|
os.makedirs(new_folder, exist_ok=True)
|
||||||
|
file_move = True
|
||||||
|
|
||||||
|
state = self.dl.add_torrent(feed_url.split("/")[-1])
|
||||||
|
if state is False:
|
||||||
|
# print("Error adding torrent")
|
||||||
|
log.error("Error adding torrent")
|
||||||
|
return False
|
||||||
|
|
||||||
|
gid = self.dl.api.get_downloads()[0].gid
|
||||||
|
# check if the download is complete using the gid
|
||||||
|
dl_complete = True
|
||||||
|
check_done = False
|
||||||
|
while not self.dl.api.get_downloads(gids=[gid])[0].seeder:
|
||||||
|
# while not self.dl.api.get_downloads()[0].seeder:
|
||||||
|
progress = self.dl.check_progress()
|
||||||
|
progress = "{:.2f}".format(progress)
|
||||||
|
# eta = self.dl.api.get_downloads()[0].eta_string() #
|
||||||
|
# print(end=LINE_CLEAR)
|
||||||
|
# print("Progress: ", progress, "ETA: ", eta, end="\r")
|
||||||
|
# if progress remains the same for 30 seconds, stop the download
|
||||||
|
progress = self.dl.check_progress()
|
||||||
|
|
||||||
|
time.sleep(config.komgrabber.check_interval)
|
||||||
|
n_progress = self.dl.check_progress()
|
||||||
|
try:
|
||||||
|
dl_name = self.dl.api.get_downloads()[0].name
|
||||||
|
except IndexError:
|
||||||
|
log.error("No downloads found, skipping...")
|
||||||
|
return False
|
||||||
|
|
||||||
|
if not (
|
||||||
|
folder_similarity(self.serie.lower(), dl_name.lower()) > 0.8
|
||||||
|
) or not (
|
||||||
|
folder_similarity(self.series_data.name.lower(), dl_name.lower()) > 0.8
|
||||||
|
):
|
||||||
|
log.error(
|
||||||
|
f"Folder name {dl_name} does not match {self.serie}, skipping download"
|
||||||
|
)
|
||||||
|
self.dl.api.get_downloads()[0].remove(force=True)
|
||||||
|
dl_complete = False
|
||||||
|
break
|
||||||
|
|
||||||
|
if progress == n_progress:
|
||||||
|
log.debug(
|
||||||
|
"Progress has not changed for 30 seconds, stopping the download"
|
||||||
|
)
|
||||||
|
self.dl.api.get_downloads()[0].remove(force=True)
|
||||||
|
dl_complete = False
|
||||||
|
incomplete.append(dl_name)
|
||||||
|
break
|
||||||
|
else:
|
||||||
|
pass
|
||||||
|
# stop the download, remove the torrent files
|
||||||
|
try:
|
||||||
|
self.dl.api.get_downloads()[0].remove(force=True)
|
||||||
|
except:
|
||||||
|
pass
|
||||||
|
self.dl.remove_torrents()
|
||||||
|
# print(end=LINE_CLEAR)
|
||||||
|
# print("Download complete")
|
||||||
|
# self.dl.download(feed_url, file_rename=True)
|
||||||
|
if not dl_complete:
|
||||||
|
return False
|
||||||
|
if dl_complete is True:
|
||||||
|
try:
|
||||||
|
if file_move is True:
|
||||||
|
shutil.move(
|
||||||
|
Path(self.download_path, file),
|
||||||
|
f"{new_folder}/{file}",
|
||||||
|
)
|
||||||
|
except Exception as e:
|
||||||
|
print(e)
|
||||||
|
return False
|
||||||
|
return True
|
||||||
|
return False
|
||||||
|
|
||||||
|
def qbit_download(self, feed_url: str):
|
||||||
|
if (
|
||||||
|
self.dl.client.torrents_add(urls=feed_url, savepath=self.download_path)
|
||||||
|
== "Ok."
|
||||||
|
):
|
||||||
|
log.info("Torrent added to qBittorrent")
|
||||||
|
return True
|
||||||
|
else:
|
||||||
|
log.error("Failed to add torrent to qBittorrent")
|
||||||
|
return False
|
||||||
|
|
    def process_serie(self, data: Series) -> list[Torrent]:
        """Process a single serie based on its title.

        The process is as follows:
        1. get all volumes of the serie from komga using the api
        2. get all feed entries from nyaa.si using the api
        3. compare the volumes from komga with the volumes from nyaa.si
        4. if the volumes from nyaa.si are greater than the volumes from komga, add the entry to the download list.

        Args:
            data (Series): the Komga series to search new volumes for.

        Returns:
            list[Torrent]: the feed entries that contain newer volumes than the library.
        """
        log.debug(f"Searching serie: {data.name}")
        serie = data.metadata.title if data.metadata else data.name

        series_id = data.id

        vols = (
            Komga.getVolumes(series_id=series_id, unpaged=True)
            if series_id is not None
            else []
        )
        feed_titles = NyaaFeed().search(serie)
        f_d = []
        if feed_titles == []:
            failed_items.append(serie)
            log.info(f"No feed entries found for {serie}")
            return f_d
        added_max_vols = vols if vols else [0]
        for entry in feed_titles:
            valid_file_extensions = self.library.valid_extensions
            min_size = (
                config.komgrabber.manga.min_filesize
                if self.library.type == "MANGA"
                else config.komgrabber.ebook.min_filesize
            )
            if not any(
                filetype in entry.filetypes for filetype in valid_file_extensions
            ):
                log.debug(
                    f"Skipping {entry.name}, Reason: Filetype not in valid filetypes, wanted: {valid_file_extensions}, found: {entry.filetypes}"
                )
                continue
            if entry.seeders > 0:
                if (
                    serie.lower() in entry.name.lower()
                    or jaro.jaro_metric(entry.name.lower(), serie.lower()) > 0.7
                ):
                    # check if entry name is way longer than the serie name, if so, skip it
                    # if len(entry.name) > len(serie) + 60:
                    #     log.info(
                    #         f"Skipping {entry.name}, Reason: Title too long compared to series name"
                    #     )
                    #     continue
                    # get the entry with the most volumes
                    min_size = len(entry.volumes) * min_size
                    if entry.filesizes < min_size:
                        log.info(
                            f"Skipping {entry.name}, Reason: Filesize is too small"
                        )
                        continue
                    volumes = entry.volumes if entry.volumes != [] else [0]
                    if max(volumes) > max(added_max_vols):
                        f_d.append(entry)  # = entry
                        # added_max_vols = volumes

            else:
                continue
        # return entry with the most volumes
        return f_d

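    # Worked example for the filesize filter above (illustrative numbers): with a
    # per-volume minimum of 40 MB and a torrent advertising 3 volumes, the entry
    # must be at least 3 * 40 MB = 120 MB, otherwise it is skipped as likely
    # incomplete or low quality.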
    def media_grabber(self, serie: Series, bar: Optional[Any] = None) -> bool:
        log.debug(f"Processing serie: {serie.name}")
        result = self.process_serie(serie)

        total_new_volumes: list[tuple[Torrent, list[int]]] = []
        series_volumes = (
            Komga.getVolumes(series_id=serie.id, unpaged=True)
            if serie.id is not None
            else [0]
        )
        max_new_volume: int = 0
        name = serie.metadata.title if serie.metadata else serie.name
        if result is None or result == []:
            log.info(f"Could not find any new volumes for {name}")
            return False
        if bar:
            bar.text(f"Downloading new volumes for {name}...")
        for res in result:
            log.info(f"{res.name}, Volumes: {res.volumes}")

        for res in result:
            log.debug("present: {}, new: {}".format(series_volumes, res.volumes))

            new_volumes = calculate_new_volumes(series_volumes, res.volumes)
            if len(new_volumes) == 0:
                log.info(f"Skipping {res.name}, Reason: No new Volumes found")
                continue
            total_new_volumes.append((res, new_volumes))
        if len(total_new_volumes) == 0:
            log.info(f"Could not find any new volumes for {serie.name}")
            return False
        total_new_volumes = sorted(
            total_new_volumes, key=lambda x: len(x[1]), reverse=True
        )

        log.info(f"Found {len(total_new_volumes)} new results for {serie.name}")
        for res, new_volumes in total_new_volumes:
            if (
                max(new_volumes) > max(series_volumes)
                and max(new_volumes) > max_new_volume
            ):
                max_new_volume = max(new_volumes)
                log.success(
                    "Found new volumes: {} for series: {}, downloading".format(
                        new_volumes, serie.name
                    )
                )
                if config.komgrabber.downloader == "aria2":
                    if self.aria2_download(res.download_url) is True:
                        log.success(f"Downloaded {res.name}")
                        # self.rename_folder_and_files(self.file, komga_data=serie, remove=True)
                        # self.move_to_komga(serie=entry)
                        log.info("Renaming and tagging files")
                        if self.library.type == "MANGA":
                            process_manga(
                                download_path=self.download_path,
                                library=self.library,
                                serie=serie,
                            )
                        elif self.library.type == "NOVEL":
                            process_novel(
                                download_path=self.download_path,
                                library=self.library,
                                serie=serie,
                                copy=config.komgrabber.copy,
                            )
                    else:
                        log.info("Seems like we grabbed the wrong series, oops")
                        failed_items.append(name)
                    # clear folder
                    # remove the download dir and create it anew
                    remove_empty_folders(self.download_path)
                    safe_remove_directory(self.download_path)
                else:  # use qbit to handle downloads, then move them to the tag location
                    if self.qbit_download(res.download_url):
                        self.downloaded_files.append((name, res.name, "downloading"))

        return True
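    # Illustrative example of the candidate ordering above: results are sorted by
    # how many new volumes they add, so a torrent contributing volumes [4, 5, 6]
    # is tried before one contributing only [6], and the max_new_volume guard
    # keeps a later, smaller result from being downloaded on top of it.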

    def search_for_new_volumes(self, all: bool = False) -> "mangaCli":
        query = {
            "condition": {
                "allOf": [
                    {
                        "anyOf": [
                            {"seriesStatus": {"operator": "is", "value": "ONGOING"}},
                            {"seriesStatus": {"operator": "is", "value": "HIATUS"}},
                            {
                                "allOf": [
                                    {
                                        "seriesStatus": {
                                            "operator": "is",
                                            "value": "ENDED",
                                        }
                                    },
                                    {"complete": {"operator": "isFalse"}},  # ,
                                    # {"deleted": {"operator": "isFalse"}},
                                ]
                            },
                        ]
                    }
                ]
            }
        }
        if self.library.id != "":
            query["condition"]["allOf"].append(
                {
                    "libraryId": {
                        "operator": "is",
                        "value": self.library.id,
                    }
                }
            )
        series = Komga.series_controller.getAllSeries(body=query)
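        # The Komga search body built above selects series that can still grow:
        # status ONGOING, or HIATUS, or (ENDED but not yet marked complete),
        # optionally narrowed to a single library via the libraryId condition.
        # Roughly, as a flat expression:
        #     (ONGOING or HIATUS or (ENDED and not complete)) and libraryId == self.library.id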

        komga_series: list[Series] = []
        shutil.rmtree(self.download_path, ignore_errors=True)
        os.mkdir(self.download_path)
        log.debug(f"Series: {len(series)}")
        today = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime())
        today = time.mktime(time.strptime(today, "%Y-%m-%d %H:%M:%S"))

        def cache_ending(bar):  # type:ignore
            bar.title("Updating cache entries")  # type:ignore
            bar.text("Cache updated, continuing...")  # type:ignore

        def series_ending(bar):  # type:ignore
            bar.title("Completed searching for new volumes")  # type:ignore
            if config.komgrabber.downloader == "qbittorrent":
                bar.text("All series checked, waiting for downloads to finish...")  # type:ignore
                return
            bar.text("All series checked, exiting...")  # type:ignore

        def skip_ending(bar):  # type:ignore
            bar.title("Skipping series")  # type:ignore
            bar.text("Skipped series, continuing...")  # type:ignore

        if config.komgrabber.use_cache:
            log.info("Cache present, checking for missing entries")
            cacheBar = alive_it(
                series,
                finalize=cache_ending,  # type:ignore
                bar="smooth",
                spinner="dots",
                receipt_text=True,
            )
            for serie in cacheBar:
                data = self.cache.fetch_one(SELECT_KOMGRABBER, {"series_id": serie.id})
                log.debug(
                    f"Cache data: {data}, Serie: {serie.metadata.title}, Status: {serie.metadata.status}"
                )
                if data:
                    if data[3] == serie.metadata.status:
                        continue
                    elif data and data[3] != serie.metadata.status:
                        self.cache.update(
                            UPDATE_KOMGRABBER,
                            {
                                "name": serie.metadata.title,
                                "status": serie.metadata.status,
                                "series_id": serie.id,
                            },
                        )
                        log.info(f"Serie {serie.name} updated")
                        time.sleep(0.05)
                else:
                    self.cache.insert(
                        INSERT_KOMGRABBER,
                        {
                            "name": serie.name,
                            "series_id": serie.id,
                            "status": serie.metadata.status,
                        },
                    )
                    log.info(f"Serie {serie.metadata.title} added to cache")

            log.debug("Cache created, added missing entries")
            time.sleep(0.5)
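            # Assumption (not stated in the original code): the row returned by
            # SELECT_KOMGRABBER is indexed so that data[3] is the stored series
            # status column, which is what gets compared against Komga's current
            # serie.metadata.status above.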
        if all is True:
            log.info("Searching for all series in the database")
            komga_series = series
        else:
            if config.komgrabber.use_cache:
                skipBar = alive_it(
                    series,
                    bar="smooth",
                    spinner="dots",
                    receipt_text=True,
                    finalize=skip_ending,
                )
                for serie in skipBar:
                    last_checked = self.cache.fetch_one(
                        GET_LASTCHECKED_KOMGRABBER, args={"series_id": serie.id}
                    )[0]
                    log.debug(
                        f"Last checked: {last_checked}, Serie: {serie.name}, Status: {serie.metadata.status}"
                    )
                    # convert timestamp to epoch float for comparison

                    if last_checked == 0 or last_checked is None:
                        last_checked = "2024-01-01 00:00:00"
                    last_checked = time.mktime(
                        time.strptime(str(last_checked), "%Y-%m-%d %H:%M:%S")
                    )
                    # if difference between last_checked and today is less than config.komgrabber.cache_check_interval, skip entry
                    time_difference = time_checker(last_checked, today)
                    # check if the series id exists in komga
                    # if time difference is less than set in the settings and the series status is not ended and the book count is not the same as the total book count, skip the entry
                    if time_difference > config.komgrabber.cache_check_interval:
                        komga_series.append(serie)
                        log.debug(
                            f"Added {serie.name} to the checking list, as the last check was more than {time_difference} days ago"
                        )
                    else:
                        log.debug(
                            f"Skipped {serie.name} as the last check was {time_difference} days ago, but the interval is {config.komgrabber.cache_check_interval} days"
                        )
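                    # Worked example (illustrative): with cache_check_interval = 7
                    # and a series last checked 10 days ago, time_checker returns 10,
                    # 10 > 7, so the series is queued for a fresh search; at 3 days
                    # it would be skipped until the interval has passed.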

                    time.sleep(0.005)
                log.debug(len(komga_series))
                # order komga_series by series.name
                komga_series = sorted(komga_series, key=lambda x: x.name)
                log.info("Finished checking cache, continuing...")
                log.info("There are {} series to check".format(len(komga_series)))
                time.sleep(0.05)
        pBar = alive_it(
            komga_series,
            finalize=series_ending,
            title="Searching for new volumes",
        )
        for serie in pBar:
            pBar.text(f"Searching for new volumes for {serie.metadata.title}")
            log.info(
                f"searching for new volumes for {serie.metadata.title}, currently at {serie.booksCount} volumes"
            )
            self.series_data = serie

            self.serie = (
                serie.metadata.title
            )  # replaced serie.name with serie.metadata.title
            self.serie_id = serie.id
            in_komga = Komga.series_controller.getSeries(serie.id)
            if in_komga.__class__.__name__ == "Error" or in_komga.deleted:
                log.info("Series has been deleted on server, deleting in database")
                self.cache.query(
                    "DELETE FROM komgrabber WHERE series_id=:series_id",
                    {"series_id": serie.id},
                )
                continue
            in_komga = Komga.series_controller.getSeries(serie.id)
            if in_komga.__class__.__name__ == "Error" or in_komga.deleted:
                log.info("Series has been deleted on server, deleting in database")
                self.cache.query(
                    "DELETE FROM komgrabber WHERE series_id=:series_id",
                    {"series_id": serie.id},
                )
                continue
            found = self.media_grabber(serie, bar=pBar)
            if config.komgrabber.use_cache:
                if found:
                    self.cache.update(LASTCHECKED_KOMGRABBER, {"series_id": serie.id})
                    log.info(
                        f"Cache updated for {serie.metadata.title}, new volumes found"
                    )  # updated to use serie.metadata.title
                else:
                    # self.cache.update(LASTCHECKED_KOMGRABBER, {"series_id": serie.id})

                    log.critical(
                        "No new volumes found"  # f"Cache updated for {serie.metadata.title}, no new volumes found"
                    )  # updated to use serie.metadata.title
                    self.cache.update(LASTCHECKED_KOMGRABBER, {"series_id": serie.id})
        # log.info("Cache updated")
        return self
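    # The method returns self, so a one-off run can be chained fluently, mirroring
    # how search_all() below uses it, e.g.:
    #     mangaCli(library=library).search_for_new_volumes(all=True)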

    def search_for_new_series(self, series_id: str) -> bool:
        anilist = AnilistAPI()
        series = anilist.get_manga(series_id)
        if series is None:
            log.error(f"Could not find series with id {series_id}")
            return False
        komga_results = Komga.series_controller.getAllSeries(
            body={
                "condition": {
                    "anyOf": [
                        {
                            "title": {
                                "operator": "contains",
                                "value": series.title.english
                                if series.title.english
                                else series.title.romaji,
                            }
                        },
                    ]
                }
            }
        )
        if not komga_results:
            log.error(f"Could not find series with title {series.title.english}")
            Komga_fake = Series(
                name=series.title.english
                if series.title.english
                else series.title.romaji,
                booksCount=0,
                metadata={},
                booksMetadata={},
            )
            rbar = alive_it(
                [Komga_fake],
                title="Searching for new volumes",
                bar="smooth",
                spinner="dots",
                receipt_text=True,
            )
            for Komga_fake in rbar:
                rbar.text(f"Searching for new volumes for {Komga_fake.name}")
                log.info(
                    f"searching for new volumes for {Komga_fake.name}, currently at {Komga_fake.booksCount} volumes"
                )
                self.serie = Komga_fake.name
                result = self.media_grabber(Komga_fake, bar=rbar)
                if result is False:
                    rbar.title("No new volumes found")
                    log.error(f"Could not find any new volumes for {Komga_fake.name}")
                    return False

            return False
        else:
            return True

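    # Note on the fallback above: when the Anilist title is not in Komga yet, a
    # placeholder Series with booksCount=0 is fed through media_grabber, so every
    # volume found on the feed counts as "new" for a freshly requested series.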
    def search_for_series(self, serie: list[str]):
        cache_present = config.komgrabber.use_cache
        shutil.rmtree(self.download_path, ignore_errors=True)
        os.mkdir(self.download_path)
        series_request = []
        for series in serie:
            series_request.append(
                {"title": {"operator": "is", "value": series}},
            )
        request_body = {"condition": {"anyOf": series_request}}
        komga_series = Komga.series_controller.getAllSeries(body=request_body)

        def series_ending(bar):
            bar.title("Completed searching for new volumes")
            bar.text("All series checked, exiting...")

        pBar = alive_it(komga_series, finalize=series_ending)
        for serie in pBar:
            pBar.text(f"Searching for new volumes for {serie.name}")
            log.info(
                f"searching for new volumes for {serie.name}, currently at {serie.booksCount} volumes"
            )
            self.series_data = serie
            self.serie = serie.name
            self.serie_id = serie.id

            self.media_grabber(serie, bar=pBar)
            if cache_present:
                self.cache.update(LASTCHECKED_KOMGRABBER, {"series_id": serie.id})

            time.sleep(5)
            # print("done", serie.name)

        pass
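    # Illustrative request body produced above for serie=["Dr. STONE", "The Flash"]:
    #     {"condition": {"anyOf": [
    #         {"title": {"operator": "is", "value": "Dr. STONE"}},
    #         {"title": {"operator": "is", "value": "The Flash"}},
    #     ]}}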

    def wait_for_qbit(self) -> bool:
        if config.komgrabber.downloader != "qbittorrent":
            return False
        log.info("Waiting for qBittorrent to finish downloads...")
        for download in self.dl.client.torrents_info():
            if download.name in [file[0] for file in self.downloaded_files]:
                komga_name = [
                    file["komga_name"]
                    for file in self.downloaded_files
                    if file[0] == download.name
                ][0]
                if download.state == "downloading":
                    log.info(f"Download {download.name} is still in progress...")
                    return True
                elif download.state == "stalled":
                    log.info(f"Download {download.name} is stalled, deleting")
                    self.dl.client.torrents_delete(
                        delete_files=True, hashes=[download.hash]
                    )
                    self.downloaded_files.pop(
                        [
                            i
                            for i, file in enumerate(self.downloaded_files)
                            if file[0] == download.name
                        ][0]
                    )
                    incomplete.append(komga_name)
                    return True
                elif download.state == "pausedUP":
                    log.info(
                        f"Download {download.name} is done, moving to tag location"
                    )
                    shutil.move(
                        Path(config.komgrabber.download_location, download.name),
                        Path(config.komgrabber.tag_location, komga_name),
                    )
                    return True

        log.info("All downloads completed")
        return False


def avail_check():
    komga_avail = True
    return (True, komga_avail)


def search_all(library: Library, all: bool = False):
    handler = mangaCli(
        library=library,
    )
    handler.search_for_new_volumes(all)
    if config.komgrabber.downloader == "qbittorrent":
        if handler.downloaded_files == []:
            print("No downloads were added to qBittorrent, exiting...")
            return
        while handler.wait_for_qbit():
            print("Waiting for qBittorrent to finish downloads...")
            time.sleep(10)
        rename(config.komgrabber.tag_location)
        detect_chapters(config.komgrabber.tag_location)
        tag_folder(config.komgrabber.tag_location)
        move(
            config.komgrabber.tag_location,
            Path(config.komga.media_path, library.media_path),
        )

    Komga.library_controller.scanLibrary(library.id)
    print(f"Initialized scan for library {library.name}")
    print("Failed series:\n", failed_items)
    print("Incomplete series:\n", incomplete)


def search_series(library, series: list[str]):
    mangaCli(library=library).search_for_series(series)
    # update_state()
    print("Failed series:\n", failed_items)


def search_requested():
    cache = KomCache()
    series = cache.query("SELECT manga_id from manga_requests WHERE grabbed = 0")

    if series:
        for serie in series:
            result = mangaCli().search_for_new_series(int(serie[0]))
            if result:
                cache.update(
                    "UPDATE manga_requests SET grabbed = 1 WHERE manga_id = :manga_id",
                    {"manga_id": serie[0]},
                )
    else:
        print("No series found to grab")

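# Request flow used by search_requested() above (as stored in the cache database):
# pending rows are read with  SELECT manga_id FROM manga_requests WHERE grabbed = 0
# and, once a request has been grabbed successfully, flagged back with
# UPDATE manga_requests SET grabbed = 1 WHERE manga_id = :manga_id.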


if __name__ == "__main__":
    search_all()
@@ -1,15 +1,18 @@
 import os
 import re
-from komconfig import KomConfig
-from pathlib import Path
 import shutil
 import subprocess
-import jaro
-import loguru
 import sys
 import time
-from komgapi import komgapi
+from pathlib import Path
+
+import jaro
+import loguru
 from komcache import KomCache
+from komconfig import KomConfig
+from komconfig.config import Library
+from komgapi import komgapi
+from komgapi.schemas import Series
+
 cfg = KomConfig()
 
@@ -21,7 +24,7 @@ config = KomConfig()
 komga = komgapi(cfg.komga.user, cfg.komga.password, cfg.komga.url)
 
 
-def rename(folder: Path = config.komgrabber.download_location) -> None:
+def rename(folder: Path = config.komgrabber.tag_location) -> None:
     """Rename the files in a folder according to the template.
     Template: [Name] v[nr] #[nr].ext (e.g. "The Flash v1 #1.cbz").
 
@@ -61,14 +64,14 @@ def rename_recursive(folder: str) -> None:
             rename(Path(f"{root}/{dir}"))
 
 
-def tag_folder(folder: Path = config.komgrabber.download_location) -> None:
+def tag_folder(folder: Path = config.komgrabber.tag_location) -> None:
     """
     Recursively tags all the .cbz files in the folder using ComicTagger
 
     Parameters
     ----------
     folder : Path, optional
-        The path that will be used to tag, by default Path(config.komgrabber.download_location)
+        The path that will be used to tag, by default Path(config.komgrabber.tag_location)
     """
     # Get the files in the folder
     if "~" in str(folder):
@@ -88,7 +91,7 @@ def tag_folder(folder: Path = config.komgrabber.download_location) -> None:
     )
 
 
-def move(src: Path, dest: Path = Path(config.komga.media_path)) -> None:
+def move(src: Path, library_path: str) -> None:
     """
     Moves the files from the source folder to the destination folder.
     If the folder already exists in the destination, only move the new files.
@@ -97,12 +100,13 @@ def move(src: Path, dest: Path = Path(config.komga.media_path)) -> None:
     ----------
     src : Path
         The source folder
-    dest : Path, optional
-        The destination folder used by Komga, by default Path(config.komga.media_path)
+    dest : str
+        The destination folder used by Komga for the library, set in config file, defaults to "Manga"
     """
     # Get the files in the folder
     # +move the folders from src to disc, if folder already exists, only move new files
 
+    dest = Path(config.komga.media_path, library_path)
     folders = os.listdir(src)
     for folder in folders:
         if not os.path.exists(f"{dest}/{folder}"):
@@ -116,6 +120,9 @@ def move(src: Path, dest: Path = Path(config.komga.media_path)) -> None:
         else:
             files = os.listdir(f"{src}/{folder}")
             for file in files:
+                if file.startswith("."):
+                    log.debug(f"Skipping hidden file {file}")
+                    continue
                 if not os.path.exists(f"{dest}/{folder}/{file}"):
                     log.info(f"Moving {file} to {dest}/{folder}")
                     shutil.move(f"{src}/{folder}/{file}", f"{dest}/{folder}")
@@ -123,7 +130,7 @@ def move(src: Path, dest: Path = Path(config.komga.media_path)) -> None:
     remove_empty_folders(src)
 
 
-def remove_empty_folders(src):
+def remove_empty_folders(src: Path):
     """
     Recursively removes empty folders in the source folder
 
@@ -140,36 +147,67 @@ def remove_empty_folders(src):
             log.info(f"Removing {folder}")
             os.rmdir(f"{src}/{folder}")
         else:
-            remove_empty_folders(f"{src}/{folder}")
+            newPath = Path(f"{src}/{folder}")
+            remove_empty_folders(newPath)
 
 
-def detect_chapters(src: Path = config.komgrabber.download_location) -> None:
+def detect_chapters(
+    src: Path = config.komgrabber.tag_location, valid_extension: str = "cbz|epub"
+) -> None:
     """
     Detects and deletes any non-volume file in the source folder
 
     Parameters
     ----------
     src : Path, optional
-        The Path to be checked, by default Path(config.komgrabber.download_location)
+        The Path to be checked, by default Path(config.komgrabber.tag_location)
     """
+    log.info(f"Checking {src} for chapters")
+
+    regex = re.compile(rf"^.* v(\d+) #(\d+(?:-\d+)?)\.({valid_extension})$")
     for folder in os.listdir(src):
         if os.path.isdir(f"{src}/{folder}"):
             files = os.listdir(f"{src}/{folder}")
             for file in files:
+                if os.path.isdir(f"{src}/{folder}/{file}"):
+                    folder_files = os.listdir(f"{src}/{folder}/{file}")
+                    for folder_file in folder_files:
+                        # check for regex "v(d) #(d)" in the file name
+                        if regex.search(folder_file):
+                            log.debug(f"File {folder_file} is a Volume")
+                        else:
+                            log.info(f"Deleting chapter {folder_file}")
+                            if os.path.isfile(f"{src}/{folder}/{file}/{folder_file}"):
+                                os.remove(f"{src}/{folder}/{file}/{folder_file}")
+                            else:
+                                shutil.rmtree(f"{src}/{folder}/{file}/{folder_file}")
                 # check for regex "v(d) #(d)" in the file name
-                regex = re.compile(r"^.* v(\d+) #(\d+(?:-\d+)?)\.cbz$")
                 if regex.search(file):
                     log.debug(f"File {file} is a Volume")
                 else:
-                    log.debug(f"Deleting chapter {file}")
+                    log.info(f"Deleting chapter {file}")
-                    if os.path.isdir(f"{src}/{folder}/{file}"):
+                    if os.path.isfile(f"{src}/{folder}/{file}"):
-                        shutil.rmtree(f"{src}/{folder}/{file}")
-                    else:
                         os.remove(f"{src}/{folder}/{file}")
+                    else:
+                        if os.path.isdir(f"{src}/{folder}/{file}"):
+                            for subfile in os.listdir(f"{src}/{folder}/{file}"):
+                                if regex.search(subfile):
+                                    log.debug(f"File {subfile} is a Volume")
+                                else:
+                                    log.info(f"Deleting chapter {subfile}")
+                                    if os.path.isfile(
+                                        f"{src}/{folder}/{file}/{subfile}"
+                                    ):
+                                        os.remove(f"{src}/{folder}/{file}/{subfile}")
+                                    else:
+                                        shutil.rmtree(
+                                            f"{src}/{folder}/{file}/{subfile}"
+                                        )
+                        else:
+                            shutil.rmtree(f"{src}/{folder}/{file}")
 
 
-def folder_similarity(folder1, folder2) -> float:
+def folder_similarity(folder1: str, folder2: str) -> float:
     """
     Calculate the similarity between two folder names using Jaro-Winkler distance.
 
@@ -184,7 +222,9 @@ def folder_similarity(folder1, folder2) -> float:
     return similarity
 
 
-def rename_folder(src=config.komgrabber.download_location, series=None) -> bool:
+def rename_folder(
+    src: Path = config.komgrabber.tag_location, series: Series = None
+) -> bool:
     renamer_regex = r"(\s*\([^)]*\))+$"
     for folder in os.listdir(src):
         if os.path.isdir(f"{src}/{folder}"):
@@ -235,11 +275,16 @@ def calculate_new_volumes(
     present_volumes: list[int], new_volumes: list[int]
 ) -> list[int]:
     if len(new_volumes) == 1:
-        if max(new_volumes) > max(present_volumes):
+        if len(present_volumes) == 0:
             return new_volumes
+        if max(new_volumes) > max(present_volumes):
+            # return any new volume that is not in present volumes
+            return [v for v in new_volumes if v not in present_volumes]
         else:
             return []
     else:
+        if len(present_volumes) == 0:
+            return new_volumes
         new_volumes = sorted(new_volumes)
         new_volumes = [i for i in new_volumes if i > max(present_volumes)]
         if len(new_volumes) == 0:
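# Doctest-style illustration of calculate_new_volumes() as changed above:
#     >>> calculate_new_volumes([1, 2, 3], [2, 3, 4, 5])
#     [4, 5]
#     >>> calculate_new_volumes([], [7])
#     [7]
#     >>> calculate_new_volumes([1, 2, 3], [2])
#     []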
@@ -283,3 +328,138 @@ def get_series_update_date(series_name: str) -> str:
         args=(series_name,),
     )
     print(update_date)
+
+
+def process_manga(download_path: Path, library: Library, serie: Series) -> None:
+    """Process the downloaded manga: rename files, detect chapters, tag, rename folder, and move to library."""
+
+    rename(download_path)
+    if not config.komgrabber.get_chapters:
+        detect_chapters(download_path, "|".join(library.valid_extensions))
+    tag_folder(download_path)
+    if rename_folder(series=serie, src=download_path):
+        move(
+            download_path,
+            library.media_path,
+        )
+
+
+def process_novel(
+    download_path: Path, library: Library, serie: Series, copy: bool = False
+) -> None:
+    """Process the downloaded novel: rename files, tag, rename folder, and move to library."""
+
+    # rename the folder to the series name
+    folder = os.listdir(download_path)[0]
+    series_name = serie.name
+    # remove all files that are not valid extensions
+    valid_extensions = library.valid_extensions
+    # flatten subfolders and subsubfolders
+    for root, dirs, files in os.walk(f"{download_path}/{folder}"):
+        for dir in dirs:
+            for file in os.listdir(f"{root}/{dir}"):
+                if file.startswith("."):
+                    log.debug(f"Skipping hidden file {file}")
+                    continue
+                log.info(f"Moving {file} to {download_path}/{folder}")
+                shutil.move(f"{root}/{dir}/{file}", f"{download_path}/{folder}")
+            os.rmdir(f"{root}/{dir}")
+
+    # removing invalid extensions
+    for file in os.listdir(f"{download_path}/{folder}"):
+        if not any(file.endswith(ext) for ext in valid_extensions):
+            log.info(f"Removing {file} as it is not a valid extension")
+            if os.path.isfile(f"{download_path}/{folder}/{file}"):
+                os.remove(f"{download_path}/{folder}/{file}")
+            else:
+                shutil.rmtree(f"{download_path}/{folder}/{file}")
+
+    # rename files to remove all [] and text within
+    for file in os.listdir(f"{download_path}/{folder}"):
+        filename = file.split(".")[0]
+
+        if f"{series_name} - Volume" in filename:
+            log.debug(f"Skipping {file}, already renamed")
+            continue
+        # extract the volume number, may be a float, either v1, v1.5, v01, v01.5, vol.1, vol.01, vol.1.5, vol.01.5, Vol.1, Vol.01, Vol.1.5, Vol.01.5, Volume 1, Volume 01, Volume 1.5, Volume 01.5
+        regex_volume_pattern = r"(v|vol\.|Vol\.|Volume\s)(\d+(\.\d+)?)"
+        match = re.search(regex_volume_pattern, file, re.IGNORECASE)
+        # from the match, get the volume number
+        volume = match.group(2) if match else None
+
+        # rename the file to series name v(volume).ext
+        ext = file.split(".")[-1]
+        # if volume is not null and less than 10, pad with a 0
+        if volume and float(volume) < 10:
+            volume = f"0{volume}"
+        if volume and "00" in volume:
+            volume = volume.replace("00", "0")
+        fixed = (
+            f"{series_name} - Volume {volume}.{ext}"
+            if volume
+            else f"{series_name}.{ext}"
+        )
+        log.debug(f"Renaming {file} to {fixed}")
+        os.rename(
+            f"{download_path}/{folder}/{file}", f"{download_path}/{folder}/{fixed}"
+        )
+    # flatten subfolders
+
+    os.rename(f"{download_path}/{folder}", f"{download_path}/{series_name}")
+    dest = Path(config.komga.media_path, library.media_path)
+    folders = os.listdir(download_path)
+    log.info(f"Moving {folders} to {dest}")
+    for folder in folders:
+        log.info(f"Processing folder {folder}")
+        time.sleep(1)
+        if not os.path.exists(f"{dest}/{folder}"):
+            log.info(f"Moving {folder} to {dest}")
+            os.mkdir(f"{dest}/{folder}")
+            files = os.listdir(f"{download_path}/{folder}")
+            for file in files:
+                time.sleep(1)
+                log.debug(f"Moving {file} to {dest}/{folder}")
+                if copy:
+                    # copy file to komgrabber tag location
+                    copy_location = config.komgrabber.copy_location
+                    if not os.path.exists(f"{copy_location}"):
+                        os.mkdir(f"{copy_location}")
+                    shutil.copy(
+                        f"{download_path}/{folder}/{file}",
+                        f"{copy_location}/{file}",
+                    )
+                    log.debug(
+                        f"Copied from {download_path}/{folder}/{file} to {copy_location}/{file}"
+                    )
+                shutil.move(f"{download_path}/{folder}/{file}", f"{dest}/{folder}")
+            # shutil.move(f"{src}/{folder}", dest)
+        else:
+            files = os.listdir(f"{download_path}/{folder}")
+            for file in files:
+                time.sleep(1)
+                log.debug(f"Processing file {file}")
+                if file.startswith("."):
+                    log.debug(f"Skipping hidden file {file}")
+                    continue
+                if not os.path.exists(f"{dest}/{folder}/{file}"):
+                    log.debug(f"Moving {file} to {dest}/{folder}")
+                    if copy:
+                        # copy file to komgrabber tag location
+                        copy_location = config.komgrabber.copy_location
+                        if not os.path.exists(f"{copy_location}/{folder}"):
+                            os.mkdir(f"{copy_location}")
+                        shutil.copy(
+                            f"{download_path}/{folder}/{file}",
+                            f"{copy_location}/{file}",
+                        )
+                        log.debug(
+                            f"Copied from {download_path}/{folder}/{file} to {copy_location}/{file}"
+                        )
+                    shutil.move(f"{download_path}/{folder}/{file}", f"{dest}/{folder}")
+
+    log.info("Finished moving files, removing empty folders")
+    remove_empty_folders(download_path)
+
+
+if __name__ == "__main__":
+    print(folder_similarity("Dr. STONE (2018-2023) (Digital) (1r0n)", "Dr. STONE"))
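# Illustrative examples for the helpers above (not part of the commit):
# - detect_chapters() keeps only files matching the volume template, so
#   "The Flash v1 #1.cbz" survives while a loose "The Flash 012.cbz" chapter file
#   is deleted.
# - process_novel()'s volume regex, assuming serie.name == "Title", turns
#   "[Group] Title Vol.3.epub" into "Title - Volume 03.epub": "3" is extracted,
#   padded to "03", and the file is renamed to the "<series> - Volume <nr>.<ext>"
#   pattern.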