many changes

2025-09-28 20:03:18 +02:00
parent f455918ef4
commit 4d0d412d38
8 changed files with 632 additions and 297 deletions

View File

@@ -5,10 +5,14 @@ description = "Add your description here"
readme = "README.md"
requires-python = ">=3.13"
dependencies = [
"alive-progress>=3.2.0",
"anilistapi",
"aria2p>=0.12.1",
"bencodepy>=0.9.5",
"feedparser>=6.0.11",
"httpx-retries>=0.3.2",
"jaro-winkler>=2.0.3",
"komcache",
"komconfig",
"komgapi",
"komsuite-nyaapy",
@@ -16,9 +20,18 @@ dependencies = [
"loguru>=0.7.3",
"natsort>=8.4.0",
"omegaconf>=2.3.0",
"qbittorrent-api>=2025.7.0",
"regex>=2024.11.6",
]
[tool.uv.sources]
komgapi = { workspace = true }
komsuite-nyaapy = { workspace = true }
komcache = { workspace = true }
komconfig = { workspace = true }
anilistapi = { workspace = true }
[dependency-groups]
test = [
"click>=8.1.8",
]

View File

@@ -1,9 +1,8 @@
import time
import loguru
import regex
from komconfig import KomConfig
from komsuite_nyaapy import Nyaa, Torrent
from natsort import natsorted
import re
from komconfig import KomConfig
import loguru
log = loguru.logger
log.add("logs/nyaasi.log", rotation="1 week")
@@ -27,22 +26,52 @@ class NyaaFeed:
return natsorted(volumes)
def search(self, title: str) -> list[Torrent]:
def __search(title: str):
return Nyaa().search(title, 3, 1)
if "#" in title:
# replace # with whitespace
title = title.replace("#", " ")
regex = r"\b(v\d{2,3}-\d{2,3}|v\d{2,3}-v\d{2,3}|\d{2}|\d{2}-\d{2}|v\d{2,3})\b"
def __search(title: str):
result = Nyaa().search(title, category=3, subcategory=1)
return result
matchregex = (
r"\b(v\d{2,3}-\d{2,3}|v\d{2,3}-v\d{2,3}|\d{2}|\d{2}-\d{2}|v\d{2,3})\b"
)
dataset = None
while dataset is None:
try:
log.debug("Searching for {}".format(title))
dataset = __search(title)
except:
time.sleep(5)
dataset = [
entry
for entry in dataset
if not entry.download_url.startswith("magnet:")
]
datalist = []
log.debug("Found {} results".format(len(dataset)))
return dataset if len(dataset) < 5 else dataset[:5]
# take first 5 results
# if dataset and len(dataset) > 5:
# dataset = dataset[:5]
except Exception as e:
log.error("Error: {}".format(e))
return []
datalist: list[Torrent] = []
if dataset is None:
return datalist
for entry in dataset:
if config.komgrabber.get_chapters is False:
# check if the title has a ch## in it, if so skip
chapter_regex = r"\bch\d+\b"
match = regex.search(chapter_regex, entry.name.lower())
if match:
log.info(
"Skipping {}, Reason: Chapters disabled".format(entry.name)
)
continue
if "chapter" in entry.name.lower():
log.info(
"Skipping {}, Reason: Chapters disabled".format(entry.name)
@@ -54,35 +83,38 @@ class NyaaFeed:
if any(x in name.lower() for x in skip_parameters):
log.info("Skipping {}".format(name))
continue
volumes = re.findall(regex, name)
volumes = regex.findall(matchregex, name)
try:
match = re.match(r"^(.*?)\s(vol\.\s\d{2})|(v\d{2,3})", name.lower())
match = regex.match(r"^(.*?)\s(vol\.\s\d{2})|(v\d{2,3})", name.lower())
if match:
name = match.group(1)
except AttributeError:
# chapter check
try:
match = re.findall(r"(?<!\d)\d{2,3}(?!\d)", name)
match = regex.findall(r"(?<!\d)\d{2,3}(?!\d)", name)
print("Matched chapter: {}".format(match))
print("Found Chapters only, skipping")
continue
except AttributeError:
pass
volumes = self.list_volumes(volumes) if volumes else [0]
download_url = entry.download_url
seeders = entry.seeders
size = entry.size
data = Torrent(
name=name,
download_url=download_url,
seeders=seeders,
size=size,
volumes=volumes,
)
# volumes = self.list_volumes(volumes) if volumes else []
# download_url = entry.download_url
# seeders = entry.seeders
# size = entry.size
# data = Torrent(
# name=name,
# download_url=download_url,
# seeders=seeders,
# size=size,
# volumes=volumes,
# filetypes=entry.filetypes,
# contents=entry.contents,
# )
# print(data)
datalist.append(data)
log.debug("Found: {}, volumes: {}".format(data.name, data.volumes))
if entry.volumes == []:
continue
datalist.append(entry)
log.debug("Found: {}, volumes: {}".format(entry.name, entry.volumes))
log.success("Found {} entries".format(len(datalist)))
return datalist
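
For reference, a minimal sketch of what the volume-matching pattern introduced above (matchregex) returns for a few typical release titles; the titles are made up and only illustrate regex.findall() behaviour:

import regex

matchregex = (
    r"\b(v\d{2,3}-\d{2,3}|v\d{2,3}-v\d{2,3}|\d{2}|\d{2}-\d{2}|v\d{2,3})\b"
)

# Hypothetical titles, purely illustrative.
print(regex.findall(matchregex, "Some Manga v01-v24 (Digital) (Group)"))  # ['v01-v24']
print(regex.findall(matchregex, "Some Manga v12 (Digital)"))              # ['v12']
print(regex.findall(matchregex, "Some Manga 07 (Digital)"))               # ['07']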

View File

@@ -1,35 +1,44 @@
import json
import requests
from komgapi import komgapi as KOMGAPI_REST
from src.schema.series import SeriesMetadata
from src.logs.log import Log
from komconfig import KomConfig
from komgapi import komgapi as KOMGAPI_REST
from src.logs.log import Log
from src.schema.series import SeriesMetadata
config = KomConfig()
class KomgaError(Exception):
"""Custom exception for Komga API errors."""
pass
class KomgaAPI(KOMGAPI_REST):
def __init__(self) -> None:
self.logger = Log("KomgaAPI")
url = config.komga.url
self.auth = config.komga_auth
self.auth = config.komga.api_key
super().__init__(
url=url,
username=self.auth[0],
password=self.auth[1],
api_key=self.auth,
timeout=100,
)
self.connected = self.test_connection(url)
if not self.connected:
print("Komga API not connected")
raise KomgaError("Komga API not connected")
exit(1)
def test_connection(self, url) -> bool:
try:
response = requests.get(f"{url}/api/v1/series", auth=self.auth)
if isinstance(self.auth, tuple):
response = requests.get(f"{url}/api/v1/series", auth=self.auth)
else:
response = requests.get(
f"{url}/api/v1/series", headers={"X-Api-Key": self.auth}
)
if response.status_code == 200:
return True
return False
@@ -87,14 +96,7 @@ class KomgaAPI(KOMGAPI_REST):
def getVolumes(
self,
series_id: str,
media_status: list[str] = None,
read_status: list[str] = None,
tag: list[str] = None,
unpaged: bool = True,
page_integer: int = None,
size: int = None,
sort: list[str] = None,
author: list[str] = None,
) -> list[int]:
"""Get a list of all volumes matching the given criteria.
@@ -111,20 +113,14 @@ class KomgaAPI(KOMGAPI_REST):
author (list[str], optional): name,role. Defaults to None.
Returns:
list[int]: _description_
list[int]: The list of volumes for the given series_id
"""
volumes = []
# api_url=f'{self.url}/api/v1/series/{self._get_series_id(title)}/books'
result = self.series_controller.getSeriesBooks(
series_id,
media_status=media_status,
read_status=read_status,
tag=tag,
result = self.book_controller.listBooks(
unpaged=unpaged,
page=page_integer,
size=size,
sort=sort,
query={"condition": {"seriesId": {"operator": "is", "value": series_id}}},
)
if result is None:
return volumes
@@ -140,7 +136,14 @@ class KomgaAPI(KOMGAPI_REST):
m_numba = max(numba)
vol = m_numba
volumes.append(int(float(vol)))
return volumes
if volumes == []:
try:
series = self.series_controller.getSeries(series_id)
volumes = [i for i in range(1, series.booksCount + 1)]
except Exception as e:
self.logger.log_error(f"Error getting volumes: {e}")
return []
return sorted(volumes)
def getReadCount(self, series_id: str) -> int:
"""Get the number of read volumes of a series."""

View File

@@ -1,23 +1,28 @@
class ConnectError(Exception):
def __init__(self, message):
super().__init__(message)
class LoginError(Exception):
def __init__(self, message):
super().__init__(message)
class AccessError(Exception):
def __init__(self, message):
super().__init__(message)
class JSONError(Exception):
def __init__(self, message):
super().__init__(message)
class ResponseError(Exception):
def __init__(self, message):
super().__init__(message)
class ResultError(Exception):
def __init__(self, message):
super().__init__(message)
super().__init__(message)

View File

@@ -6,27 +6,23 @@ CREATE TABLE IF NOT EXISTS komgrabber (
status TEXT NOT NULL,
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
last_checked TIMESTAMP DEFAULT 0
);
"""
last_checked TIMESTAMP DEFAULT 0,
complete INTEGER DEFAULT 0
);"""
INSERT_KOMGRABBER = """
INSERT INTO komgrabber (name, series_id, status)
VALUES (?, ?, ?);
VALUES (:name, :series_id, :status);
"""
SELECT_KOMGRABBER = """
SELECT * FROM komgrabber WHERE series_id = ?;
"""
SELECT * FROM komgrabber WHERE series_id = :series_id;"""
UPDATE_KOMGRABBER = """
UPDATE komgrabber
SET name = ?, status = ?, updated_at = CURRENT_TIMESTAMP
WHERE series_id = ?;
"""
SET name = :name, status = :status, updated_at = CURRENT_TIMESTAMP
WHERE series_id = :series_id;"""
LASTCHECKED_KOMGRABBER = """
UPDATE komgrabber
SET last_checked = CURRENT_TIMESTAMP
WHERE series_id = ?;
"""
WHERE series_id = :series_id;"""
GET_LASTCHECKED_KOMGRABBER = """
SELECT last_checked FROM komgrabber WHERE series_id = ?;
"""
SELECT last_checked FROM komgrabber WHERE series_id = :series_id;"""
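
These statements now use sqlite's named-placeholder style, so callers bind a dict instead of a positional tuple (the cli changes below pass dicts such as {"series_id": serie.id} through the KomCache wrapper). A standalone sketch with the standard sqlite3 module, using a trimmed illustrative table rather than the real schema:

import sqlite3

conn = sqlite3.connect(":memory:")
# Trimmed stand-in for the komgrabber table, illustration only.
conn.execute("CREATE TABLE komgrabber (name TEXT, series_id TEXT, status TEXT)")
conn.execute(
    "INSERT INTO komgrabber (name, series_id, status) VALUES (:name, :series_id, :status)",
    {"name": "Some Series", "series_id": "0ABC123", "status": "ONGOING"},
)
row = conn.execute(
    "SELECT * FROM komgrabber WHERE series_id = :series_id",
    {"series_id": "0ABC123"},
).fetchone()
print(row)  # ('Some Series', '0ABC123', 'ONGOING')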

View File

@@ -1,36 +1,42 @@
import sys
import os
import time
import bencodepy
from .utils import rename
from aria2p import Client, API
from pathlib import Path
import loguru
import sys
import time
from pathlib import Path
import bencodepy
import loguru
import qbittorrentapi
from aria2p import API, Client
from komconfig import KomConfig
from .utils import rename
log = loguru.logger
log.remove()
log.add("application.log", rotation="1 week", retention="1 month")
# log.add(sys.stdout, level='INFO')
config = KomConfig()
class Download:
"""Download a file from a url and start the download using aria2"""
def __init__(self, download_location: Path) -> None:
def __init__(
self, download_location: Path = config.komgrabber.download_location
) -> None:
# self.download_location needs to be a string
self.download_location = download_location.__str__()
self.filename = None
self.torrent_file = None
self.progress = 0
self.canceled = False
self.aria2_running = self.check_aria2()
self.aria2_running = self.check_online()
self.api = API(
client=Client(
host="http://localhost",
port=6800,
secret="",
timeout=60,
host=config.komgrabber.downloader_settings.host,
port=config.komgrabber.downloader_settings.port,
secret=config.komgrabber.downloader_settings.secret,
timeout=config.komgrabber.downloader_settings.timeout,
)
)
self.api.set_global_options({"dir": self.download_location})
@@ -38,7 +44,7 @@ class Download:
log.error("Aria2 is not running")
sys.exit()
def check_aria2(self):
def check_online(self):
# check if aria2 is running
if os.system("ps -A | grep aria2c > /dev/null 2>&1") == 0:
return True
@@ -59,10 +65,6 @@ class Download:
# use wget to download the file to the download location
name = url.split("/")[-1]
dl_url = self.download_location
# while self.get_filename(dl_url) is None:
# if not os.path.exists(dl_url):
# call os.system(f"wget -P {dl_url} {url}"), but suppress output
os.system(f"wget -P {dl_url} {url} > /dev/null 2>&1")
while not os.path.exists(dl_url):
time.sleep(1)
@@ -111,3 +113,33 @@ class Download:
return torrent[b"info"][b"name"].decode("utf-8")
except FileNotFoundError:
return None
class QBitDownload:
def __init__(self) -> None:
self.category = config.komgrabber.downloader_settings.category
self.client = qbittorrentapi.Client(
host=config.komgrabber.downloader_settings.host,
port=config.komgrabber.downloader_settings.port,
username=config.komgrabber.downloader_settings.username,
password=config.komgrabber.downloader_settings.password,
)
if not self.check_online():
raise Exception("qBittorrent is not running or login failed")
def check_online(self):
try:
self.client.auth_log_in()
return True
except qbittorrentapi.LoginFailed as e:
log.error(f"Login failed: {e}")
return False
def add_torrent(self, url):
try:
self.client.torrents_add(urls=url, category=self.category)
log.info("Torrent added")
return True
except Exception as e:
log.error(f"Error adding torrent: {e}")
return False
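
A minimal usage sketch for the new QBitDownload class, assuming qBittorrent's Web UI is reachable with the host/port/credentials from komgrabber.downloader_settings; the torrent URL below is a placeholder:

# Raises if the Web UI is unreachable or the login fails (see check_online above).
qbit = QBitDownload()

# Placeholder URL, purely illustrative; real calls pass the Nyaa download_url.
qbit.add_torrent("https://example.org/some-release.torrent")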

View File

@@ -2,50 +2,50 @@ import os
import re
import shutil
import time
from pathlib import Path
from typing import Any, Optional
import jaro
from src.data.komga import KomgaAPI
from komgapi.schemas.Series import Series
from src.data.Feeds.nyaasi import NyaaFeed
from komsuite_nyaapy import Torrent
from src.logic.download import Download
from komconfig import KomConfig
import loguru
from alive_progress import alive_it
from anilistapi import AnilistAPI
from komcache import KomCache
from src.logic.utils import (
detect_chapters,
rename,
tag_folder,
move,
rename_folder,
remove_empty_folders,
time_checker,
folder_similarity,
calculate_new_volumes,
safe_remove_directory,
)
from komconfig import KomConfig
from komconfig.config import Library
from komgapi.schemas.Series import Series
from komsuite_nyaapy import Torrent
from src.data.Feeds.nyaasi import NyaaFeed
from src.data.komga import KomgaAPI
from src.logic.db_schemas import (
KOMGRABBER_TABLE,
GET_LASTCHECKED_KOMGRABBER,
INSERT_KOMGRABBER,
LASTCHECKED_KOMGRABBER,
SELECT_KOMGRABBER,
UPDATE_KOMGRABBER,
LASTCHECKED_KOMGRABBER,
GET_LASTCHECKED_KOMGRABBER,
)
from src.logic.db_schemas_mariadb import (
MARIADB_KOMGRABBER_TABLE
from src.logic.download import Download, QBitDownload
from src.logic.utils import (
calculate_new_volumes,
detect_chapters,
folder_similarity,
move,
process_manga,
process_novel,
remove_empty_folders,
rename,
safe_remove_directory,
tag_folder,
time_checker,
)
import loguru
from pathlib import Path
from alive_progress import alive_it
from typing import Any, Optional
from anilistapi import AnilistAPI
config = KomConfig()
log = loguru.logger
log.remove()
log.add("logs/application.log", level="INFO", rotation="15MB", retention="1 week")
log.add("logs/cli.log", rotation="15MB", retention="1 week") # type:ignore
log.add("logs/application.log", level="INFO", rotation="3MB", retention="1 week")
log.add("logs/cli.log", rotation="3MB", retention="1 week") # type:ignore
# log.add(sys.stdout)
Komga = KomgaAPI()
@@ -56,8 +56,11 @@ incomplete: list[str] = []
class mangaCli:
def __init__(self, library_id: str = "") -> None:
self.dl = Download(config.komgrabber.download_location)
def __init__(self, library: Library) -> None:
if config.komgrabber.downloader == "qbittorrent":
self.dl = QBitDownload()
else:
self.dl = Download()
if os.path.exists(config.komgrabber.download_location):
for file in os.listdir(config.komgrabber.download_location):
try:
@@ -71,41 +74,45 @@ class mangaCli:
self.serie_id = ""
self.series_data: Series
self.volumes = []
self.library = library
self.download_path = config.komgrabber.download_location
self.library_id = library_id
self.cache = KomCache()
pass
def download(self, feed_url: str):
def __chapter_check(title: str) -> bool:
if title.endswith(".cbz") or title.endswith(".cbr"):
if not re.search(r"(v\d{1,3}(-\d{1,3})?)|(Vol\. \d{1,3})", title):
return True
else:
return False
else:
return False
self.downloaded_files: list[tuple[str, str, str]] = []
log.success("Initialized mangaCli")
log.success("Starting search for {} library".format(self.library.name))
log.debug(f"Library ID: {self.library.id}, Library Type: {self.library.type}")
log.info("Ready to process downloads...")
def __epub_check(title: str) -> bool:
if title.endswith(".epub"):
def _chapter_check(self, title: str) -> bool:
if title.endswith(".cbz") or title.endswith(".cbr"):
if not re.search(r"(v\d{1,3}(-\d{1,3})?)|(Vol\. \d{1,3})", title):
return True
else:
return False
else:
return False
def _epub_check(self, title: str) -> bool:
if title.endswith(".epub"):
return True
else:
return False
def aria2_download(self, feed_url: str):
# check if download location is empty, if not, remove everything in it
if os.path.exists(self.download_path):
# force stop the download
if len(self.dl.api.get_downloads()) > 0:
self.dl.api.get_downloads()[0].remove(force=True)
time.sleep(5)
file: str
file: str = ""
file = self.dl.get_file(feed_url)
if __chapter_check(file):
if self._chapter_check(file):
# print(f"Skipping {file}, reason: no volume number, likely a chapter")
return False
if __epub_check(file):
# print(f"Skipping {file}, reason: epub file")
if self._epub_check(file) and self.library.type == "MANGA":
log.error("Skipping epub file, library type is MANGA")
return False
self.file = file
@@ -113,7 +120,7 @@ class mangaCli:
# print(f"Filename: {file}")
file_move = False
new_folder = None
if file.endswith(".cbz") or file.endswith(".cbr"):
if file.endswith(".cbz") or file.endswith(".cbr") or file.endswith(".epub"):
new_folder = Path(self.download_path, self.serie)
os.makedirs(new_folder, exist_ok=True)
file_move = True
@@ -121,6 +128,7 @@ class mangaCli:
state = self.dl.add_torrent(feed_url.split("/")[-1])
if state is False:
# print("Error adding torrent")
log.error("Error adding torrent")
return False
gid = self.dl.api.get_downloads()[0].gid
@@ -137,15 +145,19 @@ class mangaCli:
# if progress remains the same for 30 seconds, stop the download
progress = self.dl.check_progress()
time.sleep(90)
time.sleep(config.komgrabber.check_interval)
n_progress = self.dl.check_progress()
try:
dl_name = self.dl.api.get_downloads()[0].name
except IndexError:
log.error("No downloads found, skipping...")
return False
if not folder_similarity(self.serie.lower(), dl_name.lower()) > 0.8:
if not (
folder_similarity(self.serie.lower(), dl_name.lower()) > 0.8
) or not (
folder_similarity(self.series_data.name.lower(), dl_name.lower()) > 0.8
):
log.error(
f"Folder name {dl_name} does not match {self.serie}, skipping download"
)
@@ -187,6 +199,17 @@ class mangaCli:
return True
return False
def qbit_download(self, feed_url: str):
if (
self.dl.client.torrents_add(urls=feed_url, savepath=self.download_path)
== "Ok."
):
log.info("Torrent added to qBittorrent")
return True
else:
log.error("Failed to add torrent to qBittorrent")
return False
def process_serie(self, data: Series) -> list[Torrent]:
"""Process a single serie based on its title.
The process is as follows:
@@ -196,14 +219,15 @@ class mangaCli:
4. if the volumes from nyaa.si are greater than the volumes from komga, add the entry to the download list.
Args:
- data (dict): a dict containing the title of the serie at ["title"] and the id of the serie at ["id"]
#! TODO: rewrite this docstring
Returns:
- list[dict]: a list of dictionaries containing the entries to download
"""
serie = data.name
log.debug(f"Searching serie: {data.name}")
serie = data.metadata.title if data.metadata else data.name
series_id = data.id
vols = (
@@ -215,14 +239,21 @@ class mangaCli:
f_d = []
if feed_titles == []:
failed_items.append(serie)
log.info(f"No feed entries found for {serie}")
return f_d
added_max_vols = vols if vols else [0]
for entry in feed_titles:
valid_file_extensions = self.library.valid_extensions
min_size = (
config.komgrabber.manga.min_filesize
if self.library.type == "MANGA"
else config.komgrabber.ebook.min_filesize
)
if not any(
filetype in entry.filetypes
for filetype in config.komgrabber.manga.valid_file_extensions
filetype in entry.filetypes for filetype in valid_file_extensions
):
log.info(
f"Skipping {entry.name}, Reason: Filetype not in valid filetypes, wanted: {config.komgrabber.manga.valid_file_extensions}, found: {entry.filetypes}"
log.debug(
f"Skipping {entry.name}, Reason: Filetype not in valid filetypes, wanted: {valid_file_extensions}, found: {entry.filetypes}"
)
continue
if entry.seeders > 0:
@@ -230,15 +261,20 @@ class mangaCli:
serie.lower() in entry.name.lower()
or jaro.jaro_metric(entry.name.lower(), serie.lower()) > 0.7
):
# check if entry name is way longer than the serie name, if so, skip it
# if len(entry.name) > len(serie) + 60:
# log.info(
# f"Skipping {entry.name}, Reason: Title too long compared to series name"
# )
# continue
# get the entry with the most volumes
filesizes = entry.filesizes
volumes = entry.volumes
min_size = len(volumes) * config.komgrabber.manga.min_filesize
if filesizes < min_size:
required_size = len(entry.volumes) * min_size
if entry.filesizes < required_size:
log.info(
f"Skipping {entry.name}, Reason: Filesize is too small"
)
continue
volumes = entry.volumes if entry.volumes != [] else [0]
if max(volumes) > max(added_max_vols):
f_d.append(entry) # = entry
# added_max_vols = volumes
@@ -249,29 +285,24 @@ class mangaCli:
return f_d
def media_grabber(self, serie: Series, bar: Optional[Any] = None) -> bool:
log.debug(f"Processing serie: {serie.name}")
result = self.process_serie(serie)
total_new_volumes: list[tuple[Torrent, list[int]]] = []
fs_per_volume = config.komgrabber.manga.min_filesize
series_volumes = (
Komga.getVolumes(series_id=serie.id, unpaged=True)
if serie.id is not None
else [0]
)
max_new_volume: int = 0
name = serie.metadata.title if serie.metadata else serie.name
if result is None or result == []:
log.info(f"Could not find any new volumes for {serie.metadata.title}")
log.info(f"Could not find any new volumes for {name}")
return False
if bar:
bar.text(f"Downloading new volumes for {serie.metadata.title}...")
bar.text(f"Downloading new volumes for {name}...")
for res in result:
log.info(f"{res.name}, Volumes: {res.volumes}")
if res.volumes != [0]:
min_size = len(res.volumes) * fs_per_volume
if res.filesizes < min_size:
log.info(f"Skipping {res.name}, Reason: Filesize is too small")
result.remove(res)
continue
for res in result:
log.debug("present: {}, new: {}".format(series_volumes, res.volumes))
@@ -290,104 +321,88 @@ class mangaCli:
log.info(f"Found {len(total_new_volumes)} new results for {serie.name}")
for res, new_volumes in total_new_volumes:
if "epub" in res.filetypes and len(res.filetypes) == 1:
log.info(
f"Skipping {res.name}, Reason: Epub file, no other filetypes present"
)
continue
if (
max(new_volumes) > max(series_volumes)
and max(new_volumes) > max_new_volume
):
max_new_volume = max(new_volumes)
log.info(
log.success(
"Found new volumes: {} for series: {}, downloading".format(
new_volumes, serie.name
)
)
# log.info(
# f"Found {len(new_volumes)} new {'volume' if len(new_volumes) == 1 else 'volumes'} for {serie.name}"
# )
# # check if the new volumes were already downloaded
# log.info(f"current volumes: {series_volumes}, new volumes: {new_volumes}")
# # print(result)
if self.download(res.download_url) is True:
log.success(f"Downloaded {res.name}")
# self.rename_folder_and_files(self.file, komga_data=serie, remove=True)
# self.move_to_komga(serie=entry)
log.info("Renaming and tagging files")
rename()
if not config.komgrabber.get_chapters:
detect_chapters()
tag_folder()
if rename_folder(series=serie):
move(self.download_path, config.komga.media_path)
else:
log.info("Seems like we grabbed the wrong series, oops")
failed_items.append(serie.metadata.title)
if config.komgrabber.downloader == "aria2":
if self.aria2_download(res.download_url) is True:
log.success(f"Downloaded {res.name}")
# self.rename_folder_and_files(self.file, komga_data=serie, remove=True)
# self.move_to_komga(serie=entry)
log.info("Renaming and tagging files")
if self.library.type == "MANGA":
process_manga(
download_path=self.download_path,
library=self.library,
serie=serie,
)
elif self.library.type == "NOVEL":
process_novel(
download_path=self.download_path,
library=self.library,
serie=serie,
copy=config.komgrabber.copy,
)
else:
log.info("Seems like we grabbed the wrong series, oops")
failed_items.append(name)
# clear folder
# remove the download dir and create it anew
remove_empty_folders(self.download_path)
safe_remove_directory(self.download_path)
# remove the download dir and create it anew
remove_empty_folders(self.download_path)
safe_remove_directory(self.download_path)
else: # use qbit to handle downloads, then move them to the tag location
if self.qbit_download(res.download_url):
self.downloaded_files.append((name, res.name, "downloading"))
return True
def search_for_new_volumes(self, all: bool = False):
def search_for_new_volumes(self, all: bool = False) -> "mangaCli":
query = {
"condition": {
"allOf": [
{
"anyOf": [
{
"seriesStatus": {
"operator": "is",
"value": "ONGOING"
}
},
{
"seriesStatus": {
"operator": "is",
"value": "HIATUS"
}
},
{
"allOf": [
{
"seriesStatus": {
"operator": "is",
"value": "ENDED"
}
},
{
"complete": {
"operator": "isFalse"
}
}
]
}
]
}
]
}
}
if self.library_id != "":
"condition": {
"allOf": [
{
"anyOf": [
{"seriesStatus": {"operator": "is", "value": "ONGOING"}},
{"seriesStatus": {"operator": "is", "value": "HIATUS"}},
{
"allOf": [
{
"seriesStatus": {
"operator": "is",
"value": "ENDED",
}
},
{"complete": {"operator": "isFalse"}}, # ,
# {"deleted": {"operator": "isFalse"}},
]
},
]
}
]
}
}
if self.library.id != "":
query["condition"]["allOf"].append(
{
"libraryId": {
"operator": "is",
"value": self.library_id,
"value": self.library.id,
}
}
)
series = Komga.series_controller.getAllSeries(
body= query
)
series = Komga.series_controller.getAllSeries(body=query)
komga_series: list[Series] = []
shutil.rmtree(self.download_path, ignore_errors=True)
os.mkdir(self.download_path)
# log.debug(f"Series: {series}")
log.debug(f"Series: {len(series)}")
today = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime())
today = time.mktime(time.strptime(today, "%Y-%m-%d %H:%M:%S"))
@@ -397,16 +412,15 @@ class mangaCli:
def series_ending(bar): # type:ignore
bar.title("Completed searching for new volumes") # type:ignore
if config.komgrabber.downloader == "qbittorrent":
bar.text("All series checked, waiting for downloads to finish...") # type:ignore
return
bar.text("All series checked, exiting...") # type:ignore
def skip_ending(bar): # type:ignore
bar.title("Skipping series") # type:ignore
bar.text("Skipped series, continuing...") # type:ignore
def ended_ending(bar): # type:ignore
bar.title("Skipping finished series") # type:ignore
bar.text("Finished check, continuing to search new volumes...") # type:ignore
if config.komgrabber.use_cache:
log.info("Cache present, checking for missing entries")
cacheBar = alive_it(
@@ -437,12 +451,12 @@ class mangaCli:
time.sleep(0.05)
else:
self.cache.insert(
INSERT_KOMGRABBER,
INSERT_KOMGRABBER,
{
"name": serie.name,
"series_id": serie.id,
"status": serie.metadata.status,
}
},
)
log.info(f"Serie {serie.metadata.title} added to cache")
@@ -462,7 +476,7 @@ class mangaCli:
)
for serie in skipBar:
last_checked = self.cache.fetch_one(
GET_LASTCHECKED_KOMGRABBER, {"series_id": serie.id}
GET_LASTCHECKED_KOMGRABBER, args={"series_id": serie.id}
)[0]
log.debug(
f"Last checked: {last_checked}, Serie: {serie.name}, Status: {serie.metadata.status}"
@@ -476,7 +490,7 @@ class mangaCli:
)
# if difference between last_checked and today is less than config.komgrabber.cache_check_interval, skip entry
time_difference = time_checker(last_checked, today)
# check if the series id exists in komga
# if time difference is less than set in the settings and the series status is not ended and the book count is not the same as the total book count, skip the entry
if time_difference > config.komgrabber.cache_check_interval:
komga_series.append(serie)
@@ -489,8 +503,9 @@ class mangaCli:
)
time.sleep(0.005)
log.debug(len(komga_series))
# order komga_series by series.name
komga_series = sorted(komga_series, key=lambda x: x.name)
log.info("Finished checking cache, continuing...")
log.info("There are {} series to check".format(len(komga_series)))
time.sleep(0.05)
@@ -505,10 +520,27 @@ class mangaCli:
f"searching for new volumes for {serie.metadata.title}, currently at {serie.booksCount} volumes"
)
self.series_data = serie
self.serie = (
serie.metadata.title
) # replaced serie.name with serie.metadata.title
self.serie_id = serie.id
in_komga = Komga.series_controller.getSeries(serie.id)
if in_komga.__class__.__name__ == "Error" or in_komga.deleted:
log.info("Series has been deleted on server, deleting in database")
self.cache.query(
"DELETE FROM komgrabber WHERE series_id=:series_id",
{"series_id": serie.id},
)
continue
found = self.media_grabber(serie, bar=pBar)
if config.komgrabber.use_cache:
if found:
@@ -517,12 +549,12 @@ class mangaCli:
f"Cache updated for {serie.metadata.title}, new volumes found"
) # updated to use serie.metadata.title
else:
self.cache.update(LASTCHECKED_KOMGRABBER, {"series_id": serie.id})
# self.cache.update(LASTCHECKED_KOMGRABBER, {"series_id": serie.id})
log.info(
f"Cache updated for {serie.metadata.title}, no new volumes found"
log.critical(
"No new volumes found" # f"Cache updated for {serie.metadata.title}, no new volumes found"
) # updated to use serie.metadata.title
# self.cache.update(LASTCHECKED_KOMGRABBER, {"series_id": serie.id})
self.cache.update(LASTCHECKED_KOMGRABBER, {"series_id": serie.id})
# log.info("Cache updated")
return self
@@ -616,22 +648,46 @@ class mangaCli:
pass
def wait_for_qbit(self) -> bool:
if config.komgrabber.downloader != "qbittorrent":
return False
log.info("Waiting for qBittorrent to finish downloads...")
for download in self.dl.client.torrents_info():
if download.name in [file[1] for file in self.downloaded_files]:
komga_name = [
file[0]
for file in self.downloaded_files
if file[1] == download.name
][0]
if download.state == "downloading":
log.info(f"Download {download.name} is still in progress...")
return True
elif download.state == "stalled":
log.info(f"Download {download.name} is stalled, deleting")
self.dl.client.torrents_delete(
delete_files=True, hashes=[download.hash]
)
self.downloaded_files.pop(
[
i
for i, file in enumerate(self.downloaded_files)
if file[1] == download.name
][0]
)
incomplete.append(komga_name)
return True
elif download.state == "pausedUP":
log.info(
f"Download {download.name} is done, moving to tag location"
)
shutil.move(
Path(config.komgrabber.download_location, download.name),
Path(config.komgrabber.tag_location, komga_name),
)
return True
class ebookCli:
def __init__(self) -> None:
self.dl = Download(config.komgrabber.download_location)
self.file = None
self.serie = ""
self.serie_id = ""
self.series_data: Series
self.volumes = []
self.download_path = config.komgrabber.download_location
# self.allSeries = Komga.getAllSeries()
pass
def search_for_new_volumes(self):
folder = config.komgrabber.ebook.data_directory
series = os.listdir(folder)
log.info("All downloads completed")
return False
def avail_check():
@@ -639,17 +695,34 @@ def avail_check():
return (True, komga_avail)
def search_all(libraryName: str ="", all: bool = False):
libid = config.komga.libraries.get(libraryName)
mangaCli(library_id=libid).search_for_new_volumes(all)
Komga.library_controller.scanLibrary(libid)
print(f"Initialized scan for library {libraryName}")
def search_all(library: Library, all: bool = False):
handler = mangaCli(
library=library,
)
handler.search_for_new_volumes(all)
if config.komgrabber.downloader == "qbittorrent":
if handler.downloaded_files == []:
print("No downloads were added to qBittorrent, exiting...")
return
while handler.wait_for_qbit():
print("Waiting for qBittorrent to finish downloads...")
time.sleep(10)
rename(config.komgrabber.tag_location)
detect_chapters(config.komgrabber.tag_location)
tag_folder(config.komgrabber.tag_location)
move(
config.komgrabber.tag_location,
library.media_path,
)
Komga.library_controller.scanLibrary(library.id)
print(f"Initialized scan for library {library.name}")
print("Failed series:\n", failed_items)
print("Incomplete series:\n", incomplete)
def search_series(series: list[str]):
mangaCli().search_for_series(series)
def search_series(library, series: list[str]):
mangaCli(library=library).search_for_series(series)
# update_state()
print("Failed series:\n", failed_items)
@@ -657,6 +730,7 @@ def search_series(series: list[str]):
def search_requested():
cache = KomCache()
series = cache.query("SELECT manga_id from manga_requests WHERE grabbed = 0")
if series:
for serie in series:
result = mangaCli().search_for_new_series(int(serie[0]))

View File

@@ -1,15 +1,18 @@
import os
import re
from komconfig import KomConfig
from pathlib import Path
import shutil
import subprocess
import jaro
import loguru
import sys
import time
from komgapi import komgapi
from pathlib import Path
import jaro
import loguru
from komcache import KomCache
from komconfig import KomConfig
from komconfig.config import Library
from komgapi import komgapi
from komgapi.schemas import Series
cfg = KomConfig()
@@ -21,7 +24,7 @@ config = KomConfig()
komga = komgapi(cfg.komga.user, cfg.komga.password, cfg.komga.url)
def rename(folder: Path = config.komgrabber.download_location) -> None:
def rename(folder: Path = config.komgrabber.tag_location) -> None:
"""Rename the files in a folder according to the template.
Template: [Name] v[nr] #[nr].ext (e.g. "The Flash v1 #1.cbz").
@@ -61,14 +64,14 @@ def rename_recursive(folder: str) -> None:
rename(Path(f"{root}/{dir}"))
def tag_folder(folder: Path = config.komgrabber.download_location) -> None:
def tag_folder(folder: Path = config.komgrabber.tag_location) -> None:
"""
Recursively tags all the .cbz files in the folder using ComicTagger
Parameters
----------
folder : Path, optional
The path that will be used to tag, by default Path(config.komgrabber.download_location)
The path that will be used to tag, by default Path(config.komgrabber.tag_location)
"""
# Get the files in the folder
if "~" in str(folder):
@@ -88,7 +91,7 @@ def tag_folder(folder: Path = config.komgrabber.download_location) -> None:
)
def move(src: Path, dest: Path = Path(config.komga.media_path)) -> None:
def move(src: Path, library_path: str) -> None:
"""
Moves the files from the source folder to the destination folder.
If the folder already exists in the destination, only move the new files.
@@ -97,12 +100,13 @@ def move(src: Path, dest: Path = Path(config.komga.media_path)) -> None:
----------
src : Path
The source folder
dest : Path, optional
The destination folder used by Komga, by default Path(config.komga.media_path)
library_path : str
The library's folder name under the Komga media path, set in the config file (e.g. "Manga")
"""
# Get the files in the folder
# +move the folders from src to disc, if folder already exists, only move new files
dest = Path(config.komga.media_path, library_path)
folders = os.listdir(src)
for folder in folders:
if not os.path.exists(f"{dest}/{folder}"):
@@ -116,6 +120,9 @@ def move(src: Path, dest: Path = Path(config.komga.media_path)) -> None:
else:
files = os.listdir(f"{src}/{folder}")
for file in files:
if file.startswith("."):
log.debug(f"Skipping hidden file {file}")
continue
if not os.path.exists(f"{dest}/{folder}/{file}"):
log.info(f"Moving {file} to {dest}/{folder}")
shutil.move(f"{src}/{folder}/{file}", f"{dest}/{folder}")
@@ -123,7 +130,7 @@ def move(src: Path, dest: Path = Path(config.komga.media_path)) -> None:
remove_empty_folders(src)
def remove_empty_folders(src):
def remove_empty_folders(src: Path):
"""
Recursively removes empty folders in the source folder
@@ -140,36 +147,67 @@ def remove_empty_folders(src):
log.info(f"Removing {folder}")
os.rmdir(f"{src}/{folder}")
else:
remove_empty_folders(f"{src}/{folder}")
newPath = Path(f"{src}/{folder}")
remove_empty_folders(newPath)
def detect_chapters(src: Path = config.komgrabber.download_location) -> None:
def detect_chapters(
src: Path = config.komgrabber.tag_location, valid_extension: str = "cbz|epub"
) -> None:
"""
Detects and deletes any non-volume file in the source folder
Parameters
----------
src : Path, optional
The Path to be checked, by default Path(config.komgrabber.download_location)
The Path to be checked, by default Path(config.komgrabber.tag_location)
"""
log.info(f"Checking {src} for chapters")
regex = re.compile(rf"^.* v(\d+) #(\d+(?:-\d+)?)\.({valid_extension})$")
for folder in os.listdir(src):
if os.path.isdir(f"{src}/{folder}"):
files = os.listdir(f"{src}/{folder}")
for file in files:
if os.path.isdir(f"{src}/{folder}/{file}"):
folder_files = os.listdir(f"{src}/{folder}/{file}")
for folder_file in folder_files:
# check for regex "v(d) #(d)" in the file name
if regex.search(folder_file):
log.debug(f"File {folder_file} is a Volume")
else:
log.info(f"Deleting chapter {folder_file}")
if os.path.isfile(f"{src}/{folder}/{file}/{folder_file}"):
os.remove(f"{src}/{folder}/{file}/{folder_file}")
else:
shutil.rmtree(f"{src}/{folder}/{file}/{folder_file}")
# check for regex "v(d) #(d)" in the file name
regex = re.compile(r"^.* v(\d+) #(\d+(?:-\d+)?)\.cbz$")
if regex.search(file):
log.debug(f"File {file} is a Volume")
else:
log.debug(f"Deleting chapter {file}")
if os.path.isdir(f"{src}/{folder}/{file}"):
shutil.rmtree(f"{src}/{folder}/{file}")
else:
log.info(f"Deleting chapter {file}")
if os.path.isfile(f"{src}/{folder}/{file}"):
os.remove(f"{src}/{folder}/{file}")
else:
if os.path.isdir(f"{src}/{folder}/{file}"):
for subfile in os.listdir(f"{src}/{folder}/{file}"):
if regex.search(subfile):
log.debug(f"File {subfile} is a Volume")
else:
log.info(f"Deleting chapter {subfile}")
if os.path.isfile(
f"{src}/{folder}/{file}/{subfile}"
):
os.remove(f"{src}/{folder}/{file}/{subfile}")
else:
shutil.rmtree(
f"{src}/{folder}/{file}/{subfile}"
)
else:
shutil.rmtree(f"{src}/{folder}/{file}")
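
For reference, the volume pattern that detect_chapters() now builds (with the default "cbz|epub" extensions) keeps files that follow the rename() template "[Name] v[nr] #[nr].ext" and treats everything else as a chapter; a small sketch with illustrative filenames:

import re

# Mirrors the pattern built inside detect_chapters() with the default extensions.
volume_pattern = re.compile(r"^.* v(\d+) #(\d+(?:-\d+)?)\.(cbz|epub)$")

print(bool(volume_pattern.search("The Flash v1 #1.cbz")))      # True  -> kept
print(bool(volume_pattern.search("The Flash v2 #10-12.cbz")))  # True  -> ranges are kept too
print(bool(volume_pattern.search("The Flash 051.cbz")))        # False -> deleted as a chapter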
def folder_similarity(folder1, folder2) -> float:
def folder_similarity(folder1: str, folder2: str) -> float:
"""
Calculate the similarity between two folder names using Jaro-Winkler distance.
@@ -184,7 +222,9 @@ def folder_similarity(folder1, folder2) -> float:
return similarity
def rename_folder(src=config.komgrabber.download_location, series=None) -> bool:
def rename_folder(
src: Path = config.komgrabber.tag_location, series: Series = None
) -> bool:
renamer_regex = r"(\s*\([^)]*\))+$"
for folder in os.listdir(src):
if os.path.isdir(f"{src}/{folder}"):
@@ -235,11 +275,16 @@ def calculate_new_volumes(
present_volumes: list[int], new_volumes: list[int]
) -> list[int]:
if len(new_volumes) == 1:
if max(new_volumes) > max(present_volumes):
if len(present_volumes) == 0:
return new_volumes
if max(new_volumes) > max(present_volumes):
# return any new volume that is not in present volumes
return [v for v in new_volumes if v not in present_volumes]
else:
return []
else:
if len(present_volumes) == 0:
return new_volumes
new_volumes = sorted(new_volumes)
new_volumes = [i for i in new_volumes if i > max(present_volumes)]
if len(new_volumes) == 0:
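
A few worked examples of the guards above (only the branches shown in this hunk are covered; the tail of the multi-volume branch is truncated by the diff):

# Single new volume, nothing present yet -> returned as-is.
calculate_new_volumes(present_volumes=[], new_volumes=[4])         # [4]

# Single new volume that is not ahead of what Komga already has -> nothing to do.
calculate_new_volumes(present_volumes=[1, 2, 3], new_volumes=[3])  # []

# Several new volumes are first narrowed to those above max(present_volumes):
# [1, 2, 3, 4, 5] against [1, 2, 3] is reduced to [4, 5] before the final (truncated) check.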
@@ -283,3 +328,138 @@ def get_series_update_date(series_name: str) -> str:
args=(series_name,),
)
print(update_date)
def process_manga(download_path: Path, library: Library, serie: Series) -> None:
"""Process the downloaded manga: rename files, detect chapters, tag, rename folder, and move to library."""
rename(download_path)
if not config.komgrabber.get_chapters:
detect_chapters(download_path, "|".join(library.valid_extensions))
tag_folder(download_path)
if rename_folder(series=serie, src=download_path):
move(
download_path,
library.media_path,
)
def process_novel(
download_path: Path, library: Library, serie: Series, copy: bool = False
) -> None:
"""Process the downloaded novel: rename files, tag, rename folder, and move to library."""
# rename the folder to the series name
folder = os.listdir(download_path)[0]
series_name = serie.name
# remove all files that are not valid extensions
valid_extensions = library.valid_extensions
# flatten subfolders and subsubfolders
for root, dirs, files in os.walk(f"{download_path}/{folder}"):
for dir in dirs:
for file in os.listdir(f"{root}/{dir}"):
if file.startswith("."):
log.debug(f"Skipping hidden file {file}")
continue
log.info(f"Moving {file} to {download_path}/{folder}")
shutil.move(f"{root}/{dir}/{file}", f"{download_path}/{folder}")
os.rmdir(f"{root}/{dir}")
# removing invalid extensions
for file in os.listdir(f"{download_path}/{folder}"):
if not any(file.endswith(ext) for ext in valid_extensions):
log.info(f"Removing {file} as it is not a valid extension")
if os.path.isfile(f"{download_path}/{folder}/{file}"):
os.remove(f"{download_path}/{folder}/{file}")
else:
shutil.rmtree(f"{download_path}/{folder}/{file}")
# rename files to remove all [] and text within
for file in os.listdir(f"{download_path}/{folder}"):
filename = file.split(".")[0]
if f"{series_name} - Volume" in filename:
log.debug(f"Skipping {file}, already renamed")
continue
# extract the volume number, may be a float, either v1, v1.5, v01, v01.5, vol.1, vol.01, vol.1.5, vol.01.5, Vol.1, Vol.01, Vol.1.5, Vol.01.5, Volume 1, Volume 01, Volume 1.5, Volume 01.5
regex_volume_pattern = r"(v|vol\.|Vol\.|Volume\s)(\d+(\.\d+)?)"
match = re.search(regex_volume_pattern, file, re.IGNORECASE)
# from the match, get the volume number
volume = match.group(2) if match else None
# rename the file to series name v(volume).ext
ext = file.split(".")[-1]
# if volume is not null and less than 10, pad with a 0
if volume and float(volume) < 10:
volume = f"0{volume}"
if volume and "00" in volume:
volume = volume.replace("00", "0")
fixed = (
f"{series_name} - Volume {volume}.{ext}"
if volume
else f"{series_name}.{ext}"
)
log.debug(f"Renaming {file} to {fixed}")
os.rename(
f"{download_path}/{folder}/{file}", f"{download_path}/{folder}/{fixed}"
)
# rename the download folder to the series name
os.rename(f"{download_path}/{folder}", f"{download_path}/{series_name}")
dest = Path(config.komga.media_path, library.media_path)
folders = os.listdir(download_path)
log.info(f"Moving {folders} to {dest}")
for folder in folders:
log.info(f"Processing folder {folder}")
time.sleep(1)
if not os.path.exists(f"{dest}/{folder}"):
log.info(f"Moving {folder} to {dest}")
os.mkdir(f"{dest}/{folder}")
files = os.listdir(f"{download_path}/{folder}")
for file in files:
time.sleep(1)
log.debug(f"Moving {file} to {dest}/{folder}")
if copy:
# copy file to komgrabber tag location
copy_location = config.komgrabber.copy_location
if not os.path.exists(f"{copy_location}"):
os.mkdir(f"{copy_location}")
shutil.copy(
f"{download_path}/{folder}/{file}",
f"{copy_location}/{file}",
)
log.debug(
f"Copied from {download_path}/{folder}/{file} to {copy_location}/{file}"
)
shutil.move(f"{download_path}/{folder}/{file}", f"{dest}/{folder}")
# shutil.move(f"{src}/{folder}", dest)
else:
files = os.listdir(f"{download_path}/{folder}")
for file in files:
time.sleep(1)
log.debug(f"Processing file {file}")
if file.startswith("."):
log.debug(f"Skipping hidden file {file}")
continue
if not os.path.exists(f"{dest}/{folder}/{file}"):
log.debug(f"Moving {file} to {dest}/{folder}")
if copy:
# copy file to komgrabber tag location
copy_location = config.komgrabber.copy_location
if not os.path.exists(f"{copy_location}"):
os.mkdir(f"{copy_location}")
shutil.copy(
f"{download_path}/{folder}/{file}",
f"{copy_location}/{file}",
)
log.debug(
f"Copied from {download_path}/{folder}/{file} to {copy_location}/{file}"
)
shutil.move(f"{download_path}/{folder}/{file}", f"{dest}/{folder}")
log.info("Finished moving files, removing empty folders")
remove_empty_folders(download_path)
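
Roughly what the renaming loop in process_novel() produces for a few typical filenames, assuming serie.name == "Some Novel" (the names and release groups are made up):

# "Some Novel [Group] v1.epub"         -> "Some Novel - Volume 01.epub"
# "Some Novel Vol.01.5 [Premium].epub" -> "Some Novel - Volume 01.5.epub"
# "Some Novel v12 [Group].epub"        -> "Some Novel - Volume 12.epub"
# Files whose name already contains "Some Novel - Volume" are left untouched.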
if __name__ == "__main__":
print(folder_similarity("Dr. STONE (2018-2023) (Digital) (1r0n)", "Dr. STONE"))