Compare commits

..

3 Commits

Author SHA1 Message Date
79698e308b Fix tests
Some checks failed
CI / lint (ubuntu-latest, 3.9) (push) Has been cancelled
CI / build-and-test (macos-13, 3.13) (push) Has been cancelled
CI / build-and-test (macos-13, 3.9) (push) Has been cancelled
CI / build-and-test (macos-14, 3.13) (push) Has been cancelled
CI / build-and-test (macos-14, 3.9) (push) Has been cancelled
CI / build-and-test (ubuntu-22.04, 3.13) (push) Has been cancelled
CI / build-and-test (ubuntu-22.04, 3.9) (push) Has been cancelled
CI / build-and-test (ubuntu-22.04-arm, 3.13) (push) Has been cancelled
CI / build-and-test (ubuntu-22.04-arm, 3.9) (push) Has been cancelled
CI / build-and-test (windows-latest, 3.13) (push) Has been cancelled
CI / build-and-test (windows-latest, 3.9) (push) Has been cancelled
2025-07-18 18:14:22 -07:00
529f52c6cc Update comictaggerlib and comicapi for exception handling
Some checks failed
CI / lint (ubuntu-latest, 3.9) (push) Has been cancelled
CI / build-and-test (macos-13, 3.13) (push) Has been cancelled
CI / build-and-test (macos-13, 3.9) (push) Has been cancelled
CI / build-and-test (macos-14, 3.13) (push) Has been cancelled
CI / build-and-test (macos-14, 3.9) (push) Has been cancelled
CI / build-and-test (ubuntu-22.04, 3.13) (push) Has been cancelled
CI / build-and-test (ubuntu-22.04, 3.9) (push) Has been cancelled
CI / build-and-test (ubuntu-22.04-arm, 3.13) (push) Has been cancelled
CI / build-and-test (ubuntu-22.04-arm, 3.9) (push) Has been cancelled
CI / build-and-test (windows-latest, 3.13) (push) Has been cancelled
CI / build-and-test (windows-latest, 3.9) (push) Has been cancelled
2025-07-18 17:36:03 -07:00
2010d1c3c6 Fix Archive and Tag plugin definitions for exception handling 2025-07-18 17:35:32 -07:00
66 changed files with 1442 additions and 6782 deletions

View File

@ -2,34 +2,34 @@ from __future__ import annotations
import pathlib
from collections.abc import Collection
from typing import Protocol, runtime_checkable
from typing import ClassVar, Protocol, runtime_checkable
@runtime_checkable
class Archiver(Protocol):
"""Archiver Protocol"""
"""The path to the archive"""
path: pathlib.Path
"""The path to the archive"""
exe: ClassVar[str] = ""
"""
The name of the executable used for this archiver. This should be the base name of the executable.
For example if 'rar.exe' is needed this should be "rar".
If an executable is not used this should be the empty string.
"""
exe: str = ""
enabled: ClassVar[bool] = True
"""
Whether or not this archiver is enabled.
If external imports are required and are not available this should be false. See rar.py and sevenzip.py.
"""
enabled: bool = True
hashable: bool = True
"""
If self.path is a single file that can be hashed.
For example directories cannot be hashed.
"""
hashable: bool = True
supported_extensions: Collection[str] = set()
@ -39,21 +39,21 @@ class Archiver(Protocol):
def get_comment(self) -> str:
"""
Returns the comment from the current archive as a string.
Should always return a string. If comments are not supported in the archive the empty string should be returned.
If comments are not supported in the archive the empty string should be returned.
"""
return ""
raise NotImplementedError
def set_comment(self, comment: str) -> bool:
def set_comment(self, comment: str) -> None:
"""
Returns True if the comment was successfully set on the current archive.
Should always return a boolean. If comments are not supported in the archive False should be returned.
Should raise an exception if a comment cannot be set
"""
return False
raise NotImplementedError
def supports_comment(self) -> bool:
"""
Returns True if the current archive supports comments.
Should always return a boolean. If comments are not supported in the archive False should be returned.
Should always return a boolean.
MUST NOT cause an exception.
"""
return False
@ -65,63 +65,59 @@ class Archiver(Protocol):
"""
raise NotImplementedError
def remove_file(self, archive_file: str) -> bool:
def remove_file(self, archive_file: str) -> None:
"""
Removes the named file from the current archive.
archive_file should always come from the output of get_filename_list.
Should always return a boolean. Failures should return False.
archive_file will always come from the output of get_filename_list.
Rebuilding the archive without the named file is a standard way to remove a file.
"""
return False
raise NotImplementedError
def write_file(self, archive_file: str, data: bytes) -> bool:
def write_file(self, archive_file: str, data: bytes) -> None:
"""
Writes the named file to the current archive.
Should always return a boolean. Failures should return False.
"""
return False
raise NotImplementedError
def get_filename_list(self) -> list[str]:
"""
Returns a list of filenames in the current archive.
Should always return a list of string. Failures should return an empty list.
Should always return a list of string.
"""
return []
raise NotImplementedError
def supports_files(self) -> bool:
"""
Returns True if the current archive supports arbitrary non-picture files.
Should always return a boolean.
If arbitrary non-picture files are not supported in the archive False should be returned.
MUST NOT cause an exception.
"""
return False
raise NotImplementedError
def copy_from_archive(self, other_archive: Archiver) -> bool:
def copy_from_archive(self, other_archive: Archiver) -> None:
"""
Copies the contents of another archive to the current archive.
Should always return a boolean. Failures should return False.
"""
return False
raise NotImplementedError
def is_writable(self) -> bool:
"""
Returns True if the current archive is writable
Should always return a boolean. Failures should return False.
"""
return False
raise NotImplementedError
def extension(self) -> str:
"""
Returns the extension that this archiver should use eg ".cbz".
Should always return a string. Failures should return the empty string.
MUST NOT cause an exception.
"""
return ""
def name(self) -> str:
"""
Returns the name of this archiver for display purposes eg "CBZ".
Should always return a string. Failures should return the empty string.
MUST NOT cause an exception.
"""
return ""
@ -130,6 +126,7 @@ class Archiver(Protocol):
"""
Returns True if the given path can be opened by this archiver.
Should always return a boolean. Failures should return False.
MUST NOT cause an exception.
"""
return False
@ -138,8 +135,10 @@ class Archiver(Protocol):
"""
Opens the given archive.
Should always return an Archiver.
Should never cause an exception no file operations should take place in this method,
is_valid will always be called before open.
Should validate that file can be opened.
NOTE: is_7zfile from py7zr does not validate that py7zr can open the file
MUST NOT keep file open.
"""
archiver = cls()
archiver.path = path

View File

@ -25,47 +25,50 @@ class FolderArchiver(Archiver):
except OSError:
return ""
def set_comment(self, comment: str) -> bool:
def set_comment(self, comment: str) -> None:
self._filename_list = []
if comment:
return self.write_file(self.comment_file_name, comment.encode("utf-8"))
(self.path / self.comment_file_name).unlink(missing_ok=True)
return True
try:
if comment:
file_path = self.path / self.comment_file_name
file_path.parent.mkdir(exist_ok=True, parents=True)
file_path.write_text(comment, encoding="utf-8")
else:
(self.path / self.comment_file_name).unlink(missing_ok=True)
except OSError as e:
logger.error(
"Error writing comment for folder archive [%s]: %s :: %s", e, self.path, self.comment_file_name
)
raise OSError(
f"Error writing comment for folder archive [{e}]: {self.path} :: {self.comment_file_name}"
) from e
def supports_comment(self) -> bool:
return True
def read_file(self, archive_file: str) -> bytes:
try:
data = (self.path / archive_file).read_bytes()
return (self.path / archive_file).read_bytes()
except OSError as e:
logger.error("Error reading folder archive [%s]: %s :: %s", e, self.path, archive_file)
raise
raise OSError(f"Error reading folder archive [{e}]: {self.path} :: {archive_file}") from e
return data
def remove_file(self, archive_file: str) -> bool:
def remove_file(self, archive_file: str) -> None:
self._filename_list = []
try:
(self.path / archive_file).unlink(missing_ok=True)
except OSError as e:
logger.error("Error removing file for folder archive [%s]: %s :: %s", e, self.path, archive_file)
return False
else:
return True
raise OSError(f"Error removing file for folder archive [{e}]: {self.path} :: {archive_file}") from e
def write_file(self, archive_file: str, data: bytes) -> bool:
def write_file(self, archive_file: str, data: bytes) -> None:
self._filename_list = []
try:
file_path = self.path / archive_file
file_path.parent.mkdir(exist_ok=True, parents=True)
with open(self.path / archive_file, mode="wb") as f:
f.write(data)
file_path.write_bytes(data)
except OSError as e:
logger.error("Error writing folder archive [%s]: %s :: %s", e, self.path, archive_file)
return False
else:
return True
raise OSError(f"Error writing folder archive [{e}]: {self.path} :: {archive_file}") from e
def get_filename_list(self) -> list[str]:
if self._filename_list:
@ -79,12 +82,12 @@ class FolderArchiver(Archiver):
return filenames
except OSError as e:
logger.error("Error listing files in folder archive [%s]: %s", e, self.path)
return []
raise OSError(f"Error listing files in folder archive [{e}]: {self.path}") from e
def supports_files(self) -> bool:
return True
def copy_from_archive(self, other_archive: Archiver) -> bool:
def copy_from_archive(self, other_archive: Archiver) -> None:
"""Replace the current zip with one copied from another archive"""
self._filename_list = []
try:
@ -94,15 +97,14 @@ class FolderArchiver(Archiver):
self.write_file(filename, data)
# preserve the old comment
comment = other_archive.get_comment()
if comment is not None:
if not self.set_comment(comment):
return False
except Exception:
logger.exception("Error while copying archive from %s to %s", other_archive.path, self.path)
return False
else:
return True
self.set_comment(other_archive.get_comment())
except Exception as e:
logger.exception(
"Error while copying to folder archive [%s]: from %s to %s", e, other_archive.path, self.path
)
raise OSError(
f"Error while copying to folder archive [{e}]: from {str(other_archive)!r} to {str(self.path)!r}"
) from e
def is_writable(self) -> bool:
return True

View File

@ -49,47 +49,47 @@ class RarArchiver(Archiver):
rarc = self.get_rar_obj()
return (rarc.comment if rarc else "") or ""
def set_comment(self, comment: str) -> bool:
def set_comment(self, comment: str) -> None:
self._reset()
if rar_support and self.exe:
try:
# write comment to temp file
with tempfile.TemporaryDirectory() as tmp_dir:
tmp_file = pathlib.Path(tmp_dir) / "rar_comment.txt"
tmp_file.write_text(comment, encoding="utf-8")
if not (rar_support and self.exe):
return
working_dir = os.path.dirname(os.path.abspath(self.path))
try:
# write comment to temp file
with tempfile.TemporaryDirectory() as tmp_dir:
tmp_file = pathlib.Path(tmp_dir) / "rar_comment.txt"
tmp_file.write_text(comment, encoding="utf-8")
# use external program to write comment to Rar archive
proc_args = [
self.exe,
"c",
f"-w{working_dir}",
"-c-",
f"-z{tmp_file}",
str(self.path),
]
result = subprocess.run(
proc_args,
startupinfo=STARTUPINFO,
stdin=subprocess.DEVNULL,
capture_output=True,
encoding="utf-8",
cwd=tmp_dir,
)
if result.returncode != 0:
logger.error(
"Error writing comment to rar archive [exitcode: %d]: %s :: %s",
result.returncode,
self.path,
result.stderr,
)
return False
except OSError as e:
logger.exception("Error writing comment to rar archive [%s]: %s", e, self.path)
return False
return True
return False
working_dir = os.path.dirname(os.path.abspath(self.path))
# use external program to write comment to Rar archive
proc_args = [
self.exe,
"c",
f"-w{working_dir}",
"-c-",
f"-z{tmp_file}",
str(self.path),
]
result = subprocess.run(
proc_args,
startupinfo=STARTUPINFO,
stdin=subprocess.DEVNULL,
capture_output=True,
encoding="utf-8",
cwd=tmp_dir,
)
except Exception as e:
logger.exception("Error writing comment to rar archive [%s]: %s", e, self.path)
raise OSError(f"Error writing comment to rar archive [{e}]: {self.path}")
if result.returncode != 0:
logger.error(
"Error writing comment to rar archive [exitcode: %d]: %s :: %s",
result.returncode,
self.path,
result.stderr,
)
raise OSError(f"Error writing comment to rar archive [exitcode: {result.returncode}]: {self.path}")
def supports_comment(self) -> bool:
return True
@ -100,9 +100,11 @@ class RarArchiver(Archiver):
return b""
tries = 0
error = None
entries = []
while tries < 7:
tries += 1
try:
tries = tries + 1
data: bytes = rarc.open(archive_file).read()
entries = [(rarc.getinfo(archive_file), data)]
@ -115,10 +117,24 @@ class RarArchiver(Archiver):
archive_file,
tries,
)
error = OSError(
'"Error reading rar archive [file is not expected size: {:d} vs {:d}] {} :: {} :: tries #{:d}"'.format(
entries[0][0].file_size,
len(entries[0][1]),
self.path,
archive_file,
tries,
)
)
continue
except OSError as e:
logger.error("Error reading rar archive [%s]: %s :: %s :: tries #%d", e, self.path, archive_file, tries)
logger.error(
"Error reading file from rar archive [%s]: %s :: %s :: tries #%d", e, self.path, archive_file, tries
)
error = OSError(
f"Error reading file from rar archive [{e}]: {self.path} :: {archive_file} :: tries#{tries}"
)
except Exception as e:
logger.error(
"Unexpected exception reading rar archive [%s]: %s :: %s :: tries #%d",
@ -127,22 +143,30 @@ class RarArchiver(Archiver):
archive_file,
tries,
)
break
raise RuntimeError(
f"Unexpected exception reading file from rar archive [{e}]: {self.path} :: {archive_file} :: tries#{tries}"
)
else:
# Success. Entries is a list of tuples: ( rarinfo, filedata)
if error is None:
# Success, return early. Entries is a list of tuples: ( rarinfo, filedata)
if len(entries) == 1:
return entries[0][1]
raise OSError(
f"Error reading file from rar archive [File not found]: {self.path} :: {archive_file} :: tries#{tries}"
)
raise OSError
if error is None:
# Somehow we have success but exited the loop
raise RuntimeError("Something failed")
raise error
raise OSError
def remove_file(self, archive_file: str) -> bool:
def remove_file(self, archive_file: str) -> None:
self._reset()
if self.exe:
working_dir = os.path.dirname(os.path.abspath(self.path))
# use external program to remove file from Rar archive
if not self.exe:
return
working_dir = os.path.dirname(os.path.abspath(self.path))
# use external program to remove file from Rar archive
try:
result = subprocess.run(
[self.exe, "d", f"-w{working_dir}", "-c-", self.path, archive_file],
startupinfo=STARTUPINFO,
@ -151,26 +175,35 @@ class RarArchiver(Archiver):
encoding="utf-8",
cwd=self.path.absolute().parent,
)
except Exception as e:
raise OSError(f"Error removing file from rar archive [{e}]: {self.path}:: {archive_file}")
if result.returncode != 0:
logger.error(
"Error removing file from rar archive [exitcode: %d]: %s :: %s",
if result.returncode != 0:
logger.error(
"Error removing file from rar archive [exitcode: %d]: %s :: %s",
result.returncode,
self.path,
archive_file,
)
raise RuntimeError(
"Error removing file from rar archive [exitcode: {:d}]: {} :: {}".format(
result.returncode,
self.path,
archive_file,
)
return False
return True
return False
)
def write_file(self, archive_file: str, data: bytes) -> bool:
def write_file(self, archive_file: str, data: bytes) -> None:
self._reset()
if self.exe:
archive_path = pathlib.PurePosixPath(archive_file)
archive_name = archive_path.name
archive_parent = str(archive_path.parent).lstrip("./")
working_dir = os.path.dirname(os.path.abspath(self.path))
if not self.exe:
return
archive_path = pathlib.PurePosixPath(archive_file)
archive_name = archive_path.name
archive_parent = str(archive_path.parent).lstrip("./")
working_dir = os.path.dirname(os.path.abspath(self.path))
try:
# use external program to write file to Rar archive
result = subprocess.run(
[
@ -188,45 +221,53 @@ class RarArchiver(Archiver):
capture_output=True,
cwd=self.path.absolute().parent,
)
if result.returncode != 0:
logger.error(
"Error writing rar archive [exitcode: %d]: %s :: %s :: %s",
result.returncode,
self.path,
archive_file,
result.stderr,
)
return False
return True
return False
except Exception as e:
raise OSError(f"Error writing file to rar archive [{e}]: {self.path}:: {archive_file}")
if result.returncode != 0:
logger.error(
"Error writing rar archive [exitcode: %d]: %s :: %s :: %s",
result.returncode,
self.path,
archive_file,
result.stderr,
)
raise OSError(
f"Error writing file to rar archive [exitcode: {result.returncode}]: {self.path}:: {archive_file}"
)
def get_filename_list(self) -> list[str]:
if self._filename_list:
return self._filename_list
rarc = self.get_rar_obj()
tries = 0
if rar_support and rarc:
while tries < 7:
try:
tries = tries + 1
namelist = []
for item in rarc.infolist():
if item.file_size != 0:
namelist.append(item.filename)
if not (rar_support and rarc):
return []
except OSError as e:
logger.error("Error listing files in rar archive [%s]: %s :: attempt #%d", e, self.path, tries)
error = None
while tries < 7:
tries += 1
try:
namelist = []
for item in rarc.infolist():
if item.file_size != 0:
namelist.append(item.filename)
else:
self._filename_list = namelist
return namelist
return []
except OSError as e:
logger.error("Error listing files in rar archive [%s]: %s :: attempt #%d", e, self.path, tries)
error = OSError(f"Error listing files in rar archive [{e}]: {self.path} :: tries#{tries}")
else:
self._filename_list = namelist
return self._filename_list
if error is None:
# Somehow we have success but exited the loop
raise RuntimeError("Something failed")
raise error
def supports_files(self) -> bool:
return True
def copy_from_archive(self, other_archive: Archiver) -> bool:
def copy_from_archive(self, other_archive: Archiver) -> None:
"""Replace the current archive with one copied from another archive"""
self._reset()
try:
@ -251,22 +292,22 @@ class RarArchiver(Archiver):
capture_output=True,
encoding="utf-8",
)
if result.returncode != 0:
logger.error(
"Error while copying to rar archive [exitcode: %d]: %s: %s",
result.returncode,
self.path,
result.stderr,
)
return False
self.path.unlink(missing_ok=True)
shutil.move(rar_path, self.path)
except Exception as e:
logger.exception("Error while copying to rar archive [%s]: from %s to %s", e, other_archive.path, self.path)
return False
else:
return True
raise OSError(f"Error listing files in rar archive [{e}]: from {other_archive.path} to {self.path}") from e
if result.returncode != 0:
logger.error(
"Error while copying to rar archive [exitcode: %d]: %s: %s",
result.returncode,
self.path,
result.stderr,
)
raise OSError(
f"Error while copying to rar archive [exitcode: {result.returncode}]: {self.path}: {result.stderr}"
)
@classmethod
@functools.cache

View File

@ -28,46 +28,37 @@ class SevenZipArchiver(Archiver):
super().__init__()
self._filename_list: list[str] = []
# @todo: Implement Comment?
def get_comment(self) -> str:
return ""
def set_comment(self, comment: str) -> bool:
return False
def read_file(self, archive_file: str) -> bytes:
data = b""
try:
with py7zr.SevenZipFile(self.path, "r") as zf:
data = zf.read([archive_file])[archive_file].read()
except (py7zr.Bad7zFile, OSError) as e:
logger.error("Error reading 7zip archive [%s]: %s :: %s", e, self.path, archive_file)
raise
logger.error("Error reading file in 7zip archive [%s]: %s :: %s", e, self.path, archive_file)
raise OSError(f"Error reading file in 7zip archive [{e}]: {self.path} :: {archive_file}") from e
return data
def remove_file(self, archive_file: str) -> bool:
def remove_file(self, archive_file: str) -> None:
self._filename_list = []
return self.rebuild([archive_file])
def write_file(self, archive_file: str, data: bytes) -> bool:
def write_file(self, archive_file: str, data: bytes) -> None:
# At the moment, no other option but to rebuild the whole
# archive w/o the indicated file. Very sucky, but maybe
# another solution can be found
files = self.get_filename_list()
self._filename_list = []
if archive_file in files:
if not self.rebuild([archive_file]):
return False
self.rebuild([archive_file])
try:
# now just add the archive file as a new one
with py7zr.SevenZipFile(self.path, "a") as zf:
zf.writestr(data, archive_file)
return True
except (py7zr.Bad7zFile, OSError) as e:
logger.error("Error writing 7zip archive [%s]: %s :: %s", e, self.path, archive_file)
return False
logger.error("Error writing file in 7zip archive [%s]: %s :: %s", e, self.path, archive_file)
raise OSError(f"Error writing file in 7zip archive [{e}]: {self.path} :: {archive_file}") from e
def get_filename_list(self) -> list[str]:
if self._filename_list:
@ -80,12 +71,12 @@ class SevenZipArchiver(Archiver):
return namelist
except (py7zr.Bad7zFile, OSError) as e:
logger.error("Error listing files in 7zip archive [%s]: %s", e, self.path)
return []
raise OSError(f"Error listing files in 7zip archive [{e}]: {self.path}") from e
def supports_files(self) -> bool:
return True
def rebuild(self, exclude_list: list[str]) -> bool:
def rebuild(self, exclude_list: list[str]) -> None:
"""Zip helper func
This recompresses the zip archive, without the files in the exclude_list
@ -108,11 +99,10 @@ class SevenZipArchiver(Archiver):
shutil.move(tmp_file.name, self.path)
except (py7zr.Bad7zFile, OSError) as e:
logger.error("Error rebuilding 7zip file [%s]: %s", e, self.path)
return False
return True
logger.error("Error rebuilding 7zip archive [%s]: %s", e, self.path)
raise OSError(f"Error rebuilding 7zip archive [{e}]: {self.path}") from e
def copy_from_archive(self, other_archive: Archiver) -> bool:
def copy_from_archive(self, other_archive: Archiver) -> None:
"""Replace the current zip with one copied from another archive"""
self._filename_list = []
try:
@ -125,9 +115,7 @@ class SevenZipArchiver(Archiver):
zout.writestr(data, filename)
except Exception as e:
logger.error("Error while copying to 7zip archive [%s]: from %s to %s", e, other_archive.path, self.path)
return False
else:
return True
raise OSError(f"Error while copying to 7zip archive [{e}]: from {other_archive.path} to {self.path}") from e
def is_writable(self) -> bool:
return True

View File

@ -40,33 +40,35 @@ class ZipArchiver(Archiver):
comment = zf.comment.decode("utf-8", errors="replace")
return comment
def set_comment(self, comment: str) -> bool:
with ZipFile(self.path, mode="a") as zf:
zf.comment = bytes(comment, "utf-8")
return True
def set_comment(self, comment: str) -> None:
try:
with ZipFile(self.path, mode="a") as zf:
zf.comment = bytes(comment, "utf-8")
except Exception as e:
logger.error("Error writing zip comment [%s]: %s", e, self.path)
raise OSError(f"Error writing zip comment [{e}]: {self.path}") from e
def read_file(self, archive_file: str) -> bytes:
with ZipFile(self.path, mode="r") as zf:
try:
data = zf.read(archive_file)
except (zipfile.BadZipfile, OSError) as e:
logger.exception("Error reading zip archive [%s]: %s :: %s", e, self.path, archive_file)
raise
logger.exception("Error reading file in zip archive [%s]: %s :: %s", e, self.path, archive_file)
raise OSError(f"Error reading file in zip archive [{e}]: {self.path} :: {archive_file}") from e
return data
def remove_file(self, archive_file: str) -> bool:
def remove_file(self, archive_file: str) -> None:
files = self.get_filename_list()
self._filename_list = []
try:
with ZipFile(self.path, mode="a", allowZip64=True, compression=zipfile.ZIP_DEFLATED) as zf:
if archive_file in files:
zf.repack([zf.remove(archive_file)])
return True
except (zipfile.BadZipfile, OSError) as e:
logger.error("Error writing zip archive [%s]: %s :: %s", e, self.path, archive_file)
return False
logger.error("Error removing file in zip archive [%s]: %s :: %s", e, self.path, archive_file)
raise OSError(f"Error removing file in zip archive [{e}]: {self.path} :: {archive_file}") from e
def write_file(self, archive_file: str, data: bytes) -> bool:
def write_file(self, archive_file: str, data: bytes) -> None:
files = self.get_filename_list()
self._filename_list = []
@ -76,10 +78,9 @@ class ZipArchiver(Archiver):
if archive_file in files:
zf.repack([zf.remove(archive_file)])
zf.writestr(archive_file, data)
return True
except (zipfile.BadZipfile, OSError) as e:
logger.error("Error writing zip archive [%s]: %s :: %s", e, self.path, archive_file)
return False
raise OSError(f"Error writing zip archive [{e}]: {self.path} :: {archive_file}") from e
def get_filename_list(self) -> list[str]:
if self._filename_list:
@ -90,7 +91,7 @@ class ZipArchiver(Archiver):
return self._filename_list
except (zipfile.BadZipfile, OSError) as e:
logger.error("Error listing files in zip archive [%s]: %s", e, self.path)
return []
raise OSError(f"Error listing files in zip archive [{e}]: {self.path}") from e
def supports_files(self) -> bool:
return True
@ -125,7 +126,7 @@ class ZipArchiver(Archiver):
return False
return True
def copy_from_archive(self, other_archive: Archiver) -> bool:
def copy_from_archive(self, other_archive: Archiver) -> None:
"""Replace the current zip with one copied from another archive"""
self._filename_list = []
try:
@ -136,15 +137,12 @@ class ZipArchiver(Archiver):
zout.writestr(filename, data)
# preserve the old comment
comment = other_archive.get_comment()
if comment is not None:
if not self.set_comment(comment):
return False
self.set_comment(other_archive.get_comment())
except Exception as e:
logger.error("Error while copying to zip archive [%s]: from %s to %s", e, other_archive.path, self.path)
return False
else:
return True
raise OSError(
f"Error while copying to zip archive [{e}]: from {str(other_archive)!r} to {str(self.path)!r}"
) from e
def is_writable(self) -> bool:
return True

View File

@ -122,6 +122,8 @@ def load_tag_plugins(version: str = f"ComicAPI/{version}", local_plugins: Iterab
class ComicArchive:
"""Exceptions from tags/archive should already be logged. Caller must handle display to user and recovery"""
logo_data = b""
pil_available: bool | None = None
@ -208,15 +210,18 @@ class ComicArchive:
return True
def is_zip(self) -> bool:
return self.archiver.name() == "ZIP"
return self.archiver.extension() == ".cbz"
def seems_to_be_a_comic_archive(self) -> bool:
if (
not (isinstance(self.archiver, UnknownArchiver))
and self.get_number_of_pages() > 0
and self.archiver.is_valid(self.path)
):
return True
try:
if (
not (isinstance(self.archiver, UnknownArchiver))
and self.get_number_of_pages() > 0
and self.archiver.is_valid(self.path)
):
return True
except Exception:
...
return False
@ -238,15 +243,15 @@ class ComicArchive:
return ""
return tags[tag_id].read_raw_tags(self.archiver)
def write_tags(self, metadata: GenericMetadata, tag_id: str) -> bool:
def write_tags(self, metadata: GenericMetadata, tag_id: str) -> None:
if tag_id in self.md:
del self.md[tag_id]
if not tags[tag_id].enabled:
logger.warning("%s tags not enabled", tags[tag_id].name())
return False
return
self.apply_archive_info_to_metadata(metadata, True, True, hash_archive=self.hash_archive)
return tags[tag_id].write_tags(metadata, self.archiver)
tags[tag_id].write_tags(metadata, self.archiver)
def has_tags(self, tag_id: str) -> bool:
if tag_id in self.md:
@ -255,12 +260,12 @@ class ComicArchive:
return False
return tags[tag_id].has_tags(self.archiver)
def remove_tags(self, tag_id: str) -> bool:
def remove_tags(self, tag_id: str) -> None:
if tag_id in self.md:
del self.md[tag_id]
if not tags[tag_id].enabled:
return False
return tags[tag_id].remove_tags(self.archiver)
return
tags[tag_id].remove_tags(self.archiver)
def get_page(self, index: int) -> bytes:
image_data = b""
@ -464,10 +469,17 @@ class ComicArchive:
metadata.is_empty = False
return metadata
def export_as_zip(self, zip_filename: pathlib.Path) -> bool:
if self.archiver.name() == "ZIP":
# nothing to do, we're already a zip
return True
def export_as(self, new_filename: pathlib.Path, extension: str = ".zip") -> None:
"""
Unconditionally creates a new file. Does not check the current archive.
zip_archiver = ZipArchiver.open(zip_filename)
return zip_archiver.copy_from_archive(self.archiver)
If a matching archiver for the extension cannot be found, this reverts to .zip
"""
zip_archiver = UnknownArchiver.open(new_filename)
for archiver in archivers:
if extension in archiver.supported_extensions:
zip_archiver = archiver.open(new_filename)
if isinstance(zip_archiver, UnknownArchiver):
zip_archiver = ZipArchiver.open(new_filename)
zip_archiver.copy_from_archive(self.archiver)

View File

@ -94,11 +94,12 @@ class ComicRack(Tag):
and self.file in archive.get_filename_list()
and self._validate_bytes(archive.read_file(self.file))
)
except Exception:
return False
except Exception as e:
raise RuntimeError(f"Failed to Read {self.id} tags from {archive.path}({archive.name()})") from e
def remove_tags(self, archive: Archiver) -> bool:
return self.has_tags(archive) and archive.remove_file(self.file)
def remove_tags(self, archive: Archiver) -> None:
if self.has_tags(archive):
archive.remove_file(self.file)
def read_tags(self, archive: Archiver) -> GenericMetadata:
if self.has_tags(archive):
@ -106,8 +107,8 @@ class ComicRack(Tag):
metadata = archive.read_file(self.file) or b""
if self._validate_bytes(metadata):
return self._metadata_from_bytes(metadata)
except Exception:
...
except Exception as e:
raise RuntimeError(f"Failed to Read {self.id} tags from {archive.path}({archive.name()})") from e
return GenericMetadata()
def read_raw_tags(self, archive: Archiver) -> str:
@ -116,22 +117,20 @@ class ComicRack(Tag):
b = archive.read_file(self.file)
# ET.fromstring is used as xml can declare the encoding
return ET.tostring(ET.fromstring(b), encoding="unicode", xml_declaration=True)
except Exception:
...
except Exception as e:
raise RuntimeError(f"Failed to Read {self.id} tags from {archive.path}({archive.name()})") from e
return ""
def write_tags(self, metadata: GenericMetadata, archive: Archiver) -> bool:
def write_tags(self, metadata: GenericMetadata, archive: Archiver) -> None:
if self.supports_tags(archive):
xml = b""
try: # read_file can cause an exception
if self.has_tags(archive):
xml = archive.read_file(self.file)
return archive.write_file(self.file, self._bytes_from_metadata(metadata, xml))
except Exception:
...
else:
logger.warning("Archive %s(%s) does not support '%s' metadata", archive.path, archive.name(), self.name())
return False
except Exception as e:
raise RuntimeError(f"Failed to write {self.id} tags to {archive.path}({archive.name()})") from e
logger.warning("Archive %s(%s) does not support '%s' metadata", archive.path, archive.name(), self.name())
def name(self) -> str:
return "Comic Rack"

View File

@ -1,126 +1,144 @@
from __future__ import annotations
from typing import ClassVar
from comicapi.archivers import Archiver
from comicapi.genericmetadata import GenericMetadata
class Tag:
id: ClassVar[str] = ""
"""
ID for this tag format.
Currently known used IDs are cr, cix, comet, cbi, metroninfo and acbf.
You can use an existing ID to override its behaviour. It is not recommended to do so.
"""
enabled: bool = False
id: str = ""
"""When set to False it will be excluded from selection in ComicTagger"""
supported_attributes: set[str] = {
"data_origin",
"issue_id",
"series_id",
"original_hash",
"series",
"series_aliases",
"issue",
"issue_count",
"title",
"title_aliases",
"volume",
"volume_count",
"genres",
"description",
"notes",
"alternate_series",
"alternate_number",
"alternate_count",
"gtin",
"story_arcs",
"series_groups",
"publisher",
"imprint",
"day",
"month",
"year",
"language",
"country",
"web_link",
"format",
"manga",
"black_and_white",
"maturity_rating",
"critical_rating",
"scan_info",
"tags",
"pages",
"pages.type",
"pages.bookmark",
"pages.double_page",
"pages.image_index",
"pages.size",
"pages.height",
"pages.width",
"page_count",
"characters",
"teams",
"locations",
"credits",
"credits.person",
"credits.role",
"credits.primary",
"credits.language",
"price",
"is_version_of",
"rights",
"identifier",
"last_mark",
}
"""Set of GenericMetadata attributes this tag format can handle"""
version: str
"""Current version of ComicTagger"""
def __init__(self, version: str) -> None:
self.version: str = version
self.supported_attributes = {
"data_origin",
"issue_id",
"series_id",
"original_hash",
"series",
"series_aliases",
"issue",
"issue_count",
"title",
"title_aliases",
"volume",
"volume_count",
"genres",
"description",
"notes",
"alternate_series",
"alternate_number",
"alternate_count",
"gtin",
"story_arcs",
"series_groups",
"publisher",
"imprint",
"day",
"month",
"year",
"language",
"country",
"web_link",
"format",
"manga",
"black_and_white",
"maturity_rating",
"critical_rating",
"scan_info",
"tags",
"pages",
"pages.type",
"pages.bookmark",
"pages.double_page",
"pages.image_index",
"pages.size",
"pages.height",
"pages.width",
"page_count",
"characters",
"teams",
"locations",
"credits",
"credits.person",
"credits.role",
"credits.primary",
"credits.language",
"price",
"is_version_of",
"rights",
"identifier",
"last_mark",
}
def supports_credit_role(self, role: str) -> bool:
"""
Return True if this tag format can handle this credit role.
Should always return a bool.
MUST NOT cause an exception.
"""
self.supported_attributes
return False
def supports_tags(self, archive: Archiver) -> bool:
"""
Checks the given archive for the ability to save these tags.
Should always return a bool. Failures should return False.
Should always return a bool.
Typically consists of a call to either `archive.supports_comment` or `archive.supports_file`
"""
return False
raise NotImplementedError
def has_tags(self, archive: Archiver) -> bool:
"""
Checks the given archive for tags.
Should always return a bool. Failures should return False.
Should always return a bool.
"""
return False
raise NotImplementedError
def remove_tags(self, archive: Archiver) -> bool:
def remove_tags(self, archive: Archiver) -> None:
"""
Removes the tags from the given archive.
Should always return a bool. Failures should return False.
"""
return False
raise NotImplementedError
def read_tags(self, archive: Archiver) -> GenericMetadata:
"""
Returns a GenericMetadata representing the tags saved in the given archive.
Should always return a GenericMetadata. Failures should return an empty metadata object.
"""
return GenericMetadata()
raise NotImplementedError
def read_raw_tags(self, archive: Archiver) -> str:
"""
Returns the raw tags as a string.
If the tags are a binary format a roughly similar text format should be used.
Should always return a string. Failures should return the empty string.
"""
return ""
raise NotImplementedError
def write_tags(self, metadata: GenericMetadata, archive: Archiver) -> bool:
def write_tags(self, metadata: GenericMetadata, archive: Archiver) -> None:
"""
Saves the given metadata to the given archive.
Should always return a bool. Failures should return False.
Should always return a bool
"""
return False
raise NotImplementedError
def name(self) -> str:
"""
Returns the name of these tags for display purposes eg "Comic Rack".
Should always return a string. Failures should return the empty string.
Should always return a string.
MUST NOT cause an exception.
"""
return ""

View File

@ -18,10 +18,12 @@ from __future__ import annotations
import logging
import os
from typing import Callable
from PyQt6 import QtCore, QtGui, QtWidgets, uic
from comicapi.comicarchive import ComicArchive, tags
from comicapi.genericmetadata import GenericMetadata
from comictaggerlib.coverimagewidget import CoverImageWidget
from comictaggerlib.ctsettings import ct_ns
from comictaggerlib.md import prepare_metadata
@ -38,6 +40,7 @@ class AutoTagMatchWindow(QtWidgets.QDialog):
parent: QtWidgets.QWidget,
match_set_list: list[Result],
read_tags: list[str],
fetch_func: Callable[[IssueResult], GenericMetadata],
config: ct_ns,
talker: ComicTalker,
) -> None:
@ -76,7 +79,7 @@ class AutoTagMatchWindow(QtWidgets.QDialog):
self.match_set_list = match_set_list
self._tags = read_tags
self.talker = talker
self.fetch_func = fetch_func
self.current_match_set_idx = 0
@ -249,7 +252,7 @@ class AutoTagMatchWindow(QtWidgets.QDialog):
# now get the particular issue data
try:
self.current_match_set.md = ct_md = self.talker.fetch_comic_data(issue_id=match.issue_id)
self.current_match_set.md = ct_md = self.fetch_func(match)
except TalkerError as e:
QtWidgets.QApplication.restoreOverrideCursor()
QtWidgets.QMessageBox.critical(self, f"{e.source} {e.code_name} Error", f"{e}")
@ -262,14 +265,15 @@ class AutoTagMatchWindow(QtWidgets.QDialog):
QtWidgets.QApplication.setOverrideCursor(QtGui.QCursor(QtCore.Qt.CursorShape.WaitCursor))
md = prepare_metadata(md, ct_md, self.config)
for tag_id in self._tags:
success = ca.write_tags(md, tag_id)
QtWidgets.QApplication.restoreOverrideCursor()
if not success:
try:
ca.write_tags(md, tag_id)
except Exception as e:
QtWidgets.QMessageBox.warning(
self,
"Write Error",
f"Saving {tags[tag_id].name()} the tags to the archive seemed to fail!",
f"Saving {tags[tag_id].name()} the tags to the archive seemed to fail! {e}",
)
break
QtWidgets.QApplication.restoreOverrideCursor()
ca.reset_cache()

View File

@ -17,206 +17,16 @@
from __future__ import annotations
import logging
import pathlib
import re
from PyQt6 import QtCore, QtWidgets, uic
from comicapi import utils
from comicapi.comicarchive import ComicArchive, tags
from comicapi.genericmetadata import GenericMetadata
from comictaggerlib.coverimagewidget import CoverImageWidget
from comictaggerlib.ctsettings.settngs_namespace import SettngsNS
from comictaggerlib.issueidentifier import IssueIdentifierCancelled
from comictaggerlib.md import read_selected_tags
from comictaggerlib.resulttypes import Action, OnlineMatchResults, Result, Status
from comictaggerlib.tag import identify_comic
from comictaggerlib.ui import ui_path
from comictalker.comictalker import ComicTalker, RLCallBack
from comictalker.comictalker import ComicTalker
logger = logging.getLogger(__name__)
class AutoTagThread(QtCore.QThread):
autoTagComplete = QtCore.pyqtSignal(OnlineMatchResults, list)
autoTagLogMsg = QtCore.pyqtSignal(str)
autoTagProgress = QtCore.pyqtSignal(object, object, object, bytes, bytes) # see progress_callback
ratelimit = QtCore.pyqtSignal(float, float)
def __init__(
self, series_override: str, ca_list: list[ComicArchive], config: SettngsNS, talker: ComicTalker
) -> None:
QtCore.QThread.__init__(self)
self.series_override = series_override
self.ca_list = ca_list
self.config = config
self.talker = talker
self.canceled = False
def log_output(self, text: str) -> None:
self.autoTagLogMsg.emit(str(text))
def progress_callback(
self, cur: int | None, total: int | None, path: pathlib.Path | None, archive_image: bytes, remote_image: bytes
) -> None:
self.autoTagProgress.emit(cur, total, path, archive_image, remote_image)
def run(self) -> None:
match_results = OnlineMatchResults()
archives_to_remove = []
for prog_idx, ca in enumerate(self.ca_list):
self.log_output("==========================================================================\n")
self.log_output(f"Auto-Tagging {prog_idx} of {len(self.ca_list)}\n")
self.log_output(f"{ca.path}\n")
try:
cover_idx = ca.read_tags(self.config.internal__read_tags[0]).get_cover_page_index_list()[0]
except Exception as e:
cover_idx = 0
logger.error("Failed to load metadata for %s: %s", ca.path, e)
image_data = ca.get_page(cover_idx)
self.progress_callback(prog_idx, len(self.ca_list), ca.path, image_data, b"")
if self.canceled:
break
if ca.is_writable():
success, match_results = self.identify_and_tag_single_archive(ca, match_results)
if self.canceled:
break
if success and self.config.internal__remove_archive_after_successful_match:
archives_to_remove.append(ca)
self.autoTagComplete.emit(match_results, archives_to_remove)
def on_rate_limit(self, full_time: float, sleep_time: float) -> None:
if self.canceled:
raise IssueIdentifierCancelled
self.log_output(
f"Rate limit reached: {full_time:.0f}s until next request. Waiting {sleep_time:.0f}s for ratelimit"
)
self.ratelimit.emit(full_time, sleep_time)
def identify_and_tag_single_archive(
self, ca: ComicArchive, match_results: OnlineMatchResults
) -> tuple[Result, OnlineMatchResults]:
ratelimit_callback = RLCallBack(
self.on_rate_limit,
60,
)
# read in tags, and parse file name if not there
md, tags_used, error = read_selected_tags(self.config.internal__read_tags, ca)
if error is not None:
QtWidgets.QMessageBox.warning(
None,
"Aborting...",
f"One or more of the read tags failed to load for {ca.path}. Aborting to prevent any possible further damage. Check log for details.",
)
logger.error("Failed to load tags from %s: %s", ca.path, error)
return (
Result(
Action.save,
original_path=ca.path,
status=Status.read_failure,
),
match_results,
)
if md.is_empty:
md = ca.metadata_from_filename(
self.config.Filename_Parsing__filename_parser,
self.config.Filename_Parsing__remove_c2c,
self.config.Filename_Parsing__remove_fcbd,
self.config.Filename_Parsing__remove_publisher,
self.config.Filename_Parsing__split_words,
self.config.Filename_Parsing__allow_issue_start_with_letter,
self.config.Filename_Parsing__protofolius_issue_number_scheme,
)
if self.config.Auto_Tag__ignore_leading_numbers_in_filename and md.series is not None:
# remove all leading numbers
md.series = re.sub(r"(^[\d.]*)(.*)", r"\2", md.series)
# use the dialog specified search string
if self.series_override:
md.series = self.series_override
if not self.config.Auto_Tag__use_year_when_identifying:
md.year = None
# If it's empty we need it to stay empty for identify_comic to report the correct error
if (md.issue is None or md.issue == "") and not md.is_empty:
if self.config.Auto_Tag__assume_issue_one:
md.issue = "1"
else:
md.issue = utils.xlate(md.volume)
def on_progress(x: int, y: int, image: bytes) -> None:
# We don't (currently) care about the progress of an individual comic here we just want the cover for the autotagprogresswindow
self.progress_callback(None, None, None, b"", image)
if self.canceled:
return (
Result(
Action.save,
original_path=ca.path,
status=Status.read_failure,
),
match_results,
)
try:
res, match_results = identify_comic(
ca,
md,
tags_used,
match_results,
self.config,
self.talker,
self.log_output,
on_rate_limit=ratelimit_callback,
on_progress=on_progress,
)
except IssueIdentifierCancelled:
return (
Result(
Action.save,
original_path=ca.path,
status=Status.fetch_data_failure,
),
match_results,
)
if self.canceled:
return res, match_results
if res.status == Status.success:
assert res.md
def write_Tags(ca: ComicArchive, md: GenericMetadata) -> bool:
for tag_id in self.config.Runtime_Options__tags_write:
# write out the new data
if not ca.write_tags(md, tag_id):
self.log_output(f"{tags[tag_id].name()} save failed! Aborting any additional tag saves.\n")
return False
return True
# Save tags
if write_Tags(ca, res.md):
match_results.good_matches.append(res)
res.tags_written = self.config.Runtime_Options__tags_write
self.log_output("Save complete!\n")
else:
res.status = Status.write_failure
match_results.write_failures.append(res)
ca.reset_cache()
ca.load_cache({*self.config.Runtime_Options__tags_read})
return res, match_results
def cancel(self) -> None:
self.canceled = True
class AutoTagProgressWindow(QtWidgets.QDialog):
def __init__(self, parent: QtWidgets.QWidget, talker: ComicTalker) -> None:
super().__init__(parent)
@ -236,6 +46,8 @@ class AutoTagProgressWindow(QtWidgets.QDialog):
gridlayout.addWidget(self.testCoverWidget)
gridlayout.setContentsMargins(0, 0, 0, 0)
self.isdone = False
self.setWindowFlags(
QtCore.Qt.WindowType(
self.windowFlags()
@ -254,20 +66,6 @@ class AutoTagProgressWindow(QtWidgets.QDialog):
widget.set_image_data(img_data)
QtCore.QCoreApplication.processEvents()
# @QtCore.pyqtSlot(int, int, 'Optional[pathlib.Path]', bytes, bytes)
def on_progress(
self, x: int | None, y: int | None, title: pathlib.Path | None, archive_image: bytes, remote_image: bytes
) -> None:
if x is not None and y is not None:
self.progressBar: QtWidgets.QProgressBar
self.progressBar.setValue(x)
self.progressBar.setMaximum(y)
if title:
self.setWindowTitle(str(title))
if archive_image:
self.set_archive_image(archive_image)
if remote_image:
self.set_test_image(remote_image)
def reject(self) -> None:
QtWidgets.QDialog.reject(self)
self.isdone = True

View File

@ -17,13 +17,14 @@
from __future__ import annotations
import dataclasses
import functools
import json
import logging
import os
import pathlib
import re
import sys
from collections.abc import Collection
from functools import partial
from typing import Any, TextIO
from comicapi import merge, utils
@ -33,10 +34,10 @@ from comictaggerlib.cbltransformer import CBLTransformer
from comictaggerlib.ctsettings import ct_ns
from comictaggerlib.filerenamer import FileRenamer, get_rename_dir
from comictaggerlib.graphics import graphics_path
from comictaggerlib.issueidentifier import IssueIdentifier
from comictaggerlib.md import prepare_metadata
from comictaggerlib.quick_tag import QuickTag
from comictaggerlib.resulttypes import Action, MatchStatus, OnlineMatchResults, Result, Status
from comictaggerlib.tag import identify_comic
from comictaggerlib.resulttypes import Action, IssueResult, MatchStatus, OnlineMatchResults, Result, Status
from comictalker.comictalker import ComicTalker, TalkerError
logger = logging.getLogger(__name__)
@ -132,7 +133,7 @@ class CLI:
def fetch_metadata(self, issue_id: str) -> GenericMetadata:
# now get the particular issue data
try:
ct_md = self.current_talker().fetch_comic_data(issue_id=issue_id, on_rate_limit=None)
ct_md = self.current_talker().fetch_comic_data(issue_id)
except Exception as e:
logger.error("Error retrieving issue details '%s'. Save aborted.", e)
return GenericMetadata()
@ -146,8 +147,10 @@ class CLI:
if not self.config.Runtime_Options__dryrun:
for tag_id in self.config.Runtime_Options__tags_write:
# write out the new data
if not ca.write_tags(md, tag_id):
logger.error("The tag save seemed to fail for: %s!", tags[tag_id].name())
try:
ca.write_tags(md, tag_id)
except Exception:
# Error is already displayed in the log
return False
self.output("Save complete.")
@ -351,19 +354,21 @@ class CLI:
def delete_tags(self, ca: ComicArchive, tag_id: str) -> Status:
tag_name = tags[tag_id].name()
if ca.has_tags(tag_id):
if not self.config.Runtime_Options__dryrun:
if ca.remove_tags(tag_id):
self.output(f"{ca.path}: Removed {tag_name} tags.")
return Status.success
else:
self.output(f"{ca.path}: Tag removal seemed to fail!")
return Status.write_failure
else:
self.output(f"{ca.path}: dry-run. {tag_name} tags not removed")
return Status.success
self.output(f"{ca.path}: This archive doesn't have {tag_name} tags to remove.")
return Status.success
if not ca.has_tags(tag_id):
self.output(f"{ca.path}: This archive doesn't have {tag_name} tags to remove.")
return Status.success
if self.config.Runtime_Options__dryrun:
self.output(f"{ca.path}: dry-run. {tag_name} tags would be removed")
return Status.success
try:
ca.remove_tags(tag_id)
self.output(f"{ca.path}: Removed {tag_name} tags.")
return Status.success
except Exception:
self.output(f"{ca.path}: Tag removal seemed to fail!")
return Status.write_failure
def delete(self, ca: ComicArchive) -> Result:
res = Result(Action.delete, Status.success, ca.path)
@ -385,18 +390,21 @@ class CLI:
self.output(f"{ca.path}: Destination and source are same: {dst_tag_name}. Nothing to do.")
return Status.existing_tags
if not self.config.Runtime_Options__dryrun:
if self.config.Metadata_Options__apply_transform_on_bulk_operation and dst_tag_id == "cbi":
md = CBLTransformer(md, self.config).apply()
if ca.write_tags(md, dst_tag_id):
self.output(f"{ca.path}: Copied {source_names} tags to {dst_tag_name}.")
else:
self.output(f"{ca.path}: Tag copy seemed to fail!")
return Status.write_failure
else:
if self.config.Runtime_Options__dryrun:
self.output(f"{ca.path}: dry-run. {source_names} tags not copied")
return Status.success
return Status.success
if self.config.Metadata_Options__apply_transform_on_bulk_operation and dst_tag_id == "cbi":
md = CBLTransformer(md, self.config).apply()
try:
ca.write_tags(md, dst_tag_id)
self.output(f"{ca.path}: Copied {source_names} tags to {dst_tag_name}.")
return Status.success
except Exception:
self.output(f"{ca.path}: Tag copy seemed to fail!")
return Status.write_failure
def copy(self, ca: ComicArchive) -> Result:
res = Result(Action.copy, Status.success, ca.path)
@ -457,6 +465,123 @@ class CLI:
logger.debug("", exc_info=True)
return None
def normal_tag(
self, ca: ComicArchive, tags_read: list[str], md: GenericMetadata, match_results: OnlineMatchResults
) -> tuple[GenericMetadata, list[IssueResult], Result | None, OnlineMatchResults]:
# ct_md, results, matches, match_results
if md is None or md.is_empty:
logger.error("No metadata given to search online with!")
res = Result(
Action.save,
status=Status.match_failure,
original_path=ca.path,
match_status=MatchStatus.no_match,
tags_written=self.config.Runtime_Options__tags_write,
tags_read=tags_read,
)
match_results.no_matches.append(res)
return GenericMetadata(), [], res, match_results
ii = IssueIdentifier(ca, self.config, self.current_talker())
ii.set_output_function(functools.partial(self.output, already_logged=True))
if not self.config.Auto_Tag__use_year_when_identifying:
md.year = None
if self.config.Auto_Tag__ignore_leading_numbers_in_filename and md.series is not None:
md.series = re.sub(r"^([\d.]+)(.*)", r"\2", md.series)
result, matches = ii.identify(ca, md)
found_match = False
choices = False
low_confidence = False
if result == IssueIdentifier.result_no_matches:
pass
elif result == IssueIdentifier.result_found_match_but_bad_cover_score:
low_confidence = True
found_match = True
elif result == IssueIdentifier.result_found_match_but_not_first_page:
found_match = True
elif result == IssueIdentifier.result_multiple_matches_with_bad_image_scores:
low_confidence = True
choices = True
elif result == IssueIdentifier.result_one_good_match:
found_match = True
elif result == IssueIdentifier.result_multiple_good_matches:
choices = True
if choices:
if low_confidence:
logger.error("Online search: Multiple low confidence matches. Save aborted")
res = Result(
Action.save,
status=Status.match_failure,
original_path=ca.path,
online_results=matches,
match_status=MatchStatus.low_confidence_match,
tags_written=self.config.Runtime_Options__tags_write,
tags_read=tags_read,
)
match_results.low_confidence_matches.append(res)
return GenericMetadata(), matches, res, match_results
logger.error("Online search: Multiple good matches. Save aborted")
res = Result(
Action.save,
status=Status.match_failure,
original_path=ca.path,
online_results=matches,
match_status=MatchStatus.multiple_match,
tags_written=self.config.Runtime_Options__tags_write,
tags_read=tags_read,
)
match_results.multiple_matches.append(res)
return GenericMetadata(), matches, res, match_results
if low_confidence and self.config.Runtime_Options__abort_on_low_confidence:
logger.error("Online search: Low confidence match. Save aborted")
res = Result(
Action.save,
status=Status.match_failure,
original_path=ca.path,
online_results=matches,
match_status=MatchStatus.low_confidence_match,
tags_written=self.config.Runtime_Options__tags_write,
tags_read=tags_read,
)
match_results.low_confidence_matches.append(res)
return GenericMetadata(), matches, res, match_results
if not found_match:
logger.error("Online search: No match found. Save aborted")
res = Result(
Action.save,
status=Status.match_failure,
original_path=ca.path,
online_results=matches,
match_status=MatchStatus.no_match,
tags_written=self.config.Runtime_Options__tags_write,
tags_read=tags_read,
)
match_results.no_matches.append(res)
return GenericMetadata(), matches, res, match_results
# we got here, so we have a single match
# now get the particular issue data
ct_md = self.fetch_metadata(matches[0].issue_id)
if ct_md.is_empty:
res = Result(
Action.save,
status=Status.fetch_data_failure,
original_path=ca.path,
online_results=matches,
match_status=MatchStatus.good_match,
tags_written=self.config.Runtime_Options__tags_write,
tags_read=tags_read,
)
match_results.fetch_data_failures.append(res)
return GenericMetadata(), matches, res, match_results
return ct_md, matches, None, match_results
def save(self, ca: ComicArchive, match_results: OnlineMatchResults) -> tuple[Result, OnlineMatchResults]:
if self.config.Runtime_Options__skip_existing_tags:
for tag_id in self.config.Runtime_Options__tags_write:
@ -467,6 +592,7 @@ class CLI:
Action.save,
original_path=ca.path,
status=Status.existing_tags,
tags_written=self.config.Runtime_Options__tags_write,
),
match_results,
)
@ -476,30 +602,22 @@ class CLI:
md, tags_read = self.create_local_metadata(ca, self.config.Runtime_Options__tags_read)
# matches: list[IssueResult] = []
matches: list[IssueResult] = []
# now, search online
ct_md = GenericMetadata()
res = Result(
Action.save,
status=Status.success,
original_path=ca.path,
md=prepare_metadata(md, ct_md, self.config),
tags_read=tags_read,
)
if self.config.Auto_Tag__online:
if self.config.Auto_Tag__issue_id is not None:
# we were given the actual issue ID to search with
try:
ct_md = self.current_talker().fetch_comic_data(
issue_id=self.config.Auto_Tag__issue_id, on_rate_limit=None
)
ct_md = self.current_talker().fetch_comic_data(self.config.Auto_Tag__issue_id)
except TalkerError as e:
logger.error("Error retrieving issue details. Save aborted. %s", e)
res = Result(
Action.save,
original_path=ca.path,
status=Status.fetch_data_failure,
tags_written=self.config.Runtime_Options__tags_write,
tags_read=tags_read,
)
match_results.fetch_data_failures.append(res)
@ -512,18 +630,11 @@ class CLI:
status=Status.match_failure,
original_path=ca.path,
match_status=MatchStatus.no_match,
tags_written=self.config.Runtime_Options__tags_write,
tags_read=tags_read,
)
match_results.no_matches.append(res)
return res, match_results
res = Result(
Action.save,
status=Status.success,
original_path=ca.path,
match_status=MatchStatus.good_match,
md=prepare_metadata(md, ct_md, self.config),
tags_read=tags_read,
)
else:
query_md = md.copy()
@ -534,24 +645,42 @@ class CLI:
if qt_md is None or qt_md.is_empty:
if qt_md is not None:
self.output("Failed to find match via quick tag")
res, match_results = identify_comic(
ca,
md,
tags_read,
match_results,
self.config,
self.current_talker(),
partial(self.output, already_logged=True),
on_rate_limit=None,
)
if res.status != Status.success:
ct_md, matches, res, match_results = self.normal_tag(ca, tags_read, query_md, match_results) # type: ignore[assignment]
if res is not None:
return res, match_results
else:
self.output("Successfully matched via quick tag")
assert res.md
ct_md = qt_md
matches = [
IssueResult(
series=ct_md.series or "",
distance=-1,
issue_number=ct_md.issue or "",
issue_count=ct_md.issue_count,
url_image_hash=-1,
issue_title=ct_md.title or "",
issue_id=ct_md.issue_id or "",
series_id=ct_md.series_id or "",
month=ct_md.month,
year=ct_md.year,
publisher=None,
image_url=str(ct_md._cover_image) or "",
alt_image_urls=[],
description=ct_md.description or "",
)
]
res.tags_written = self.config.Runtime_Options__tags_write
res = Result(
Action.save,
status=Status.success,
original_path=ca.path,
online_results=matches,
match_status=MatchStatus.good_match,
md=prepare_metadata(md, ct_md, self.config),
tags_written=self.config.Runtime_Options__tags_write,
tags_read=tags_read,
)
assert res.md
# ok, done building our metadata. time to save
if self.write_tags(ca, res.md):
match_results.good_matches.append(res)
@ -648,25 +777,25 @@ class CLI:
delete_success = False
export_success = False
if not self.config.Runtime_Options__dryrun:
if ca.export_as_zip(new_file):
export_success = True
if self.config.Runtime_Options__delete_original:
try:
filename_path.unlink(missing_ok=True)
delete_success = True
except OSError:
logger.exception("%sError deleting original archive after export", msg_hdr)
else:
# last export failed, so remove the zip, if it exists
new_file.unlink(missing_ok=True)
else:
if self.config.Runtime_Options__dryrun:
msg = msg_hdr + f"Dry-run: Would try to create {os.path.split(new_file)[1]}"
if self.config.Runtime_Options__delete_original:
msg += " and delete original."
self.output(msg)
return Result(Action.export, Status.success, ca.path, new_file)
try:
ca.export_as(new_file)
export_success = True
if self.config.Runtime_Options__delete_original:
try:
filename_path.unlink(missing_ok=False)
delete_success = True
except OSError:
logger.exception("%sError deleting original archive after export", msg_hdr)
except Exception:
new_file.unlink(missing_ok=True)
msg = msg_hdr
if export_success:
msg += f"Archive exported successfully to: {os.path.split(new_file)[1]}"

View File

@ -128,8 +128,7 @@ def open_tagger_window(
ctypes.windll.user32.SetWindowPos(console_wnd, None, 0, 0, 0, 0, swp_hidewindow) # type: ignore[attr-defined]
if platform.system() != "Linux":
img = QtGui.QPixmap()
img.loadFromData((graphics_path / "tags.png").read_bytes())
img = QtGui.QPixmap(str(graphics_path / "tags.png"))
splash = QtWidgets.QSplashScreen(img)
splash.show()

View File

@ -17,12 +17,8 @@
from __future__ import annotations
import copy
import dataclasses
import functools
import io
import logging
import pathlib
from enum import Enum, auto
from operator import attrgetter
from typing import Any, Callable
@ -32,10 +28,11 @@ from comicapi import utils
from comicapi.comicarchive import ComicArchive
from comicapi.genericmetadata import ComicSeries, GenericMetadata, ImageHash
from comicapi.issuestring import IssueString
from comictaggerlib.ctsettings import ct_ns
from comictaggerlib.imagefetcher import ImageFetcher, ImageFetcherException
from comictaggerlib.imagehasher import ImageHasher
from comictaggerlib.resulttypes import IssueResult
from comictalker.comictalker import ComicTalker, RLCallBack, TalkerError
from comictalker.comictalker import ComicTalker, TalkerError
logger = logging.getLogger(__name__)
@ -73,36 +70,25 @@ class IssueIdentifierNetworkError(Exception): ...
class IssueIdentifierCancelled(Exception): ...
class Result(Enum):
single_good_match = auto()
no_matches = auto()
single_bad_cover_score = auto()
multiple_bad_cover_scores = auto()
multiple_good_matches = auto()
@dataclasses.dataclass
class IssueIdentifierOptions:
series_match_search_thresh: int
series_match_identify_thresh: int
use_publisher_filter: bool
publisher_filter: list[str]
quiet: bool
cache_dir: pathlib.Path
border_crop_percent: int
talker: ComicTalker
class IssueIdentifier:
result_no_matches = 0
result_found_match_but_bad_cover_score = 1
result_found_match_but_not_first_page = 2
result_multiple_matches_with_bad_image_scores = 3
result_one_good_match = 4
result_multiple_good_matches = 5
def __init__(
self,
config: IssueIdentifierOptions,
on_rate_limit: RLCallBack | None,
output: Callable[[str], Any] = print,
on_progress: Callable[[int, int, bytes], Any] | None = None,
comic_archive: ComicArchive,
config: ct_ns,
talker: ComicTalker,
metadata: GenericMetadata = GenericMetadata(),
) -> None:
self.config = config
self.talker = config.talker
self.talker = talker
self.comic_archive: ComicArchive = comic_archive
self.md = metadata
self.image_hasher = 1
self.only_use_additional_meta_data = False
@ -123,24 +109,30 @@ class IssueIdentifier:
# used to eliminate series names that are too long based on our search
# string
self.series_match_thresh = config.series_match_identify_thresh
self.series_match_thresh = config.Issue_Identifier__series_match_identify_thresh
# used to eliminate unlikely publishers
self.use_publisher_filter = config.use_publisher_filter
self.publisher_filter = [s.strip().casefold() for s in config.publisher_filter]
self.use_publisher_filter = config.Auto_Tag__use_publisher_filter
self.publisher_filter = [s.strip().casefold() for s in config.Auto_Tag__publisher_filter]
self.additional_metadata = GenericMetadata()
self.output_function = output
self.progress_callback: Callable[[int, int, bytes], Any] = lambda *x: ...
if on_progress:
self.progress_callback = on_progress
self.on_rate_limit = on_rate_limit
self.search_result = Result.no_matches
self.output_function: Callable[[str], None] = print
self.progress_callback: Callable[[int, int], None] | None = None
self.cover_url_callback: Callable[[bytes], None] | None = None
self.search_result = self.result_no_matches
self.cancel = False
self.current_progress = (0, 0)
self.match_list: list[IssueResult] = []
def set_output_function(self, func: Callable[[str], None]) -> None:
self.output_function = func
def set_progress_callback(self, cb_func: Callable[[int, int], None]) -> None:
self.progress_callback = cb_func
def set_cover_url_callback(self, cb_func: Callable[[bytes], None]) -> None:
self.cover_url_callback = cb_func
def calculate_hash(self, image_data: bytes = b"", image: Image.Image | None = None) -> int:
if self.image_hasher == 3:
return ImageHasher(data=image_data, image=image).perception_hash()
@ -170,23 +162,23 @@ class IssueIdentifier:
# Always send to logger so that we have a record for troubleshooting
logger.info(log_msg, **kwargs)
# If we are quiet we don't need to call the output function
if self.config.quiet:
# If we are verbose or quiet we don't need to call the output function
if self.config.Runtime_Options__verbose > 0 or self.config.Runtime_Options__quiet:
return
# default output is stdout
self.output_function(*args, **kwargs)
def identify(self, ca: ComicArchive, md: GenericMetadata) -> tuple[Result, list[IssueResult]]:
def identify(self, ca: ComicArchive, md: GenericMetadata) -> tuple[int, list[IssueResult]]:
if not self._check_requirements(ca):
return Result.no_matches, []
return self.result_no_matches, []
terms, images, extra_images = self._get_search_terms(ca, md)
# we need, at minimum, a series and issue number
if not (terms["series"] and terms["issue_number"]):
self.log_msg("Not enough info for a search!")
return Result.no_matches, []
return self.result_no_matches, []
self._print_terms(terms, images)
@ -215,28 +207,28 @@ class IssueIdentifier:
self.log_msg("--------------------------------------------------------------------------")
self._print_match(final_cover_matching[0])
self.log_msg("--------------------------------------------------------------------------")
search_result = Result.single_bad_cover_score
search_result = self.result_found_match_but_bad_cover_score
else:
self.log_msg("--------------------------------------------------------------------------")
self.log_msg("Multiple bad cover matches! Need to use other info...")
self.log_msg("--------------------------------------------------------------------------")
search_result = Result.multiple_bad_cover_scores
search_result = self.result_multiple_matches_with_bad_image_scores
else:
if len(final_cover_matching) == 1:
self.log_msg("--------------------------------------------------------------------------")
self._print_match(final_cover_matching[0])
self.log_msg("--------------------------------------------------------------------------")
search_result = Result.single_good_match
search_result = self.result_one_good_match
elif not final_cover_matching:
self.log_msg("--------------------------------------------------------------------------")
self.log_msg("No matches found :(")
self.log_msg("--------------------------------------------------------------------------")
search_result = Result.no_matches
search_result = self.result_no_matches
else:
# we've got multiple good matches:
self.log_msg("More than one likely candidate.")
search_result = Result.multiple_good_matches
search_result = self.result_multiple_good_matches
final_cover_matching = full # display more options for the user to pick
self.log_msg("--------------------------------------------------------------------------")
for match_item in final_cover_matching:
@ -298,16 +290,14 @@ class IssueIdentifier:
remote_hashes: list[tuple[str, int]] = []
for url in urls:
try:
alt_url_image_data = ImageFetcher(self.config.cache_dir).fetch(url, blocking=True)
alt_url_image_data = ImageFetcher(self.config.Runtime_Options__config.user_cache_dir).fetch(
url, blocking=True
)
except ImageFetcherException as e:
self.log_msg(f"Network issue while fetching alt. cover image from {self.talker.name}. Aborting...")
raise IssueIdentifierNetworkError from e
self._user_canceled(
functools.partial(
self.progress_callback, self.current_progress[0], self.current_progress[1], alt_url_image_data
)
)
self._user_canceled(self.cover_url_callback, alt_url_image_data)
remote_hashes.append((url, self.calculate_hash(alt_url_image_data)))
@ -329,7 +319,7 @@ class IssueIdentifier:
if primary_img_url is None or (not primary_img_url.Kind and not primary_img_url.URL and not use_alt_urls):
return Score(score=100, url="", remote_hash=0, local_hash=0, local_hash_name="0")
# self._user_canceled()
self._user_canceled()
remote_hashes = []
@ -408,7 +398,7 @@ class IssueIdentifier:
images.append(("double page", im))
# Check and remove black borders. Helps in identifying comics with an excessive black border like https://comicvine.gamespot.com/marvel-graphic-novel-1-the-death-of-captain-marvel/4000-21782/
cropped = self._crop_border(cover_image, self.config.border_crop_percent)
cropped = self._crop_border(cover_image, self.config.Issue_Identifier__border_crop_percent)
if cropped is not None:
images.append(("black border cropped", cropped))
@ -448,11 +438,11 @@ class IssueIdentifier:
) -> tuple[SearchKeys, list[tuple[str, Image.Image]], list[tuple[str, Image.Image]]]:
return self._get_search_keys(md), self._get_images(ca, md), self._get_extra_images(ca, md)
def _user_canceled(self, callback: Callable[[], Any] | None = None) -> Any:
def _user_canceled(self, callback: Callable[..., Any] | None = None, *args: Any) -> Any:
if self.cancel:
raise IssueIdentifierCancelled
if callback is not None:
return callback()
return callback(*args)
def _print_terms(self, keys: SearchKeys, images: list[tuple[str, Image.Image]]) -> None:
assert keys["series"]
@ -535,8 +525,7 @@ class IssueIdentifier:
if use_alternates:
alternate = " Alternate"
for series, issue in issues:
self.current_progress = counter, len(issues)
self._user_canceled(functools.partial(self.progress_callback, counter, len(issues), b""))
self._user_canceled(self.progress_callback, counter, len(issues))
counter += 1
self.log_msg(
@ -597,9 +586,8 @@ class IssueIdentifier:
try:
search_results = self.talker.search_for_series(
terms["series"],
callback=lambda x, y: self._user_canceled(functools.partial(self.progress_callback, x, y, b"")),
series_match_thresh=self.config.series_match_search_thresh,
on_rate_limit=self.on_rate_limit,
callback=lambda x, y: self._user_canceled(self.progress_callback, x, y),
series_match_thresh=self.config.Issue_Identifier__series_match_search_thresh,
)
except TalkerError as e:
self.log_msg(f"Error searching for series.\n{e}")
@ -616,16 +604,13 @@ class IssueIdentifier:
self.log_msg(f"Searching in {len(filtered_series)} series")
self._user_canceled(functools.partial(self.progress_callback, 0, len(filtered_series), b""))
self._user_canceled(self.progress_callback, 0, len(filtered_series))
series_by_id = {series.id: series for series in filtered_series}
try:
talker_result = self.talker.fetch_issues_by_series_issue_num_and_year(
list(series_by_id.keys()),
terms["issue_number"],
terms["year"],
on_rate_limit=self.on_rate_limit,
list(series_by_id.keys()), terms["issue_number"], terms["year"]
)
except TalkerError as e:
self.log_msg(f"Issue with while searching for series details. Aborting...\n{e}")
@ -636,7 +621,7 @@ class IssueIdentifier:
if not talker_result:
return []
self._user_canceled(functools.partial(self.progress_callback, 0, 0, b""))
self._user_canceled(self.progress_callback, 0, 0)
issues: list[tuple[ComicSeries, GenericMetadata]] = []

View File

@ -18,15 +18,15 @@ from __future__ import annotations
import logging
from PyQt6 import QtCore, QtGui, QtWidgets
from PyQt6 import QtCore, QtGui, QtWidgets, uic
from comicapi.genericmetadata import GenericMetadata
from comicapi.issuestring import IssueString
from comictaggerlib.coverimagewidget import CoverImageWidget
from comictaggerlib.ctsettings import ct_ns
from comictaggerlib.seriesselectionwindow import SelectionWindow
from comictaggerlib.ui import ui_path
from comictalker.comictalker import ComicTalker, RLCallBack, TalkerError
from comictaggerlib.ui import qtutils, ui_path
from comictaggerlib.ui.qtutils import new_web_view
from comictalker.comictalker import ComicTalker, TalkerError
logger = logging.getLogger(__name__)
@ -39,82 +39,117 @@ class IssueNumberTableWidgetItem(QtWidgets.QTableWidgetItem):
return (IssueString(self_str).as_float() or 0) < (IssueString(other_str).as_float() or 0)
class QueryThread(QtCore.QThread):
def __init__(
self,
talker: ComicTalker,
series_id: str,
finish: QtCore.pyqtSignal,
on_ratelimit: QtCore.pyqtSignal,
) -> None:
super().__init__()
self.series_id = series_id
self.talker = talker
self.finish = finish
self.on_ratelimit = on_ratelimit
def run(self) -> None:
# QtWidgets.QApplication.setOverrideCursor(QtGui.QCursor(QtCore.Qt.CursorShape.WaitCursor))
try:
issue_list = [
x
for x in self.talker.fetch_issues_in_series(
self.series_id, on_rate_limit=RLCallBack(lambda x, y: self.on_ratelimit.emit(x, y), 10)
)
if x.issue_id is not None
]
except TalkerError as e:
logger.exception("Failed to retrieve issue list: %s", e)
# QtWidgets.QApplication.restoreOverrideCursor()
# QtWidgets.QMessageBox.critical(None, f"{e.source} {e.code_name} Error", f"{e}")
return
# QtWidgets.QApplication.restoreOverrideCursor()
self.finish.emit(issue_list)
class IssueSelectionWindow(SelectionWindow):
ui_file = ui_path / "issueselectionwindow.ui"
CoverImageMode = CoverImageWidget.AltCoverMode
finish = QtCore.pyqtSignal(list)
class IssueSelectionWindow(QtWidgets.QDialog):
def __init__(
self,
parent: QtWidgets.QWidget,
config: ct_ns,
talker: ComicTalker,
series_id: str = "",
issue_number: str = "",
series_id: str,
issue_number: str,
) -> None:
super().__init__(parent, config, talker)
super().__init__(parent)
with (ui_path / "issueselectionwindow.ui").open(encoding="utf-8") as uifile:
uic.loadUi(uifile, self)
self.coverWidget = CoverImageWidget(
self.coverImageContainer,
CoverImageWidget.AltCoverMode,
config.Runtime_Options__config.user_cache_dir,
)
gridlayout = QtWidgets.QGridLayout(self.coverImageContainer)
gridlayout.addWidget(self.coverWidget)
gridlayout.setContentsMargins(0, 0, 0, 0)
self.teDescription: QtWidgets.QWidget
webengine = new_web_view(self)
if webengine:
self.teDescription = qtutils.replaceWidget(self.splitter, self.teDescription, webengine)
logger.info("successfully loaded QWebEngineView")
else:
logger.info("failed to open QWebEngineView")
self.setWindowFlags(
QtCore.Qt.WindowType(
self.windowFlags()
| QtCore.Qt.WindowType.WindowSystemMenuHint
| QtCore.Qt.WindowType.WindowMaximizeButtonHint
)
)
self.series_id = series_id
self.issue_id: str = ""
self.config = config
self.talker = talker
self.issue_list: dict[str, GenericMetadata] = {}
self.issue_number = issue_number
# Display talker logo and set url
self.lblIssuesSourceName.setText(talker.attribution)
self.imageIssuesSourceWidget = CoverImageWidget(
self.imageIssuesSourceLogo,
CoverImageWidget.URLMode,
config.Runtime_Options__config.user_cache_dir,
False,
)
self.imageIssuesSourceWidget.showControls = False
gridlayoutIssuesSourceLogo = QtWidgets.QGridLayout(self.imageIssuesSourceLogo)
gridlayoutIssuesSourceLogo.addWidget(self.imageIssuesSourceWidget)
gridlayoutIssuesSourceLogo.setContentsMargins(0, 2, 0, 0)
self.imageIssuesSourceWidget.set_url(talker.logo_url)
if issue_number is None or issue_number == "":
self.issue_number = "1"
else:
self.issue_number = issue_number
self.initial_id: str = ""
self.perform_query()
self.twList.resizeColumnsToContents()
self.twList.currentItemChanged.connect(self.current_item_changed)
self.twList.cellDoubleClicked.connect(self.cell_double_clicked)
# now that the list has been sorted, find the initial record, and
# select it
if not self.initial_id:
self.twList.selectRow(0)
else:
for r in range(0, self.twList.rowCount()):
issue_id = self.twList.item(r, 0).data(QtCore.Qt.ItemDataRole.UserRole)
if issue_id == self.initial_id:
self.twList.selectRow(r)
break
self.leFilter.textChanged.connect(self.filter)
self.finish.connect(self.query_finished)
def perform_query(self) -> None: # type: ignore[override]
self.querythread = QueryThread(
self.talker,
self.series_id,
self.finish,
self.ratelimit,
)
self.querythread.start()
def filter(self, text: str) -> None:
rows = set(range(self.twList.rowCount()))
for r in rows:
self.twList.showRow(r)
if text.strip():
shown_rows = {x.row() for x in self.twList.findItems(text, QtCore.Qt.MatchFlag.MatchContains)}
for r in rows - shown_rows:
self.twList.hideRow(r)
def perform_query(self) -> None:
QtWidgets.QApplication.setOverrideCursor(QtGui.QCursor(QtCore.Qt.CursorShape.WaitCursor))
try:
self.issue_list = {
x.issue_id: x for x in self.talker.fetch_issues_in_series(self.series_id) if x.issue_id is not None
}
except TalkerError as e:
QtWidgets.QApplication.restoreOverrideCursor()
QtWidgets.QMessageBox.critical(self, f"{e.source} {e.code_name} Error", f"{e}")
return
def query_finished(self, issues: list[GenericMetadata]) -> None:
self.twList.setRowCount(0)
self.twList.setSortingEnabled(False)
self.issue_list = {i.issue_id: i for i in issues if i.issue_id is not None}
self.twList.clear()
for row, issue in enumerate(issues):
for row, issue in enumerate(self.issue_list.values()):
self.twList.insertRow(row)
self.twList.setItem(row, 0, IssueNumberTableWidgetItem())
self.twList.setItem(row, 1, QtWidgets.QTableWidgetItem())
@ -127,22 +162,20 @@ class IssueSelectionWindow(SelectionWindow):
self.twList.setSortingEnabled(True)
self.twList.sortItems(0, QtCore.Qt.SortOrder.AscendingOrder)
self.twList: QtWidgets.QTableWidget
if self.initial_id:
for r in range(0, self.twList.rowCount()):
item = self.twList.item(r, 0)
issue_id = item.data(QtCore.Qt.ItemDataRole.UserRole)
if issue_id == self.initial_id:
self.twList.selectRow(r)
self.twList.scrollToItem(item, QtWidgets.QAbstractItemView.ScrollHint.EnsureVisible)
break
self.show()
QtWidgets.QApplication.restoreOverrideCursor()
def cell_double_clicked(self, r: int, c: int) -> None:
self.accept()
def update_row(self, row: int, issue: GenericMetadata) -> None: # type: ignore[override]
self.twList.setStyleSheet(self.twList.styleSheet())
def set_description(self, widget: QtWidgets.QWidget, text: str) -> None:
if isinstance(widget, QtWidgets.QTextEdit):
widget.setText(text.replace("</figure>", "</div>").replace("<figure", "<div"))
else:
html = text
widget.setHtml(html, QtCore.QUrl(self.talker.website))
def update_row(self, row: int, issue: GenericMetadata) -> None:
item_text = issue.issue or ""
item = self.twList.item(row, 0)
item.setText(item_text)
@ -168,23 +201,35 @@ class IssueSelectionWindow(SelectionWindow):
qtw_item.setData(QtCore.Qt.ItemDataRole.ToolTipRole, item_text)
qtw_item.setFlags(QtCore.Qt.ItemFlag.ItemIsSelectable | QtCore.Qt.ItemFlag.ItemIsEnabled)
def _fetch(self, row: int) -> GenericMetadata: # type: ignore[override]
def current_item_changed(self, curr: QtCore.QModelIndex | None, prev: QtCore.QModelIndex | None) -> None:
if curr is None:
return
if prev is not None and prev.row() == curr.row():
return
row = curr.row()
self.issue_id = self.twList.item(row, 0).data(QtCore.Qt.ItemDataRole.UserRole)
# list selection was changed, update the issue cover
issue = self.issue_list[self.issue_id]
if not (issue.issue and issue.year and issue.month and issue._cover_image and issue.title):
QtWidgets.QApplication.setOverrideCursor(QtGui.QCursor(QtCore.Qt.CursorShape.WaitCursor))
try:
issue = self.talker.fetch_comic_data(
issue_id=self.issue_id, on_rate_limit=RLCallBack(self.on_ratelimit, 10)
)
issue = self.talker.fetch_comic_data(issue_id=self.issue_id)
except TalkerError:
pass
QtWidgets.QApplication.restoreOverrideCursor()
self.issue_number = issue.issue or ""
# We don't currently have a way to display hashes to the user
# TODO: display the hash to the user so they know it will be used for cover matching
alt_images = [url.URL for url in issue._alternate_images]
cover = issue._cover_image.URL if issue._cover_image else ""
self.cover_widget.set_issue_details(self.issue_id, [cover, *alt_images])
self.set_description(self.teDescription, issue.description or "")
return issue
self.coverWidget.set_issue_details(self.issue_id, [cover, *alt_images])
if issue.description is None:
self.set_description(self.teDescription, "")
else:
self.set_description(self.teDescription, issue.description)
# Update current record information
self.update_row(row, issue)

View File

@ -28,6 +28,7 @@ def setup_logging(verbose: int, log_dir: pathlib.Path) -> None:
logging.getLogger("comicapi").setLevel(logging.DEBUG)
logging.getLogger("comictaggerlib").setLevel(logging.DEBUG)
logging.getLogger("comictalker").setLevel(logging.DEBUG)
logging.getLogger("pyrate_limiter").setLevel(logging.DEBUG)
log_file = log_dir / "ComicTagger.log"
log_dir.mkdir(parents=True, exist_ok=True)

View File

@ -2,8 +2,7 @@ from __future__ import annotations
from datetime import datetime
from comicapi import merge, utils
from comicapi.comicarchive import ComicArchive
from comicapi import utils
from comicapi.genericmetadata import GenericMetadata
from comictaggerlib import ctversion
from comictaggerlib.cbltransformer import CBLTransformer
@ -38,25 +37,3 @@ def prepare_metadata(md: GenericMetadata, new_md: GenericMetadata, config: Settn
notes=utils.combine_notes(final_md.notes, notes, "Tagged with ComicTagger"),
description=cleanup_html(final_md.description, config.Metadata_Options__remove_html_tables) or None,
)
def read_selected_tags(
tag_ids: list[str], ca: ComicArchive, mode: merge.Mode = merge.Mode.OVERLAY, merge_lists: bool = False
) -> tuple[GenericMetadata, list[str], Exception | None]:
md = GenericMetadata()
error = None
tags_used = []
try:
for tag_id in tag_ids:
metadata = ca.read_tags(tag_id)
if not metadata.is_empty:
md.overlay(
metadata,
mode=mode,
merge_lists=merge_lists,
)
tags_used.append(tag_id)
except Exception as e:
error = e
return md, tags_used, error

View File

@ -381,7 +381,7 @@ class QuickTag:
aggressive_results, display_results, ca, tags, interactive, aggressive_filtering
)
if chosen_result:
return self.talker.fetch_comic_data(issue_id=chosen_result.ID, on_rate_limit=None)
return self.talker.fetch_comic_data(issue_id=chosen_result.ID)
return None
def SearchHashes(
@ -421,10 +421,10 @@ class QuickTag:
self.output(f"Retrieving basic {self.talker.name} data for {len(relevant_ids)} results")
# Try to do a bulk fetch of basic issue data, if we have more than 1 id
if hasattr(self.talker, "fetch_comics") and len(all_ids) > 1:
md_results = self.talker.fetch_comics(issue_ids=list(all_ids), on_rate_limit=None)
md_results = self.talker.fetch_comics(issue_ids=list(all_ids))
else:
for md_id in all_ids:
md_results.append(self.talker.fetch_comic_data(issue_id=md_id, on_rate_limit=None))
md_results.append(self.talker.fetch_comic_data(issue_id=md_id))
retrieved_ids = {ID(self.domain, md.issue_id) for md in md_results} # type: ignore[arg-type]
bad_ids = relevant_ids - retrieved_ids

View File

@ -82,7 +82,7 @@ class RenameWindow(QtWidgets.QDialog):
new_ext = ca.extension()
if md is None or md.is_empty:
md, _, error = self.parent().read_selected_tags(self.read_tag_ids, ca)
md, error = self.parent().read_selected_tags(self.read_tag_ids, ca)
if error is not None:
logger.error("Failed to load tags from %s: %s", ca.path, error)
QtWidgets.QMessageBox.warning(

View File

@ -19,25 +19,23 @@ from __future__ import annotations
import difflib
import itertools
import logging
from abc import ABCMeta, abstractmethod
import natsort
from PyQt6 import QtCore, QtGui, QtWidgets, uic
from PyQt6.QtCore import Qt, QUrl, pyqtSignal
from PyQt6.QtCore import QUrl, pyqtSignal
from comicapi import utils
from comicapi.comicarchive import ComicArchive
from comicapi.genericmetadata import ComicSeries, GenericMetadata
from comictaggerlib.coverimagewidget import CoverImageWidget
from comictaggerlib.ctsettings import ct_ns
from comictaggerlib.ctsettings.settngs_namespace import SettngsNS
from comictaggerlib.issueidentifier import IssueIdentifier, IssueIdentifierOptions
from comictaggerlib.issueidentifier import Result as IIResult
from comictaggerlib.issueidentifier import IssueIdentifier
from comictaggerlib.issueselectionwindow import IssueSelectionWindow
from comictaggerlib.matchselectionwindow import MatchSelectionWindow
from comictaggerlib.progresswindow import IDProgressWindow
from comictaggerlib.resulttypes import IssueResult
from comictaggerlib.ui import qtutils, ui_path
from comictalker.comictalker import ComicTalker, RLCallBack, TalkerError
from comictalker.comictalker import ComicTalker, TalkerError
logger = logging.getLogger(__name__)
@ -45,7 +43,6 @@ logger = logging.getLogger(__name__)
class SearchThread(QtCore.QThread):
searchComplete = pyqtSignal()
progressUpdate = pyqtSignal(int, int)
ratelimit = pyqtSignal(float, float)
def __init__(
self, talker: ComicTalker, series_name: str, refresh: bool, literal: bool = False, series_match_thresh: int = 90
@ -64,12 +61,7 @@ class SearchThread(QtCore.QThread):
try:
self.ct_error = False
self.ct_search_results = self.talker.search_for_series(
self.series_name,
callback=self.prog_callback,
refresh_cache=self.refresh,
literal=self.literal,
series_match_thresh=self.series_match_thresh,
on_rate_limit=RLCallBack(self.on_ratelimit, 10),
self.series_name, self.prog_callback, self.refresh, self.literal, self.series_match_thresh
)
except TalkerError as e:
self.ct_search_results = []
@ -82,94 +74,60 @@ class SearchThread(QtCore.QThread):
def prog_callback(self, current: int, total: int) -> None:
self.progressUpdate.emit(current, total)
def on_ratelimit(self, full_time: float, sleep_time: float) -> None:
self.ratelimit.emit(full_time, sleep_time)
class IdentifyThread(QtCore.QThread):
ratelimit = pyqtSignal(float, float)
identifyComplete = pyqtSignal(IIResult, list)
identifyComplete = pyqtSignal((int, list))
identifyLogMsg = pyqtSignal(str)
identifyProgress = pyqtSignal(int, int)
def __init__(self, ca: ComicArchive, config: SettngsNS, talker: ComicTalker, md: GenericMetadata) -> None:
def __init__(self, identifier: IssueIdentifier, ca: ComicArchive, md: GenericMetadata) -> None:
QtCore.QThread.__init__(self)
iio = IssueIdentifierOptions(
series_match_search_thresh=config.Issue_Identifier__series_match_search_thresh,
series_match_identify_thresh=config.Issue_Identifier__series_match_identify_thresh,
use_publisher_filter=config.Auto_Tag__use_publisher_filter,
publisher_filter=config.Auto_Tag__publisher_filter,
quiet=config.Runtime_Options__quiet,
cache_dir=config.Runtime_Options__config.user_cache_dir,
border_crop_percent=config.Issue_Identifier__border_crop_percent,
talker=talker,
)
self.identifier = IssueIdentifier(
iio,
on_rate_limit=RLCallBack(self.on_ratelimit, 10),
output=self.log_output,
on_progress=self.progress_callback,
)
self.identifier = identifier
self.identifier.set_output_function(self.log_output)
self.identifier.set_progress_callback(self.progress_callback)
self.ca = ca
self.md = md
def log_output(self, text: str) -> None:
self.identifyLogMsg.emit(str(text))
def progress_callback(self, cur: int, total: int, image: bytes) -> None:
def progress_callback(self, cur: int, total: int) -> None:
self.identifyProgress.emit(cur, total)
def run(self) -> None:
self.identifyComplete.emit(*self.identifier.identify(self.ca, self.md))
def cancel(self) -> None:
self.identifier.cancel = True
def on_ratelimit(self, full_time: float, sleep_time: float) -> None:
self.ratelimit.emit(full_time, sleep_time)
class SelectionWindow(QtWidgets.QDialog):
__metaclass__ = ABCMeta
ui_file = ui_path / "seriesselectionwindow.ui"
CoverImageMode = CoverImageWidget.URLMode
ratelimit = pyqtSignal(float, float)
class SeriesSelectionWindow(QtWidgets.QDialog):
def __init__(
self,
parent: QtWidgets.QWidget,
series_name: str,
issue_number: str,
year: int | None,
issue_count: int | None,
comic_archive: ComicArchive | None,
config: ct_ns,
talker: ComicTalker,
series_name: str = "",
issue_number: str = "",
comic_archive: ComicArchive | None = None,
year: int | None = None,
issue_count: int | None = None,
autoselect: bool = False,
literal: bool = False,
) -> None:
super().__init__(parent)
self.setWindowModality(Qt.WindowModality.WindowModal)
with self.ui_file.open(encoding="utf-8") as uifile:
with (ui_path / "seriesselectionwindow.ui").open(encoding="utf-8") as uifile:
uic.loadUi(uifile, self)
self.cover_widget = CoverImageWidget(
self.coverImageContainer,
self.CoverImageMode,
config.Runtime_Options__config.user_cache_dir,
self.imageWidget = CoverImageWidget(
self.imageContainer, CoverImageWidget.URLMode, config.Runtime_Options__config.user_cache_dir
)
gridlayout = QtWidgets.QGridLayout(self.coverImageContainer)
gridlayout.addWidget(self.cover_widget)
gridlayout = QtWidgets.QGridLayout(self.imageContainer)
gridlayout.addWidget(self.imageWidget)
gridlayout.setContentsMargins(0, 0, 0, 0)
self.teDescription: QtWidgets.QWidget
self.teDetails: QtWidgets.QWidget
webengine = qtutils.new_web_view(self)
if webengine:
self.teDescription = qtutils.replaceWidget(self.splitter, self.teDescription, webengine)
logger.info("successfully loaded QWebEngineView")
else:
logger.info("failed to open QWebEngineView")
self.teDetails = qtutils.replaceWidget(self.splitter, self.teDetails, webengine)
self.setWindowFlags(
QtCore.Qt.WindowType(
@ -180,11 +138,29 @@ class SelectionWindow(QtWidgets.QDialog):
)
self.config = config
self.talker = talker
self.series_name = series_name
self.issue_number = issue_number
self.issue_id: str = ""
self.year = year
self.issue_count = issue_count
self.series_id: str = ""
self.comic_archive = comic_archive
self.immediate_autoselect = autoselect
self.series_list: dict[str, ComicSeries] = {}
self.literal = literal
self.ii: IssueIdentifier | None = None
self.iddialog: IDProgressWindow | None = None
self.id_thread: IdentifyThread | None = None
self.progdialog: QtWidgets.QProgressDialog | None = None
self.search_thread: SearchThread | None = None
self.use_filter = self.config.Auto_Tag__use_publisher_filter
# Load to retrieve settings
self.talker = talker
# Display talker logo and set url
self.lblIssuesSourceName.setText(talker.attribution)
self.lblSourceName.setText(talker.attribution)
self.imageSourceWidget = CoverImageWidget(
self.imageSourceLogo,
@ -201,181 +177,19 @@ class SelectionWindow(QtWidgets.QDialog):
# Set the minimum row height to the default.
# this way rows will be more consistent when resizeRowsToContents is called
self.twList.verticalHeader().setMinimumSectionSize(self.twList.verticalHeader().defaultSectionSize())
self.twList.resizeColumnsToContents()
self.twList.currentItemChanged.connect(self.current_item_changed)
self.twList.cellDoubleClicked.connect(self.cell_double_clicked)
self.leFilter.textChanged.connect(self.filter)
self.twList.selectRow(0)
@abstractmethod
def perform_query(self, refresh: bool = False) -> None: ...
@abstractmethod
def cell_double_clicked(self, r: int, c: int) -> None: ...
@abstractmethod
def update_row(self, row: int, series: ComicSeries) -> None: ...
def set_description(self, widget: QtWidgets.QWidget, text: str) -> None:
if isinstance(widget, QtWidgets.QTextEdit):
widget.setText(text.replace("</figure>", "</div>").replace("<figure", "<div"))
else:
html = text
widget.setHtml(html, QUrl(self.talker.website))
def filter(self, text: str) -> None:
rows = set(range(self.twList.rowCount()))
for r in rows:
self.twList.showRow(r)
if text.strip():
shown_rows = {x.row() for x in self.twList.findItems(text, QtCore.Qt.MatchFlag.MatchContains)}
for r in rows - shown_rows:
self.twList.hideRow(r)
@abstractmethod
def _fetch(self, row: int) -> ComicSeries: ...
def on_ratelimit(self, full_time: float, sleep_time: float) -> None:
self.ratelimit.emit(full_time, sleep_time)
def current_item_changed(self, curr: QtCore.QModelIndex | None, prev: QtCore.QModelIndex | None) -> None:
if curr is None:
return
if prev is not None and prev.row() == curr.row():
return
row = curr.row()
item = self._fetch(row)
QtWidgets.QApplication.restoreOverrideCursor()
# Update current record information
self.update_row(row, item)
class SeriesSelectionWindow(SelectionWindow):
ui_file = ui_path / "seriesselectionwindow.ui"
CoverImageMode = CoverImageWidget.URLMode
def __init__(
self,
parent: QtWidgets.QWidget,
config: ct_ns,
talker: ComicTalker,
series_name: str = "",
issue_number: str = "",
comic_archive: ComicArchive | None = None,
year: int | None = None,
issue_count: int | None = None,
autoselect: bool = False,
literal: bool = False,
) -> None:
from comictaggerlib.issueselectionwindow import IssueSelectionWindow
super().__init__(
parent,
config,
talker,
series_name,
issue_number,
comic_archive,
year,
issue_count,
autoselect,
literal,
)
self.count = 0
self.series_name = series_name
self.issue_number = issue_number
self.year = year
self.issue_count = issue_count
self.series_id: str = ""
self.comic_archive = comic_archive
self.immediate_autoselect = autoselect
self.series_list: dict[str, ComicSeries] = {}
self.literal = literal
self.iddialog: IDProgressWindow | None = None
self.id_thread: IdentifyThread | None = None
self.progdialog: QtWidgets.QProgressDialog | None = None
self.search_thread: SearchThread | None = None
self.use_publisher_filter = self.config.Auto_Tag__use_publisher_filter
self.btnRequery.clicked.connect(self.requery)
self.btnIssues.clicked.connect(self.show_issues)
self.btnAutoSelect.clicked.connect(self.auto_select)
self.cbxPublisherFilter.setChecked(self.use_publisher_filter)
self.cbxPublisherFilter.toggled.connect(self.publisher_filter_toggled)
self.ratelimit.connect(self.ratelimit_message)
self.cbxFilter.setChecked(self.use_filter)
self.cbxFilter.toggled.connect(self.filter_toggled)
self.update_buttons()
self.twList.selectRow(0)
self.selector = IssueSelectionWindow(self, self.config, self.talker, self.series_id, self.issue_number)
self.selector.ratelimit.connect(self.ratelimit)
self.selector.finished.connect(self.issue_selected)
def perform_query(self, refresh: bool = False) -> None:
self.search_thread = SearchThread(
self.talker,
self.series_name,
refresh,
self.literal,
self.config.Issue_Identifier__series_match_search_thresh,
)
self.search_thread.searchComplete.connect(self.search_complete)
self.search_thread.progressUpdate.connect(self.search_progress_update)
self.search_thread.ratelimit.connect(self.ratelimit)
self.search_thread.start()
self.progdialog = QtWidgets.QProgressDialog("Searching Online", "Cancel", 0, 100, self)
self.progdialog.setWindowTitle("Online Search")
self.progdialog.canceled.connect(self.search_canceled)
self.progdialog.setModal(True)
self.progdialog.setMinimumDuration(300)
if refresh or self.search_thread.isRunning():
self.progdialog.exec()
else:
self.progdialog = None
def cell_double_clicked(self, r: int, c: int) -> None:
self.show_issues()
def update_row(self, row: int, series: ComicSeries) -> None:
item_text = series.name
item = self.twList.item(row, 0)
item.setText(item_text)
item.setData(QtCore.Qt.ItemDataRole.ToolTipRole, item_text)
item.setData(QtCore.Qt.ItemDataRole.UserRole, series.id)
item.setFlags(QtCore.Qt.ItemFlag.ItemIsSelectable | QtCore.Qt.ItemFlag.ItemIsEnabled)
item_text = f"{series.start_year:04}" if series.start_year is not None else ""
item = self.twList.item(row, 1)
item.setText(item_text)
item.setData(QtCore.Qt.ItemDataRole.ToolTipRole, item_text)
item.setFlags(QtCore.Qt.ItemFlag.ItemIsSelectable | QtCore.Qt.ItemFlag.ItemIsEnabled)
item_text = f"{series.count_of_issues:04}" if series.count_of_issues is not None else ""
item = self.twList.item(row, 2)
item.setText(item_text)
item.setData(QtCore.Qt.ItemDataRole.ToolTipRole, item_text)
item.setData(QtCore.Qt.ItemDataRole.DisplayRole, series.count_of_issues)
item.setFlags(QtCore.Qt.ItemFlag.ItemIsSelectable | QtCore.Qt.ItemFlag.ItemIsEnabled)
item_text = series.publisher if series.publisher is not None else ""
item = self.twList.item(row, 3)
item.setText(item_text)
item.setData(QtCore.Qt.ItemDataRole.ToolTipRole, item_text)
item.setFlags(QtCore.Qt.ItemFlag.ItemIsSelectable | QtCore.Qt.ItemFlag.ItemIsEnabled)
def set_description(self, widget: QtWidgets.QWidget, text: str) -> None:
if isinstance(widget, QtWidgets.QTextEdit):
widget.setText(text.replace("</figure>", "</div>").replace("<figure", "<div"))
else:
html = text
widget.setHtml(html, QUrl(self.talker.website))
self.leFilter.textChanged.connect(self.filter)
def filter(self, text: str) -> None:
rows = set(range(self.twList.rowCount()))
@ -386,28 +200,6 @@ class SeriesSelectionWindow(SelectionWindow):
for r in rows - shown_rows:
self.twList.hideRow(r)
def _fetch(self, row: int) -> ComicSeries:
self.series_id = self.twList.item(row, 0).data(QtCore.Qt.ItemDataRole.UserRole)
# list selection was changed, update the info on the series
series = self.series_list[self.series_id]
if not (
series.name
and series.start_year
and series.count_of_issues
and series.publisher
and series.description
and series.image_url
):
QtWidgets.QApplication.setOverrideCursor(QtGui.QCursor(QtCore.Qt.CursorShape.WaitCursor))
try:
series = self.talker.fetch_series(self.series_id, on_rate_limit=RLCallBack(self.on_ratelimit, 10))
except TalkerError:
pass
self.set_description(self.teDescription, series.description or "")
self.cover_widget.set_url(series.image_url)
return series
def update_buttons(self) -> None:
enabled = bool(self.series_list)
@ -422,8 +214,8 @@ class SeriesSelectionWindow(SelectionWindow):
self.perform_query(refresh=True)
self.twList.selectRow(0)
def publisher_filter_toggled(self) -> None:
self.use_publisher_filter = self.cbxPublisherFilter.isChecked()
def filter_toggled(self) -> None:
self.use_filter = not self.use_filter
self.perform_query(refresh=False)
def auto_select(self) -> None:
@ -434,102 +226,112 @@ class SeriesSelectionWindow(SelectionWindow):
if self.issue_number is None or self.issue_number == "":
QtWidgets.QMessageBox.information(self, "Auto-Select", "Can't auto-select without an issue number (yet!)")
return
self.iddialog = IDProgressWindow(self)
self.iddialog.setModal(True)
self.iddialog.rejected.connect(self.identify_cancel)
self.iddialog.show()
self.ii = IssueIdentifier(self.comic_archive, self.config, self.talker)
md = GenericMetadata()
md.series = self.series_name
md.issue = self.issue_number
md.year = self.year
md.issue_count = self.issue_count
self.id_thread = IdentifyThread(self.comic_archive, self.config, self.talker, md)
self.id_thread = IdentifyThread(self.ii, self.comic_archive, md)
self.id_thread.identifyComplete.connect(self.identify_complete)
self.id_thread.identifyLogMsg.connect(self.log_output)
self.id_thread.identifyLogMsg.connect(self.log_id_output)
self.id_thread.identifyProgress.connect(self.identify_progress)
self.id_thread.ratelimit.connect(self.ratelimit)
self.iddialog.rejected.connect(self.id_thread.cancel)
self.id_thread.start()
self.iddialog.exec()
def log_output(self, text: str) -> None:
if self.iddialog is None:
return
self.iddialog.textEdit.append(text.rstrip())
self.iddialog.textEdit.ensureCursorVisible()
QtCore.QCoreApplication.processEvents()
def log_id_output(self, text: str) -> None:
if self.iddialog is not None:
self.iddialog.textEdit.append(text.rstrip())
self.iddialog.textEdit.ensureCursorVisible()
QtCore.QCoreApplication.processEvents()
def identify_progress(self, cur: int, total: int) -> None:
if self.iddialog is None:
return
self.iddialog.progressBar.setMaximum(total)
self.iddialog.progressBar.setValue(cur)
if self.iddialog is not None:
self.iddialog.progressBar.setMaximum(total)
self.iddialog.progressBar.setValue(cur)
def identify_complete(self, result: IIResult, issues: list[IssueResult]) -> None:
if not (self.iddialog is not None and self.comic_archive is not None):
return
def identify_cancel(self) -> None:
if self.ii is not None:
self.ii.cancel = True
found_match = None
choices = False
if result == IIResult.no_matches:
QtWidgets.QMessageBox.information(self, "Auto-Select Result", " No issues found :-(")
elif result == IIResult.single_bad_cover_score:
QtWidgets.QMessageBox.information(
self,
"Auto-Select Result",
" Found a match, but cover doesn't seem the same. Verify before committing!",
)
found_match = issues[0]
elif result == IIResult.multiple_bad_cover_scores:
QtWidgets.QMessageBox.information(
self, "Auto-Select Result", " Found some possibilities, but no confidence. Proceed manually."
)
choices = True
elif result == IIResult.single_good_match:
found_match = issues[0]
elif result == IIResult.multiple_good_matches:
QtWidgets.QMessageBox.information(
self, "Auto-Select Result", " Found multiple likely matches. Please select."
)
choices = True
def identify_complete(self, result: int, issues: list[IssueResult]) -> None:
if self.iddialog is not None and self.comic_archive is not None:
if choices:
selector = MatchSelectionWindow(self, issues, self.comic_archive, talker=self.talker, config=self.config)
selector.exec()
if selector.result():
# we should now have a list index
found_match = selector.current_match()
found_match = None
choices = False
if result == IssueIdentifier.result_no_matches:
QtWidgets.QMessageBox.information(self, "Auto-Select Result", " No issues found :-(")
elif result == IssueIdentifier.result_found_match_but_bad_cover_score:
QtWidgets.QMessageBox.information(
self,
"Auto-Select Result",
" Found a match, but cover doesn't seem the same. Verify before committing!",
)
found_match = issues[0]
elif result == IssueIdentifier.result_found_match_but_not_first_page:
QtWidgets.QMessageBox.information(
self, "Auto-Select Result", " Found a match, but not with the first page of the archive."
)
found_match = issues[0]
elif result == IssueIdentifier.result_multiple_matches_with_bad_image_scores:
QtWidgets.QMessageBox.information(
self, "Auto-Select Result", " Found some possibilities, but no confidence. Proceed manually."
)
choices = True
elif result == IssueIdentifier.result_one_good_match:
found_match = issues[0]
elif result == IssueIdentifier.result_multiple_good_matches:
QtWidgets.QMessageBox.information(
self, "Auto-Select Result", " Found multiple likely matches. Please select."
)
choices = True
if found_match is not None:
self.iddialog.accept()
if choices:
selector = MatchSelectionWindow(
self, issues, self.comic_archive, talker=self.talker, config=self.config
)
selector.setModal(True)
selector.exec()
if selector.result():
# we should now have a list index
found_match = selector.current_match()
self.series_id = utils.xlate(found_match.series_id) or ""
self.issue_number = found_match.issue_number
self.select_by_id()
self.show_issues()
if found_match is not None:
self.iddialog.accept()
self.series_id = utils.xlate(found_match.series_id) or ""
self.issue_number = found_match.issue_number
self.select_by_id()
self.show_issues()
def show_issues(self) -> None:
selector = IssueSelectionWindow(self, self.config, self.talker, self.series_id, self.issue_number)
title = ""
for series in self.series_list.values():
if series.id == self.series_id:
title = f"{series.name} ({series.start_year:04}) - " if series.start_year else f"{series.name} - "
break
self.selector.setWindowTitle(title + "Select Issue")
self.selector.series_id = self.series_id
self.selector.perform_query()
def issue_selected(self, result: list[GenericMetadata]) -> None:
if result and self.selector:
selector.setWindowTitle(title + "Select Issue")
selector.setModal(True)
selector.exec()
if selector.result():
# we should now have a series ID
self.issue_number = self.selector.issue_number
self.issue_id = self.selector.issue_id
self.issue_number = selector.issue_number
self.issue_id = selector.issue_id
self.accept()
else:
self.cover_widget.update_content()
self.imageWidget.update_content()
def select_by_id(self) -> None:
for r in range(self.twList.rowCount()):
@ -537,30 +339,46 @@ class SeriesSelectionWindow(SelectionWindow):
self.twList.selectRow(r)
break
def perform_query(self, refresh: bool = False) -> None:
    """Start an asynchronous online search for the current series name.

    Spawns a SearchThread and shows a modal progress dialog while the
    search runs.

    Args:
        refresh: when True, bypass cached results and re-query the talker
            (and always show the progress dialog).
    """
    self.search_thread = SearchThread(
        self.talker,
        self.series_name,
        refresh,
        self.literal,
        self.config.Issue_Identifier__series_match_search_thresh,
    )
    self.search_thread.searchComplete.connect(self.search_complete)
    self.search_thread.progressUpdate.connect(self.search_progress_update)
    self.search_thread.start()
    self.progdialog = QtWidgets.QProgressDialog("Searching Online", "Cancel", 0, 100, self)
    self.progdialog.setWindowTitle("Online Search")
    self.progdialog.canceled.connect(self.search_canceled)
    self.progdialog.setModal(True)
    # Only pop the dialog up if the search takes longer than 300 ms.
    self.progdialog.setMinimumDuration(300)
    if refresh or self.search_thread.isRunning():
        self.progdialog.exec()
    else:
        # Search finished before the dialog was needed; drop it so the
        # progress/cancel handlers become no-ops.
        self.progdialog = None
def search_canceled(self) -> None:
    """Handle the user cancelling the online-search progress dialog.

    Disconnects thread and dialog signals so late emissions cannot touch
    a dead dialog, rejects the dialog, then schedules this window to
    close itself.
    """
    if self.progdialog is None:
        return
    logger.info("query cancelled")
    # Fix: the cancellation logic appeared twice (early-return form
    # followed by a leftover `is not None` copy — a merge artifact);
    # run it exactly once.
    if self.search_thread is not None:
        self.search_thread.searchComplete.disconnect()
        self.search_thread.progressUpdate.disconnect()
    self.progdialog.canceled.disconnect()
    self.progdialog.reject()
    # Defer closing slightly so Qt can finish tearing down the dialog.
    QtCore.QTimer.singleShot(200, self.close_me)
def close_me(self) -> None:
    """Close this selection window with a rejected result."""
    self.reject()
def search_progress_update(self, current: int, total: int) -> None:
    """Reflect search-thread progress in the progress dialog.

    Args:
        current: zero-based index of the series just processed.
        total: total number of series to process.
    """
    if self.progdialog is None:
        return
    try:
        # Fix: dropped the redundant inner `is not None` check (already
        # guaranteed by the early return above) and the duplicated
        # processEvents() call — both merge artifacts.
        self.progdialog.setMaximum(total)
        self.progdialog.setValue(current + 1)
        QtCore.QCoreApplication.processEvents()
    except Exception:
        # The dialog may be torn down mid-update; a late progress tick is
        # harmless, so this is a deliberate best-effort swallow.
        ...
def search_complete(self) -> None:
if self.progdialog is not None:
@ -578,7 +396,7 @@ class SeriesSelectionWindow(SelectionWindow):
tmp_list = self.search_thread.ct_search_results if self.search_thread is not None else []
self.series_list = {x.id: x for x in tmp_list}
# filter the publishers if enabled set
if self.use_publisher_filter:
if self.use_filter:
try:
publisher_filter = {s.strip().casefold() for s in self.config.Auto_Tag__publisher_filter}
# use '' as publisher name if None
@ -673,21 +491,58 @@ class SeriesSelectionWindow(SelectionWindow):
# Resize row height so the whole series can still be seen
self.twList.resizeRowsToContents()
def showEvent(self, event: QtGui.QShowEvent) -> None:
    """Kick off the series query when the window is first shown.

    If nothing is found the window closes itself; if immediate
    autoselect was requested, it is deferred until after the dialog has
    had a chance to appear.
    """
    self.perform_query()
    QtCore.QCoreApplication.processEvents()
    if not self.series_list:
        # Fix: this message box was shown twice (a leftover duplicate
        # carrying a stray debug suffix); show it exactly once.
        QtWidgets.QMessageBox.information(self, "Search Result", "No matches found!")
        QtCore.QTimer.singleShot(200, self.close_me)
    elif self.immediate_autoselect:
        # defer the immediate autoselect so this dialog has time to pop up
        self.show()
        QtCore.QTimer.singleShot(10, self.do_immediate_autoselect)
    else:
        self.show()
def do_immediate_autoselect(self) -> None:
    """Run auto-select once, clearing the flag first so it cannot repeat."""
    self.immediate_autoselect = False
    self.auto_select()
def cell_double_clicked(self, r: int, c: int) -> None:
    """Open the issue-selection window for the double-clicked series row."""
    self.show_issues()
def set_description(self, widget: QtWidgets.QWidget, text: str) -> None:
    """Display *text* in *widget*, adapting the markup to the widget type."""
    if not isinstance(widget, QtWidgets.QTextEdit):
        # Web-view style widget: render as HTML, resolving relative URLs
        # against the talker's website.
        widget.setHtml(text, QUrl(self.talker.website))
        return
    # A plain QTextEdit cannot render <figure>; downgrade it to <div>.
    cleaned = text.replace("</figure>", "</div>").replace("<figure", "<div")
    widget.setText(cleaned)
def update_row(self, row: int, series: ComicSeries) -> None:
    """Refresh the four table cells of *row* from *series*."""
    selectable = QtCore.Qt.ItemFlag.ItemIsSelectable | QtCore.Qt.ItemFlag.ItemIsEnabled

    # Column 0: series name (also carries the series id as UserRole data).
    name_cell = self.twList.item(row, 0)
    name_cell.setText(series.name)
    name_cell.setData(QtCore.Qt.ItemDataRole.ToolTipRole, series.name)
    name_cell.setData(QtCore.Qt.ItemDataRole.UserRole, series.id)
    name_cell.setFlags(selectable)

    # Column 1: start year, zero-padded to four digits when known.
    year_text = "" if series.start_year is None else f"{series.start_year:04}"
    year_cell = self.twList.item(row, 1)
    year_cell.setText(year_text)
    year_cell.setData(QtCore.Qt.ItemDataRole.ToolTipRole, year_text)
    year_cell.setFlags(selectable)

    # Column 2: issue count; DisplayRole holds the raw number so the
    # column sorts numerically.
    count_text = "" if series.count_of_issues is None else f"{series.count_of_issues:04}"
    count_cell = self.twList.item(row, 2)
    count_cell.setText(count_text)
    count_cell.setData(QtCore.Qt.ItemDataRole.ToolTipRole, count_text)
    count_cell.setData(QtCore.Qt.ItemDataRole.DisplayRole, series.count_of_issues)
    count_cell.setFlags(selectable)

    # Column 3: publisher name.
    publisher_text = "" if series.publisher is None else series.publisher
    publisher_cell = self.twList.item(row, 3)
    publisher_cell.setText(publisher_text)
    publisher_cell.setData(QtCore.Qt.ItemDataRole.ToolTipRole, publisher_text)
    publisher_cell.setFlags(selectable)
def current_item_changed(self, curr: QtCore.QModelIndex | None, prev: QtCore.QModelIndex | None) -> None:
if curr is None:
return
@ -695,14 +550,31 @@ class SeriesSelectionWindow(SelectionWindow):
return
row = curr.row()
self.series_id = self.twList.item(row, 0).data(QtCore.Qt.ItemDataRole.UserRole)
item = self._fetch(row)
# list selection was changed, update the info on the series
series = self.series_list[self.series_id]
if not (
series.name
and series.start_year
and series.count_of_issues
and series.publisher
and series.description
and series.image_url
):
QtWidgets.QApplication.setOverrideCursor(QtGui.QCursor(QtCore.Qt.CursorShape.WaitCursor))
# Changing of usernames and passwords with using cache can cause talker errors to crash out
try:
series = self.talker.fetch_series(self.series_id)
except TalkerError:
pass
QtWidgets.QApplication.restoreOverrideCursor()
# Update current record information
self.update_row(row, item)
if series.description is None:
self.set_description(self.teDetails, "")
else:
self.set_description(self.teDetails, series.description)
self.imageWidget.set_url(series.image_url)
def ratelimit_message(self, full_time: float, sleep_time: float) -> None:
    """Log a human-readable notice that the talker's rate limit was hit."""
    message = f"Rate limit reached: {full_time:.0f}s until next request. Waiting {sleep_time:.0f}s for ratelimit"
    self.log_output(message)
# Update current record information
self.update_row(row, series)

View File

@ -680,6 +680,7 @@ class SettingsWindow(QtWidgets.QDialog):
def show_template_help(self) -> None:
    """Open the rename-template help window as a non-modal dialog."""
    help_window = TemplateHelpWindow(self)
    help_window.setModal(False)
    help_window.show()

View File

@ -1,123 +0,0 @@
from __future__ import annotations
import logging
import re
from typing import Any, Callable
from comicapi.comicarchive import ComicArchive
from comicapi.genericmetadata import GenericMetadata
from comictaggerlib.ctsettings import ct_ns
from comictaggerlib.issueidentifier import IssueIdentifier, IssueIdentifierOptions
from comictaggerlib.issueidentifier import Result as IIResult
from comictaggerlib.md import prepare_metadata
from comictaggerlib.resulttypes import Action, MatchStatus, OnlineMatchResults, Result, Status
from comictalker.comictalker import ComicTalker, RLCallBack, TalkerError
logger = logging.getLogger(__name__)
def identify_comic(
    ca: ComicArchive,
    md: GenericMetadata,
    tags_read: list[str],
    match_results: OnlineMatchResults,
    config: ct_ns,
    talker: ComicTalker,
    output: Callable[[str], Any],
    on_rate_limit: RLCallBack | None,
    on_progress: Callable[[int, int, bytes], Any] | None = None,
) -> tuple[Result, OnlineMatchResults]:
    """Identify the issue in *ca* online, seeded by the metadata *md*.

    Runs the IssueIdentifier against *talker*, classifies the outcome
    (no match / low confidence / multiple matches / single good match),
    and on a single acceptable match fetches the full issue data and
    merges it with *md* via prepare_metadata.

    Args:
        ca: archive being identified.
        md: seed metadata (series/issue/year) used for the search.
        tags_read: tag ids the seed metadata was read from (recorded on
            the Result).
        match_results: running tally of outcomes; the new Result is
            appended to the appropriate bucket and the same object is
            returned.
        config: settings namespace controlling thresholds and filters.
        talker: metadata source to query.
        output: sink for human-readable progress text.
        on_rate_limit: optional callback invoked when rate-limited.
        on_progress: optional (current, total, cover-bytes) callback.

    Returns:
        A (Result, match_results) pair describing this archive's outcome.
    """
    # ct_md, results, matches, match_results
    if md is None or md.is_empty:
        logger.error("No metadata given to search online with!")
        res = Result(
            Action.save,
            status=Status.match_failure,
            original_path=ca.path,
            match_status=MatchStatus.no_match,
            tags_read=tags_read,
        )
        match_results.no_matches.append(res)
        return res, match_results
    iio = IssueIdentifierOptions(
        series_match_search_thresh=config.Issue_Identifier__series_match_search_thresh,
        series_match_identify_thresh=config.Issue_Identifier__series_match_identify_thresh,
        use_publisher_filter=config.Auto_Tag__use_publisher_filter,
        publisher_filter=config.Auto_Tag__publisher_filter,
        quiet=config.Runtime_Options__quiet,
        cache_dir=config.Runtime_Options__config.user_cache_dir,
        border_crop_percent=config.Issue_Identifier__border_crop_percent,
        talker=talker,
    )
    ii = IssueIdentifier(
        iio,
        output=output,
        on_rate_limit=on_rate_limit,
        on_progress=on_progress,
    )
    if not config.Auto_Tag__use_year_when_identifying:
        md.year = None
    if config.Auto_Tag__ignore_leading_numbers_in_filename and md.series is not None:
        # Strip leading digits/dots (e.g. list numbering) from the series name.
        md.series = re.sub(r"^([\d.]+)", "", md.series)
    result, matches = ii.identify(ca, md)
    # Base Result for all outcomes; fields are adjusted per branch below.
    res = Result(
        Action.save,
        status=Status.match_failure,
        original_path=ca.path,
        online_results=matches,
        tags_read=tags_read,
    )
    # Classify failure modes first; each appends to its bucket and returns.
    if result == IIResult.multiple_bad_cover_scores:
        res.match_status = MatchStatus.low_confidence_match
        logger.error("Online search: Multiple low confidence matches. Save aborted")
        match_results.low_confidence_matches.append(res)
        return res, match_results
    if result == IIResult.single_bad_cover_score and config.Runtime_Options__abort_on_low_confidence:
        logger.error("Online search: Low confidence match. Save aborted")
        res.match_status = MatchStatus.low_confidence_match
        match_results.low_confidence_matches.append(res)
        return res, match_results
    if result == IIResult.multiple_good_matches:
        logger.error("Online search: Multiple good matches. Save aborted")
        res.match_status = MatchStatus.multiple_match
        match_results.multiple_matches.append(res)
        return res, match_results
    if result == IIResult.no_matches:
        logger.error("Online search: No match found. Save aborted")
        res.match_status = MatchStatus.no_match
        match_results.no_matches.append(res)
        return res, match_results
    # we got here, so we have a single match
    # now get the particular issue data
    try:
        ct_md = talker.fetch_comic_data(issue_id=matches[0].issue_id, on_rate_limit=on_rate_limit)
    except TalkerError as e:
        logger.exception("Error retrieving issue details. Save aborted. %s", e)
        # An empty placeholder keeps the flow alive; the is_empty check
        # below turns it into a fetch_data_failure.
        ct_md = GenericMetadata()
    ct_md = prepare_metadata(md, ct_md, config)
    if ct_md.is_empty:
        res.status = Status.fetch_data_failure
        res.match_status = MatchStatus.good_match
        match_results.fetch_data_failures.append(res)
        return res, match_results
    res.status = Status.success
    res.md = ct_md
    if result == IIResult.single_good_match:
        res.match_status = MatchStatus.good_match
    return res, match_results

View File

@ -23,6 +23,7 @@ import operator
import os
import pickle
import platform
import re
import sys
import webbrowser
from collections.abc import Sequence
@ -43,7 +44,7 @@ from comicapi.issuestring import IssueString
from comictaggerlib import ctsettings, ctversion
from comictaggerlib.applicationlogwindow import ApplicationLogWindow, QTextEditLogger
from comictaggerlib.autotagmatchwindow import AutoTagMatchWindow
from comictaggerlib.autotagprogresswindow import AutoTagProgressWindow, AutoTagThread
from comictaggerlib.autotagprogresswindow import AutoTagProgressWindow
from comictaggerlib.autotagstartwindow import AutoTagStartWindow
from comictaggerlib.cbltransformer import CBLTransformer
from comictaggerlib.coverimagewidget import CoverImageWidget
@ -53,20 +54,20 @@ from comictaggerlib.exportwindow import ExportConflictOpts, ExportWindow
from comictaggerlib.fileselectionlist import FileSelectionList
from comictaggerlib.graphics import graphics_path
from comictaggerlib.gtinvalidator import is_valid_gtin
from comictaggerlib.issueidentifier import IssueIdentifier
from comictaggerlib.logwindow import LogWindow
from comictaggerlib.md import prepare_metadata, read_selected_tags
from comictaggerlib.md import prepare_metadata
from comictaggerlib.optionalmsgdialog import OptionalMessageDialog
from comictaggerlib.pagebrowser import PageBrowserWindow
from comictaggerlib.pagelisteditor import PageListEditor
from comictaggerlib.renamewindow import RenameWindow
from comictaggerlib.resulttypes import OnlineMatchResults
from comictaggerlib.resulttypes import Action, MatchStatus, OnlineMatchResults, Result, Status
from comictaggerlib.seriesselectionwindow import SeriesSelectionWindow
from comictaggerlib.settingswindow import SettingsWindow
from comictaggerlib.ui import qtutils, ui_path
from comictaggerlib.ui.pyqttoast import Toast, ToastPreset
from comictaggerlib.ui import ui_path
from comictaggerlib.ui.qtutils import center_window_on_parent, enable_widget
from comictaggerlib.versionchecker import VersionChecker
from comictalker.comictalker import ComicTalker, RLCallBack, TalkerError
from comictalker.comictalker import ComicTalker, TalkerError
logger = logging.getLogger(__name__)
@ -75,43 +76,9 @@ def execute(f: Callable[[], Any]) -> None:
f()
class QueryThread(QtCore.QThread):
    """Worker thread that fetches full issue metadata from a talker.

    Runs the (potentially slow, rate-limited) network fetch off the GUI
    thread and reports the result through the supplied ``finish`` signal.
    """

    def __init__(
        self,
        talker: ComicTalker,
        issue_id: str,
        series_id: str,
        issue_number: str,
        finish: QtCore.pyqtSignal,
        on_rate_limit: QtCore.pyqtSignal,
    ) -> None:
        super().__init__()
        # Identifiers used to locate the issue via the talker.
        self.issue_id = issue_id
        self.series_id = series_id
        self.issue_number = issue_number
        self.talker = talker
        # Signal emitted with (metadata, issue_number) on success.
        self.finish = finish
        # Signal forwarded when the talker hits its rate limit.
        self.on_rate_limit = on_rate_limit

    def run(self) -> None:
        """Fetch the comic data and emit ``finish`` with the result."""
        try:
            new_metadata = self.talker.fetch_comic_data(
                issue_id=self.issue_id,
                series_id=self.series_id,
                issue_number=self.issue_number,
                # Forward rate-limit notifications to the GUI thread via signal.
                on_rate_limit=RLCallBack(lambda x, y: self.on_rate_limit.emit(x, y), 60),
            )
        except TalkerError as e:
            # NOTE(review): this creates a QMessageBox from a worker
            # thread; Qt widgets are only safe on the GUI thread —
            # confirm whether this should be reported via a signal instead.
            QtWidgets.QMessageBox.critical(None, f"{e.source} {e.code_name} Error", f"{e}")
            return
        self.finish.emit(new_metadata, self.issue_number)
class TaggerWindow(QtWidgets.QMainWindow):
appName = "ComicTagger"
version = ctversion.version
ratelimit = QtCore.pyqtSignal(float, float)
query_finished = QtCore.pyqtSignal(GenericMetadata, str)
def __init__(
self,
@ -328,9 +295,6 @@ class TaggerWindow(QtWidgets.QMainWindow):
self.page_list_editor.set_blur(self.config[0].General__blur)
self.ratelimit.connect(self.on_ratelimit)
self.query_finished.connect(self.apply_query_metadata)
def _sync_blur(*args: Any) -> None:
self.config[0].General__blur = self.page_list_editor.blur
@ -569,7 +533,7 @@ class TaggerWindow(QtWidgets.QMainWindow):
def repackage_archive(self) -> None:
ca_list = self.fileSelectionList.get_selected_archive_list()
non_zip_count = 0
to_zip = []
to_zip: list[ComicArchive] = []
largest_page_size = 0
for ca in ca_list:
largest_page_size = max(largest_page_size, len(ca.get_page_name_list()))
@ -617,7 +581,7 @@ class TaggerWindow(QtWidgets.QMainWindow):
new_archives_to_add = []
archives_to_remove = []
skipped_list = []
failed_list = []
failed_list: list[Exception] = []
success_count = 0
logger.debug("Exporting %d comics to zip", len(to_zip))
@ -643,7 +607,8 @@ class TaggerWindow(QtWidgets.QMainWindow):
if export:
logger.debug("Exporting %s to %s", ca.path, export_name)
if ca.export_as_zip(export_name):
try:
ca.export_as(export_name)
success_count += 1
if EW.addToList:
new_archives_to_add.append(str(export_name))
@ -651,9 +616,8 @@ class TaggerWindow(QtWidgets.QMainWindow):
archives_to_remove.append(ca)
ca.path.unlink(missing_ok=True)
else:
# last export failed, so remove the zip, if it exists
failed_list.append(ca.path)
except Exception as e:
failed_list.append(OSError(f"Failed to export {ca.path} to {export_name}: {e}"))
if export_name.exists():
export_name.unlink(missing_ok=True)
@ -669,11 +633,9 @@ class TaggerWindow(QtWidgets.QMainWindow):
for f in skipped_list:
summary += f"\t{f}\n"
if failed_list:
summary += (
f"\n\nThe following {len(failed_list)} archive(s) failed to export due to read/write errors:\n"
)
for f in failed_list:
summary += f"\t{f}\n"
summary += f"\n\nThe following {len(failed_list)} archive(s) failed to export:\n"
for ex in failed_list:
summary += f"\t{ex}\n"
logger.info(summary)
dlg = LogWindow(self)
@ -1168,72 +1130,50 @@ class TaggerWindow(QtWidgets.QMainWindow):
issue_count = utils.xlate_int(self.leIssueCount.text())
self.selector = SeriesSelectionWindow(
selector = SeriesSelectionWindow(
self,
self.config[0],
self.current_talker(),
series_name,
issue_number,
self.comic_archive,
year,
issue_count,
self.comic_archive,
self.config[0],
self.current_talker(),
autoselect,
literal,
)
self.selector.ratelimit.connect(self.on_ratelimit)
self.selector.setWindowTitle(f"Search: '{series_name}' - Select Series")
self.selector.finished.connect(self.finish_query)
selector.setWindowTitle(f"Search: '{series_name}' - Select Series")
self.selector.perform_query()
selector.setModal(True)
selector.exec()
def finish_query(self, result: list[GenericMetadata]) -> None:
if not (result and self.selector):
return
if selector.result():
# we should now have a series ID
QtWidgets.QApplication.setOverrideCursor(QtGui.QCursor(QtCore.Qt.CursorShape.WaitCursor))
self.querythread = QueryThread(
self.current_talker(),
self.selector.issue_id,
self.selector.series_id,
self.selector.issue_number,
self.query_finished,
self.ratelimit,
)
self.querythread.start()
# copy the form onto metadata object
self.form_to_metadata()
def apply_query_metadata(self, new_metadata: GenericMetadata, issue_number: str) -> None:
# we should now have a series ID
# QtWidgets.QApplication.setOverrideCursor(QtGui.QCursor(QtCore.Qt.CursorShape.WaitCursor))
QtWidgets.QApplication.restoreOverrideCursor()
try:
new_metadata = self.current_talker().fetch_comic_data(
issue_id=selector.issue_id, series_id=selector.series_id, issue_number=selector.issue_number
)
except TalkerError as e:
QtWidgets.QApplication.restoreOverrideCursor()
QtWidgets.QMessageBox.critical(self, f"{e.source} {e.code_name} Error", f"{e}")
return
QtWidgets.QApplication.restoreOverrideCursor()
# copy the form onto metadata object
self.form_to_metadata()
if new_metadata is None or new_metadata.is_empty:
QtWidgets.QMessageBox.critical(
self, "Search", f"Could not find an issue {selector.issue_number} for that series"
)
return
if new_metadata is None or new_metadata.is_empty:
QtWidgets.QMessageBox.critical(None, "Search", f"Could not find an issue {new_metadata} for that series")
return
self.metadata = prepare_metadata(self.metadata, new_metadata, self.config[0])
# Now push the new combined data into the edit controls
self.metadata_to_form()
def on_ratelimit(self, full_time: float, sleep_time: float) -> None:
self.toast = Toast(QtWidgets.QApplication.activeWindow())
if qtutils.is_dark_mode():
self.toast.applyPreset(ToastPreset.WARNING_DARK)
else:
self.toast.applyPreset(ToastPreset.WARNING)
# Convert to milliseconds, add 200ms because python is slow
self.toast.setDuration(abs(int(sleep_time * 1000) + 200))
self.toast.setResetDurationOnHover(False)
self.toast.setFadeOutDuration(50)
self.toast.setTitle("Rate Limit Hit!")
self.toast.setText(
f"Rate limit reached: {full_time:.0f}s until next request. Waiting {sleep_time:.0f}s for ratelimit"
)
self.toast.setPositionRelativeToWidget(self)
self.toast.show()
self.metadata = prepare_metadata(self.metadata, new_metadata, self.config[0])
# Now push the new combined data into the edit controls
self.metadata_to_form()
def write_tags(self) -> None:
if self.metadata is not None and self.comic_archive is not None:
@ -1256,8 +1196,9 @@ class TaggerWindow(QtWidgets.QMainWindow):
failed_tag: str = ""
# Save each tag
for tag_id in self.selected_write_tags:
success = self.comic_archive.write_tags(self.metadata, tag_id)
if not success:
try:
self.comic_archive.write_tags(self.metadata, tag_id)
except Exception:
failed_tag = tags[tag_id].name()
break
@ -1276,14 +1217,14 @@ class TaggerWindow(QtWidgets.QMainWindow):
self.update_menus()
# Only try to read if write was successful
self.metadata, _, error = self.read_selected_tags(self.selected_read_tags, self.comic_archive)
self.metadata, error = self.read_selected_tags(self.selected_read_tags, self.comic_archive)
if error is not None:
QtWidgets.QMessageBox.warning(
self,
"Read Failed!",
f"One or more of the selected read tags failed to load for {self.comic_archive.path}, check log for details",
)
logger.error("Failed to load metadata for %s: %s", self.ca.path, error)
logger.error("Failed to load metadata for %s: %s", self.comic_archive.path, error)
self.fileSelectionList.update_current_row()
self.update_ui_for_archive()
@ -1465,6 +1406,7 @@ class TaggerWindow(QtWidgets.QMainWindow):
def show_settings(self) -> None:
settingswin = SettingsWindow(self, self.config, self.talkers)
settingswin.setModal(True)
settingswin.exec()
settingswin.result()
self.adjust_source_combo()
@ -1668,7 +1610,7 @@ class TaggerWindow(QtWidgets.QMainWindow):
progdialog.setMinimumDuration(300)
center_window_on_parent(progdialog)
failed_list = []
failed_list: list[Exception] = []
success_count = 0
for prog_idx, ca in enumerate(ca_list, 1):
if prog_idx % 10 == 0:
@ -1679,10 +1621,13 @@ class TaggerWindow(QtWidgets.QMainWindow):
progdialog.setLabelText(str(ca.path))
for tag_id in tag_ids:
if ca.has_tags(tag_id) and ca.is_writable():
if ca.remove_tags(tag_id):
try:
ca.remove_tags(tag_id)
success_count += 1
else:
failed_list.append(ca.path)
except Exception as e:
failed_list.append(
OSError(f"Failed to remove {tags[tag_id].name()} from {ca.path}: {e}")
)
# Abandon any further tag removals to prevent any greater damage to archive
break
ca.reset_cache()
@ -1697,8 +1642,8 @@ class TaggerWindow(QtWidgets.QMainWindow):
summary = f"Successfully removed {success_count} tags in archive(s)."
if failed_list:
summary += f"\n\nThe remove operation failed in the following {len(failed_list)} archive(s):\n"
for f in failed_list:
summary += f"\t{f}\n"
for ex in failed_list:
summary += f"\t{ex}\n"
dlg = LogWindow(self)
dlg.set_text(summary)
@ -1743,13 +1688,13 @@ class TaggerWindow(QtWidgets.QMainWindow):
return
if has_src_count != 0:
src_tags = ", ".join([tags[tag_id].name() for tag_id in src_tag_ids])
dst_tags = ", ".join([tags[tag_id].name() for tag_id in dest_tag_ids])
reply = QtWidgets.QMessageBox.question(
self,
"Copy Tags",
f"Are you sure you wish to copy the combined (with overlay order) tags of "
f"{', '.join([tags[tag_id].name() for tag_id in src_tag_ids])} "
f"to {', '.join([tags[tag_id].name() for tag_id in dest_tag_ids])} tags in "
f"{has_src_count} archive(s)?",
f"{src_tags} to {dst_tags} tags in {has_src_count} archive(s)?",
QtWidgets.QMessageBox.StandardButton.Yes,
QtWidgets.QMessageBox.StandardButton.No,
)
@ -1762,15 +1707,15 @@ class TaggerWindow(QtWidgets.QMainWindow):
center_window_on_parent(prog_dialog)
QtCore.QCoreApplication.processEvents()
failed_list = []
failed_list: list[Exception] = []
success_count = 0
for prog_idx, ca in enumerate(ca_list, 1):
if prog_idx % 10 == 0:
QtCore.QCoreApplication.processEvents()
ca_saved = False
md, _, error = self.read_selected_tags(src_tag_ids, ca)
md, error = self.read_selected_tags(src_tag_ids, ca)
if error is not None:
failed_list.append(ca.path)
failed_list.append(error)
continue
if md.is_empty:
continue
@ -1787,12 +1732,15 @@ class TaggerWindow(QtWidgets.QMainWindow):
if tag_id == "cbi" and self.config[0].Metadata_Options__apply_transform_on_bulk_operation:
md = CBLTransformer(md, self.config[0]).apply()
if ca.write_tags(md, tag_id):
try:
ca.write_tags(md, tag_id)
if not ca_saved:
success_count += 1
ca_saved = True
else:
failed_list.append(ca.path)
except Exception as e:
failed_list.append(
OSError(f"Failed to copy {src_tags} to {dst_tags} tags for {ca.path}: {e}")
)
ca.reset_cache()
ca.load_cache({*self.selected_read_tags, *self.selected_write_tags})
@ -1806,8 +1754,8 @@ class TaggerWindow(QtWidgets.QMainWindow):
summary = f"Successfully copied tags in {success_count} archive(s)."
if failed_list:
summary += f"\n\nThe copy operation failed in the following {len(failed_list)} archive(s):\n"
for f in failed_list:
summary += f"\t{f}\n"
for ex in failed_list:
summary += f"\t{ex}\n"
dlg = LogWindow(self)
dlg.set_text(summary)
@ -1820,6 +1768,195 @@ class TaggerWindow(QtWidgets.QMainWindow):
self.atprogdialog.textEdit.ensureCursorVisible()
QtCore.QCoreApplication.processEvents()
def identify_and_tag_single_archive(
    self, ca: ComicArchive, match_results: OnlineMatchResults, dlg: AutoTagStartWindow
) -> tuple[bool, OnlineMatchResults]:
    """Auto-identify one archive online and, on success, write its tags.

    Seeds the search from the archive's existing tags (falling back to
    the parsed filename), runs the IssueIdentifier, records the outcome
    in *match_results*, and for a single acceptable match fetches the
    full issue data and saves it with every selected write tag.

    Args:
        ca: the archive to identify and tag.
        match_results: running tally; this archive's Result is appended
            to the appropriate bucket.
        dlg: the auto-tag start dialog carrying the user's options.

    Returns:
        A (success, match_results) pair — success is True only when the
        tags were actually written.
    """
    success = False
    ii = IssueIdentifier(ca, self.config[0], self.current_talker())

    # read in tags, and parse file name if not there
    md, error = self.read_selected_tags(self.selected_read_tags, ca)
    if error is not None:
        QtWidgets.QMessageBox.warning(
            self,
            "Aborting...",
            f"One or more of the read tags failed to load for {ca.path}. Aborting to prevent any possible further damage. Check log for details.",
        )
        logger.error("Failed to load tags from %s: %s", ca.path, error)
        return False, match_results

    if md.is_empty:
        # No usable tags — derive seed metadata from the filename.
        md = ca.metadata_from_filename(
            self.config[0].Filename_Parsing__filename_parser,
            self.config[0].Filename_Parsing__remove_c2c,
            self.config[0].Filename_Parsing__remove_fcbd,
            self.config[0].Filename_Parsing__remove_publisher,
            dlg.split_words,
            self.config[0].Filename_Parsing__allow_issue_start_with_letter,
            self.config[0].Filename_Parsing__protofolius_issue_number_scheme,
        )
        if dlg.ignore_leading_digits_in_filename and md.series is not None:
            # remove all leading numbers
            md.series = re.sub(r"(^[\d.]*)(.*)", r"\2", md.series)

    # use the dialog specified search string
    if dlg.search_string:
        md.series = dlg.search_string

    if md is None or md.is_empty:
        logger.error("No metadata given to search online with!")
        return False, match_results

    if dlg.dont_use_year:
        md.year = None
    if md.issue is None or md.issue == "":
        if dlg.assume_issue_one:
            md.issue = "1"
        else:
            # Fall back to the volume number as the issue, if present.
            md.issue = utils.xlate(md.volume)

    # Route identifier output (and cover previews) to the progress dialog.
    ii.set_output_function(self.auto_tag_log)
    if self.atprogdialog is not None:
        ii.set_cover_url_callback(self.atprogdialog.set_test_image)
    ii.series_match_thresh = dlg.name_length_match_tolerance

    result, matches = ii.identify(ca, md)

    # Translate the identifier's result code into three booleans that
    # drive the bookkeeping below.
    found_match = False
    choices = False
    low_confidence = False

    if result == ii.result_no_matches:
        pass
    elif result == ii.result_found_match_but_bad_cover_score:
        low_confidence = True
        found_match = True
    elif result == ii.result_found_match_but_not_first_page:
        found_match = True
    elif result == ii.result_multiple_matches_with_bad_image_scores:
        low_confidence = True
        choices = True
    elif result == ii.result_one_good_match:
        found_match = True
    elif result == ii.result_multiple_good_matches:
        choices = True

    if choices:
        if low_confidence:
            self.auto_tag_log("Online search: Multiple low-confidence matches. Save aborted\n")
            match_results.low_confidence_matches.append(
                Result(
                    Action.save,
                    Status.match_failure,
                    ca.path,
                    online_results=matches,
                    match_status=MatchStatus.low_confidence_match,
                )
            )
        else:
            self.auto_tag_log("Online search: Multiple matches. Save aborted\n")
            match_results.multiple_matches.append(
                Result(
                    Action.save,
                    Status.match_failure,
                    ca.path,
                    online_results=matches,
                    match_status=MatchStatus.multiple_match,
                )
            )
    elif low_confidence and not dlg.auto_save_on_low:
        self.auto_tag_log("Online search: Low confidence match. Save aborted\n")
        match_results.low_confidence_matches.append(
            Result(
                Action.save,
                Status.match_failure,
                ca.path,
                online_results=matches,
                match_status=MatchStatus.low_confidence_match,
            )
        )
    elif not found_match:
        self.auto_tag_log("Online search: No match found. Save aborted\n")
        match_results.no_matches.append(
            Result(
                Action.save,
                Status.match_failure,
                ca.path,
                online_results=matches,
                match_status=MatchStatus.no_match,
            )
        )
    else:
        # a single match!
        if low_confidence:
            self.auto_tag_log("Online search: Low confidence match, but saving anyways, as indicated...\n")

        # now get the particular issue data
        QtWidgets.QApplication.setOverrideCursor(QtGui.QCursor(QtCore.Qt.CursorShape.WaitCursor))
        try:
            ct_md = self.current_talker().fetch_comic_data(matches[0].issue_id)
        except TalkerError:
            logger.exception("Save aborted.")
            return False, match_results
        QtWidgets.QApplication.restoreOverrideCursor()

        if ct_md is None or ct_md.is_empty:
            match_results.fetch_data_failures.append(
                Result(
                    Action.save,
                    Status.fetch_data_failure,
                    ca.path,
                    online_results=matches,
                    match_status=MatchStatus.good_match,
                )
            )

        if ct_md is not None:
            # Merge fetched data with the seed metadata under a copy of the
            # settings that honors the dialog's "clear tags" choice.
            temp_opts = cast(ct_ns, settngs.get_namespace(self.config, True, True, True, False)[0])
            temp_opts.Auto_Tag__clear_tags = dlg.cbxClearMetadata.isChecked()

            md = prepare_metadata(md, ct_md, temp_opts)

            res = Result(
                Action.save,
                status=Status.success,
                original_path=ca.path,
                online_results=matches,
                match_status=MatchStatus.good_match,
                md=md,
                tags_written=self.selected_write_tags,
            )

            def write_Tags() -> bool:
                # Write every selected tag format; abort on the first failure
                # to avoid partially-written archives.
                for tag_id in self.selected_write_tags:
                    # write out the new data
                    try:
                        ca.write_tags(md, tag_id)
                    except Exception as e:
                        self.auto_tag_log(
                            f"{tags[tag_id].name()} save failed! {e}\nAborting any additional tag saves.\n"
                        )
                        return False
                return True

            # Save tags
            if write_Tags():
                match_results.good_matches.append(res)
                success = True
                self.auto_tag_log("Save complete!\n")
            else:
                res.status = Status.write_failure
                match_results.write_failures.append(res)

    # Drop stale cached tag data and re-prime for the UI's selected tags.
    ca.reset_cache()
    ca.load_cache({*self.selected_read_tags, *self.selected_write_tags})

    return success, match_results
def auto_tag(self) -> None:
ca_list = self.fileSelectionList.get_selected_archive_list()
tag_names = ", ".join([tags[tag_id].name() for tag_id in self.selected_write_tags])
@ -1848,40 +1985,47 @@ class TaggerWindow(QtWidgets.QMainWindow):
return
self.atprogdialog = AutoTagProgressWindow(self, self.current_talker())
self.atprogdialog.open()
self.atprogdialog.setModal(True)
self.atprogdialog.show()
self.atprogdialog.progressBar.setMaximum(len(ca_list))
self.atprogdialog.setWindowTitle("Auto-Tagging")
center_window_on_parent(self.atprogdialog)
temp_opts = cast(ct_ns, settngs.get_namespace(self.config, True, True, True, False)[0])
temp_opts.Auto_Tag__clear_tags = atstartdlg.cbxClearMetadata.isChecked()
temp_opts.Issue_Identifier__series_match_identify_thresh = atstartdlg.name_length_match_tolerance
temp_opts.Auto_Tag__ignore_leading_numbers_in_filename = atstartdlg.ignore_leading_digits_in_filename
temp_opts.Auto_Tag__use_year_when_identifying = not atstartdlg.dont_use_year
temp_opts.Auto_Tag__assume_issue_one = atstartdlg.assume_issue_one
temp_opts.internal__remove_archive_after_successful_match = atstartdlg.remove_after_success
temp_opts.Runtime_Options__tags_read = self.selected_read_tags
temp_opts.Runtime_Options__tags_write = self.selected_write_tags
self.autotagthread = AutoTagThread(atstartdlg.search_string, ca_list, self.config[0], self.current_talker())
self.autotagthread.autoTagComplete.connect(self.auto_tag_finished)
self.autotagthread.autoTagLogMsg.connect(self.auto_tag_log)
self.autotagthread.autoTagProgress.connect(self.atprogdialog.on_progress)
self.autotagthread.ratelimit.connect(self.ratelimit)
self.atprogdialog.rejected.connect(self.autotagthread.cancel)
self.auto_tag_log("==========================================================================\n")
self.auto_tag_log(f"Auto-Tagging Started for {len(ca_list)} items\n")
self.autotagthread.start()
def auto_tag_finished(self, match_results: OnlineMatchResults, archives_to_remove: list[ComicArchive]) -> None:
tag_names = ", ".join([tags[tag_id].name() for tag_id in self.selected_write_tags])
if self.atprogdialog:
self.atprogdialog.close()
match_results = OnlineMatchResults()
archives_to_remove = []
for prog_idx, ca in enumerate(ca_list):
self.auto_tag_log("==========================================================================\n")
self.auto_tag_log(f"Auto-Tagging {prog_idx} of {len(ca_list)}\n")
self.auto_tag_log(f"{ca.path}\n")
QtCore.QCoreApplication.processEvents()
try:
cover_idx = ca.read_tags(self.selected_read_tags[0]).get_cover_page_index_list()[0]
except Exception as e:
cover_idx = 0
logger.error("Failed to load metadata for %s: %s", ca.path, e)
image_data = ca.get_page(cover_idx)
self.atprogdialog.set_archive_image(image_data)
self.atprogdialog.set_test_image(b"")
self.fileSelectionList.remove_archive_list(archives_to_remove)
if self.atprogdialog.isdone:
break
self.atprogdialog.progressBar.setValue(prog_idx)
self.atprogdialog.label.setText(str(ca.path))
if ca.is_writable():
success, match_results = self.identify_and_tag_single_archive(ca, match_results, atstartdlg)
if success and atstartdlg.remove_after_success:
archives_to_remove.append(ca)
self.atprogdialog.close()
if atstartdlg.remove_after_success:
self.fileSelectionList.remove_archive_list(archives_to_remove)
self.fileSelectionList.update_selected_rows()
new_ca = self.fileSelectionList.get_current_archive()
@ -1927,9 +2071,11 @@ class TaggerWindow(QtWidgets.QMainWindow):
self,
match_results.multiple_matches,
self.selected_write_tags,
lambda match: self.current_talker().fetch_comic_data(match.issue_id),
self.config[0],
self.current_talker(),
)
matchdlg.setModal(True)
matchdlg.exec()
self.fileSelectionList.update_selected_rows()
new_ca = self.fileSelectionList.get_current_archive()
@ -2057,6 +2203,7 @@ class TaggerWindow(QtWidgets.QMainWindow):
"File Rename", "If you rename files now, unsaved data in the form will be lost. Are you sure?"
):
dlg = RenameWindow(self, ca_list, self.selected_read_tags, self.config, self.talkers)
dlg.setModal(True)
if dlg.exec() and self.comic_archive is not None:
self.fileSelectionList.update_selected_rows()
self.load_archive(self.comic_archive)
@ -2075,19 +2222,28 @@ class TaggerWindow(QtWidgets.QMainWindow):
self.config[0].internal__last_opened_folder = os.path.abspath(os.path.split(comic_archive.path)[0])
self.comic_archive = comic_archive
self.metadata, _, error = self.read_selected_tags(self.selected_read_tags, self.comic_archive)
self.metadata, error = self.read_selected_tags(self.selected_read_tags, self.comic_archive)
if error is not None:
logger.error("Failed to load tags from %s: %s", self.comic_archive.path, error)
self.exception(f"Failed to load tags from {self.comic_archive.path}, see log for details\n\n")
self.update_ui_for_archive()
def read_selected_tags(
self, tag_ids: list[str], ca: ComicArchive
) -> tuple[GenericMetadata, list[str], Exception | None]:
return read_selected_tags(
tag_ids, ca, self.config[0].Metadata_Options__tag_merge, self.config[0].Metadata_Options__tag_merge_lists
)
def read_selected_tags(self, tag_ids: list[str], ca: ComicArchive) -> tuple[GenericMetadata, Exception | None]:
    """Read and merge metadata from each selected tag format.

    Tags are read in list order and overlaid onto a fresh
    GenericMetadata using the configured merge mode and list-merge
    setting.

    :param tag_ids: tag format IDs to read, in overlay order
    :param ca: archive to read the tags from
    :return: the merged metadata and the first exception raised while
        reading/merging (None on success)
    """
    merged = GenericMetadata()
    try:
        for tag_id in tag_ids:
            merged.overlay(
                ca.read_tags(tag_id),
                mode=self.config[0].Metadata_Options__tag_merge,
                merge_lists=self.config[0].Metadata_Options__tag_merge_lists,
            )
    except Exception as exc:
        return merged, exc
    return merged, None
def file_list_cleared(self) -> None:
self.reset_app()

View File

@ -49,15 +49,6 @@
<verstretch>7</verstretch>
</sizepolicy>
</property>
<property name="styleSheet">
<string notr="true">QTableWidget[rowCount=&quot;0&quot;] {
background-image: url(&quot;:/graphics/about.png&quot;);
background-attachment: fixed;
background-position: top center;
background-repeat: no-repeat;
background-color: white;
}</string>
</property>
<property name="selectionMode">
<enum>QAbstractItemView::SingleSelection</enum>
</property>
@ -162,7 +153,7 @@
</widget>
</item>
<item>
<widget class="QWidget" name="imageSourceLogo" native="true">
<widget class="QWidget" name="imageIssuesSourceLogo" native="true">
<property name="minimumSize">
<size>
<width>300</width>

Binary file not shown.

View File

@ -1,21 +0,0 @@
MIT License
Copyright (c) 2024 Niklas Henning
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

View File

@ -1,231 +0,0 @@
# PyQt Toast
[![PyPI](https://img.shields.io/badge/pypi-v1.3.2-blue)](https://pypi.org/project/pyqt-toast-notification/)
[![Python](https://img.shields.io/badge/python-3.7+-blue)](https://github.com/niklashenning/pyqttoast)
[![Build](https://img.shields.io/badge/build-passing-neon)](https://github.com/niklashenning/pyqttoast)
[![Coverage](https://img.shields.io/badge/coverage-95%25-green)](https://github.com/niklashenning/pyqttoast)
[![License](https://img.shields.io/badge/license-MIT-green)](https://github.com/niklashenning/pyqttoast/blob/master/LICENSE)
A fully customizable and modern toast notification library for PyQt and PySide
![pyqttoast](https://github.com/niklashenning/pyqt-toast/assets/58544929/c104f10e-08df-4665-98d8-3785822a20dc)
## Features
* Supports showing multiple toasts at the same time
* Supports queueing of toasts
* Supports 7 different positions
* Supports multiple screens
* Supports positioning relative to widgets
* Modern and fully customizable UI
* Works with `PyQt5`, `PyQt6`, `PySide2`, and `PySide6`
## Installation
```
pip install pyqt-toast-notification
```
## Usage
Import the `Toast` class, instantiate it, and show the toast notification with the `show()` method:
```python
from PyQt6.QtWidgets import QMainWindow, QPushButton
from pyqttoast import Toast, ToastPreset
class Window(QMainWindow):
def __init__(self):
super().__init__(parent=None)
# Add button and connect click event
self.button = QPushButton(self)
self.button.setText('Show toast')
self.button.clicked.connect(self.show_toast)
# Shows a toast notification every time the button is clicked
def show_toast(self):
toast = Toast(self)
toast.setDuration(5000) # Hide after 5 seconds
toast.setTitle('Success! Confirmation email sent.')
toast.setText('Check your email to complete signup.')
toast.applyPreset(ToastPreset.SUCCESS) # Apply style preset
toast.show()
```
> **IMPORTANT:** <br>An instance of `Toast` can only be shown **once**. If you want to show another one, even if the content is exactly the same, you have to create another instance.
## Customization
* **Setting the position of the toasts (<u>static</u>):**
```python
Toast.setPosition(ToastPosition.BOTTOM_MIDDLE) # Default: ToastPosition.BOTTOM_RIGHT
```
> **AVAILABLE POSITIONS:** <br> `BOTTOM_LEFT`, `BOTTOM_MIDDLE`, `BOTTOM_RIGHT`, `TOP_LEFT`, `TOP_MIDDLE`, `TOP_RIGHT`, `CENTER`
* **Setting whether the toasts should always be shown on the main screen (<u>static</u>):**
```python
Toast.setAlwaysOnMainScreen(True) # Default: False
```
* **Positioning the toasts relative to a widget instead of a screen (<u>static</u>):**
```python
Toast.setPositionRelativeToWidget(some_widget) # Default: None
```
* **Setting a limit on how many toasts can be shown at the same time (<u>static</u>):**
```python
Toast.setMaximumOnScreen(5) # Default: 3
```
> If you try to show more toasts than the maximum amount on screen, they will get added to a queue and get shown as soon as one of the currently showing toasts is closed.
* **Setting the vertical spacing between the toasts (<u>static</u>):**
```python
Toast.setSpacing(20) # Default: 10
```
* **Setting the x and y offset of the toast position (<u>static</u>):**
```python
Toast.setOffset(30, 55) # Default: 20, 45
```
* **Making the toast show forever until it is closed:**
```python
toast.setDuration(0) # Default: 5000
```
* **Enabling or disabling the duration bar:**
```python
toast.setShowDurationBar(False) # Default: True
```
* **Adding an icon:**
```python
toast.setIcon(ToastIcon.SUCCESS) # Default: ToastIcon.INFORMATION
toast.setShowIcon(True) # Default: False
# Or setting a custom icon:
toast.setIcon(QPixmap('path/to/your/icon.png'))
# If you want to show the icon without recoloring it, set the icon color to None:
toast.setIconColor(None) # Default: #5C5C5C
```
> **AVAILABLE ICONS:** <br> `SUCCESS`, `WARNING`, `ERROR`, `INFORMATION`, `CLOSE`
* **Setting the icon size:**
```python
toast.setIconSize(QSize(14, 14)) # Default: QSize(18, 18)
```
* **Enabling or disabling the icon separator:**
```python
toast.setShowIconSeparator(False) # Default: True
```
* **Setting the close button alignment:**
```python
toast.setCloseButtonAlignment(ToastButtonAlignment.MIDDLE) # Default: ToastButtonAlignment.TOP
```
> **AVAILABLE ALIGNMENTS:** <br> `TOP`, `MIDDLE`, `BOTTOM`
* **Enabling or disabling the close button:**
```python
toast.setShowCloseButton(False) # Default: True
```
* **Customizing the duration of the fade animations (milliseconds):**
```python
toast.setFadeInDuration(100) # Default: 250
toast.setFadeOutDuration(150) # Default: 250
```
* **Enabling or disabling duration reset on hover:**
```python
toast.setResetDurationOnHover(False) # Default: True
```
* **Making the corners rounded:**
```python
toast.setBorderRadius(3) # Default: 0
```
* **Setting custom colors:**
```python
toast.setBackgroundColor(QColor('#292929')) # Default: #E7F4F9
toast.setTitleColor(QColor('#FFFFFF')) # Default: #000000
toast.setTextColor(QColor('#D0D0D0')) # Default: #5C5C5C
toast.setDurationBarColor(QColor('#3E9141')) # Default: #5C5C5C
toast.setIconColor(QColor('#3E9141')) # Default: #5C5C5C
toast.setIconSeparatorColor(QColor('#585858')) # Default: #D9D9D9
toast.setCloseButtonIconColor(QColor('#C9C9C9')) # Default: #000000
```
* **Setting custom fonts:**
```python
# Init font
font = QFont('Times', 10, QFont.Weight.Bold)
# Set fonts
toast.setTitleFont(font) # Default: QFont('Arial', 9, QFont.Weight.Bold)
toast.setTextFont(font) # Default: QFont('Arial', 9)
```
* **Applying a style preset:**
```python
toast.applyPreset(ToastPreset.ERROR)
```
> **AVAILABLE PRESETS:** <br> `SUCCESS`, `WARNING`, `ERROR`, `INFORMATION`, `SUCCESS_DARK`, `WARNING_DARK`, `ERROR_DARK`, `INFORMATION_DARK`
* **Setting toast size constraints:**
```python
# Minimum and maximum size
toast.setMinimumWidth(100)
toast.setMaximumWidth(350)
toast.setMinimumHeight(50)
toast.setMaximumHeight(120)
# Fixed size (not recommended)
toast.setFixedSize(QSize(350, 80))
```
**<br>Other customization options:**
| Option | Description | Default |
|-------------------------------|---------------------------------------------------------------------------------|----------------------------|
| `setFixedScreen()` | Fixed screen where the toasts will be shown (static) | `None` |
| `setMovePositionWithWidget()` | Whether the toasts should move with widget if positioned relative to a widget | `True` |
| `setIconSeparatorWidth()` | Width of the icon separator that separates the icon and text section | `2` |
| `setCloseButtonIcon()` | Icon of the close button | `ToastIcon.CLOSE` |
| `setCloseButtonIconSize()` | Size of the close button icon | `QSize(10, 10)` |
| `setCloseButtonSize()` | Size of the close button | `QSize(24, 24)` |
| `setStayOnTop()` | Whether the toast stays on top of other windows even when they are focused | `True` |
| `setTextSectionSpacing()` | Vertical spacing between the title and the text | `8` |
| `setMargins()` | Margins around the whole toast content | `QMargins(20, 18, 10, 18)` |
| `setIconMargins()` | Margins around the icon | `QMargins(0, 0, 15, 0)` |
| `setIconSectionMargins()` | Margins around the icon section (the area with the icon and the icon separator) | `QMargins(0, 0, 15, 0)` |
| `setTextSectionMargins()` | Margins around the text section (the area with the title and the text) | `QMargins(0, 0, 15, 0)` |
| `setCloseButtonMargins()` | Margins around the close button | `QMargins(0, -8, 0, -8)` |
## Demo
https://github.com/niklashenning/pyqt-toast/assets/58544929/f4d7f4a4-6d69-4087-ae19-da54b6da499d
The demos for PyQt5, PyQt6, and PySide6 can be found in the [demo](demo) folder.
## Tests
Installing the required test dependencies [PyQt6](https://pypi.org/project/PyQt6/), [pytest](https://github.com/pytest-dev/pytest), and [coveragepy](https://github.com/nedbat/coveragepy):
```
pip install PyQt6 pytest coverage
```
To run the tests with coverage, clone this repository, go into the main directory and run:
```
coverage run -m pytest
coverage report --ignore-errors -m
```
## License
This software is licensed under the [MIT license](https://github.com/niklashenning/pyqttoast/blob/master/LICENSE).

View File

@ -1,11 +0,0 @@
"""Public API of the pyqttoast package."""
from __future__ import annotations

from .toast import Toast, ToastButtonAlignment, ToastIcon, ToastPosition, ToastPreset

# Names exported via `from pyqttoast import *`.
__all__ = [
    "Toast",
    "ToastButtonAlignment",
    "ToastIcon",
    "ToastPosition",
    "ToastPreset",
]

View File

@ -1,41 +0,0 @@
from __future__ import annotations
from PyQt6.QtGui import QColor
# Duration of the toast reposition animation (presumably milliseconds,
# the Qt animation convention — confirm against toast.py).
UPDATE_POSITION_DURATION = 200
# Size of the manually drawn drop shadow around a toast (presumably
# pixels; see the DropShadow widget).
DROP_SHADOW_SIZE = 5

# Accent colors used by the SUCCESS/WARNING/ERROR/INFORMATION presets.
SUCCESS_ACCENT_COLOR = QColor("#3E9141")
WARNING_ACCENT_COLOR = QColor("#E8B849")
ERROR_ACCENT_COLOR = QColor("#BA2626")
INFORMATION_ACCENT_COLOR = QColor("#007FFF")
DEFAULT_ACCENT_COLOR = QColor("#5C5C5C")

# Default palette (light variants).
DEFAULT_BACKGROUND_COLOR = QColor("#E7F4F9")
DEFAULT_TITLE_COLOR = QColor("#000000")
DEFAULT_TEXT_COLOR = QColor("#5C5C5C")
DEFAULT_ICON_SEPARATOR_COLOR = QColor("#D9D9D9")
DEFAULT_CLOSE_BUTTON_ICON_COLOR = QColor("#000000")

# Default palette (dark variants).
DEFAULT_BACKGROUND_COLOR_DARK = QColor("#292929")
DEFAULT_TITLE_COLOR_DARK = QColor("#FFFFFF")
DEFAULT_TEXT_COLOR_DARK = QColor("#D0D0D0")
DEFAULT_ICON_SEPARATOR_COLOR_DARK = QColor("#585858")
DEFAULT_CLOSE_BUTTON_ICON_COLOR_DARK = QColor("#C9C9C9")

# Public names re-exported by this module.
__all__ = [
    "UPDATE_POSITION_DURATION",
    "DROP_SHADOW_SIZE",
    "SUCCESS_ACCENT_COLOR",
    "WARNING_ACCENT_COLOR",
    "ERROR_ACCENT_COLOR",
    "INFORMATION_ACCENT_COLOR",
    "DEFAULT_ACCENT_COLOR",
    "DEFAULT_BACKGROUND_COLOR",
    "DEFAULT_TITLE_COLOR",
    "DEFAULT_TEXT_COLOR",
    "DEFAULT_ICON_SEPARATOR_COLOR",
    "DEFAULT_CLOSE_BUTTON_ICON_COLOR",
    "DEFAULT_BACKGROUND_COLOR_DARK",
    "DEFAULT_TITLE_COLOR_DARK",
    "DEFAULT_TEXT_COLOR_DARK",
    "DEFAULT_ICON_SEPARATOR_COLOR_DARK",
    "DEFAULT_CLOSE_BUTTON_ICON_COLOR_DARK",
]

View File

@ -1,5 +0,0 @@
from __future__ import annotations

import importlib.resources

# Traversable root of this subpackage's bundled CSS resources; works
# both from a source checkout and from zipped/frozen installs.
css_path = importlib.resources.files(__package__)

View File

@ -1,24 +0,0 @@
/* Five stacked translucent layers approximating a soft drop shadow.
   The alpha increases from the outermost layer (1) to the innermost
   layer (5); the DropShadow widget positions each layer inset from
   the previous one. */
#drop-shadow-layer-1 {
    background: rgba(0, 0, 0, 3);
    border-radius: 8px;
}

#drop-shadow-layer-2 {
    background: rgba(0, 0, 0, 5);
    border-radius: 8px;
}

#drop-shadow-layer-3 {
    background: rgba(0, 0, 0, 6);
    border-radius: 8px;
}

#drop-shadow-layer-4 {
    background: rgba(0, 0, 0, 9);
    border-radius: 8px;
}

#drop-shadow-layer-5 {
    background: rgba(0, 0, 0, 10);
    border-radius: 8px;
}

View File

@ -1,7 +0,0 @@
/* Give the close button and icon widget a transparent background so
   they blend into the toast instead of using platform styling. */
#toast-close-button {
    background: transparent;
}

#toast-icon-widget {
    background: transparent;
}

View File

@ -1,57 +0,0 @@
from __future__ import annotations
from PyQt6.QtCore import QSize
from PyQt6.QtWidgets import QWidget
from .css import css_path
class DropShadow(QWidget):
    """Soft drop shadow rendered as five stacked translucent layers.

    The shadow is drawn manually because Qt allows only one graphics
    effect per widget; each layer is styled through the
    ``drop-shadow-layer-<n>`` selectors in ``drop_shadow.css``.
    """

    def __init__(self, parent: QWidget | None = None) -> None:
        """Create a new DropShadow instance

        :param parent: the parent widget
        """
        super().__init__(parent)

        # Drawn manually since only one graphics effect can be applied.
        # Layers are kept as layer_1 .. layer_5 attributes.
        for index in range(1, 6):
            layer = QWidget(self)
            layer.setObjectName(f"drop-shadow-layer-{index}")
            setattr(self, f"layer_{index}", layer)

        # Apply stylesheet
        self.setStyleSheet((css_path / "drop_shadow.css").read_text(encoding="utf-8"))

    def resize(self, size: QSize) -> None:
        """Resize the drop shadow widget

        :param size: new size
        """
        super().resize(size)
        width = size.width()
        height = size.height()

        # Each successive layer is inset one pixel on every side,
        # producing the graduated shadow edge.
        for index in range(1, 6):
            inset = 2 * (index - 1)
            layer = getattr(self, f"layer_{index}")
            layer.resize(width - inset, height - inset)
            layer.move(inset // 2, inset // 2)

View File

@ -1,7 +0,0 @@
from __future__ import annotations
import os
def get_hook_dirs() -> list[str]:
    """Return the directories PyInstaller should scan for hook modules.

    PyInstaller calls this entry point to locate the hook files that
    ship inside this package.
    """
    hook_dir = os.path.dirname(__file__)
    return [hook_dir]

View File

@ -1,5 +0,0 @@
from __future__ import annotations

from PyInstaller.utils.hooks import collect_data_files

# PyInstaller hook: bundle the package's non-Python data files (CSS,
# icon images) so frozen applications can load them at runtime. The
# hooks subpackage is excluded — it is only needed at build time.
datas = collect_data_files("pyqttoast", excludes=["hooks"])

View File

@ -1,55 +0,0 @@
from __future__ import annotations
from PyQt6.QtGui import QColor, QImage, QPixmap, qRgba
from .icons import icon_path
from .toast_enums import ToastIcon
class IconUtils:
    """Helpers for loading and recoloring the bundled toast icons."""

    @staticmethod
    def get_icon_from_enum(enum_icon: ToastIcon) -> QPixmap:
        """Get a QPixmap from a ToastIcon

        :param enum_icon: ToastIcon
        :return: pixmap of the ToastIcon (empty if the value is unknown)
        """
        filenames = {
            ToastIcon.SUCCESS: "success.png",
            ToastIcon.WARNING: "warning.png",
            ToastIcon.ERROR: "error.png",
            ToastIcon.INFORMATION: "information.png",
            ToastIcon.CLOSE: "close.png",
        }

        pixmap = QPixmap()
        filename = filenames.get(enum_icon)
        if filename is not None:
            pixmap.loadFromData((icon_path / filename).read_bytes())
        return pixmap

    @staticmethod
    def recolor_image(image: QImage, color: QColor | None) -> QImage:
        """Recolor *image* in place and return it.

        Every pixel's RGB channels are replaced with *color*'s channels
        while its original alpha is preserved.

        :param image: image to recolor
        :param color: new color (None if the image should not be recolored)
        :return: the (possibly recolored) image
        """
        # Leave image as is if color is None
        if color is None:
            return image

        # The replacement channels are constant for the whole image.
        red = color.red()
        green = color.green()
        blue = color.blue()

        for y in range(image.height()):
            for x in range(image.width()):
                # Keep only the alpha of the existing pixel.
                alpha = image.pixelColor(x, y).alpha()
                image.setPixelColor(x, y, QColor.fromRgba(qRgba(red, green, blue, alpha)))
        return image

View File

@ -1,5 +0,0 @@
from __future__ import annotations

import importlib.resources

# Traversable root of this subpackage's bundled icon images; works
# both from a source checkout and from zipped/frozen installs.
icon_path = importlib.resources.files(__package__)

Binary file not shown.

Before

Width:  |  Height:  |  Size: 2.6 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 3.4 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 3.9 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 3.2 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 5.8 KiB

File diff suppressed because it is too large Load Diff

View File

@ -1,38 +0,0 @@
from __future__ import annotations
from enum import Enum
class ToastPreset(Enum):
    """Style presets applied via ``Toast.applyPreset`` (light and dark)."""

    SUCCESS = 1
    WARNING = 2
    ERROR = 3
    INFORMATION = 4
    SUCCESS_DARK = 5
    WARNING_DARK = 6
    ERROR_DARK = 7
    INFORMATION_DARK = 8
class ToastIcon(Enum):
    """Built-in icons shipped with the package (see the icons directory)."""

    SUCCESS = 1
    WARNING = 2
    ERROR = 3
    INFORMATION = 4
    CLOSE = 5
class ToastPosition(Enum):
    """Screen (or relative-widget) positions where toasts may be shown."""

    BOTTOM_LEFT = 1
    BOTTOM_MIDDLE = 2
    BOTTOM_RIGHT = 3
    TOP_LEFT = 4
    TOP_MIDDLE = 5
    TOP_RIGHT = 6
    CENTER = 7
class ToastButtonAlignment(Enum):
    """Vertical alignment options for the toast's close button."""

    TOP = 1
    MIDDLE = 2
    BOTTOM = 3

View File

@ -9,7 +9,7 @@ import webbrowser
from collections.abc import Collection, Sequence
from PyQt6.QtCore import QUrl
from PyQt6.QtGui import QGuiApplication, QPalette
from PyQt6.QtGui import QPalette
from PyQt6.QtWidgets import QWidget
logger = logging.getLogger(__name__)
@ -125,9 +125,6 @@ if qt_available:
# And the move call repositions the window
window.move(hpos + main_window_size.left(), vpos + main_window_size.top())
def is_dark_mode() -> bool:
return QGuiApplication.styleHints().colorScheme() == Qt.ColorScheme.Dark
def get_qimage_from_data(image_data: bytes) -> QtGui.QImage:
img = QtGui.QImage()

View File

@ -31,7 +31,7 @@
<number>0</number>
</property>
<item alignment="Qt::AlignTop">
<widget class="QWidget" name="coverImageContainer" native="true">
<widget class="QWidget" name="imageContainer" native="true">
<property name="sizePolicy">
<sizepolicy hsizetype="Preferred" vsizetype="Preferred">
<horstretch>0</horstretch>
@ -66,7 +66,7 @@
</widget>
</item>
<item>
<widget class="QLabel" name="lblIssuesSourceName">
<widget class="QLabel" name="lblSourceName">
<property name="sizePolicy">
<sizepolicy hsizetype="Preferred" vsizetype="Preferred">
<horstretch>0</horstretch>
@ -177,7 +177,7 @@
</property>
</column>
</widget>
<widget class="QTextEdit" name="teDescription">
<widget class="QTextEdit" name="teDetails">
<property name="sizePolicy">
<sizepolicy hsizetype="Expanding" vsizetype="Expanding">
<horstretch>0</horstretch>
@ -223,7 +223,7 @@
</widget>
</item>
<item>
<widget class="QCheckBox" name="cbxPublisherFilter">
<widget class="QCheckBox" name="cbxFilter">
<property name="toolTip">
<string>Filter the publishers based on the publisher filter.</string>
</property>

View File

@ -15,7 +15,7 @@ from __future__ import annotations
import logging
import pathlib
from typing import Any, Callable, NamedTuple
from typing import Any, Callable
import settngs
@ -25,11 +25,6 @@ from comictalker.talker_utils import fix_url
logger = logging.getLogger(__name__)
class RLCallBack(NamedTuple):
callback: Callable[[float, float], None]
interval: float
class TalkerError(Exception):
"""Base class exception for information sources.
@ -175,8 +170,6 @@ class ComicTalker:
refresh_cache: bool = False,
literal: bool = False,
series_match_thresh: int = 90,
*,
on_rate_limit: RLCallBack | None = None,
) -> list[ComicSeries]:
"""
This function should return a list of series that match the given series name
@ -198,12 +191,7 @@ class ComicTalker:
raise NotImplementedError
def fetch_comic_data(
self,
issue_id: str | None = None,
series_id: str | None = None,
issue_number: str = "",
*,
on_rate_limit: RLCallBack | None = None,
self, issue_id: str | None = None, series_id: str | None = None, issue_number: str = ""
) -> GenericMetadata:
"""
This function should return an instance of GenericMetadata for a single issue.
@ -222,34 +210,19 @@ class ComicTalker:
"""
raise NotImplementedError
def fetch_series(
self,
series_id: str,
*,
on_rate_limit: RLCallBack | None = None,
) -> ComicSeries:
def fetch_series(self, series_id: str) -> ComicSeries:
"""
This function should return an instance of ComicSeries from the given series ID.
Caching MUST be implemented on this function.
"""
raise NotImplementedError
def fetch_issues_in_series(
self,
series_id: str,
*,
on_rate_limit: RLCallBack | None = None,
) -> list[GenericMetadata]:
def fetch_issues_in_series(self, series_id: str) -> list[GenericMetadata]:
"""Expected to return a list of issues with a given series ID"""
raise NotImplementedError
def fetch_issues_by_series_issue_num_and_year(
self,
series_id_list: list[str],
issue_number: str,
year: int | None,
*,
on_rate_limit: RLCallBack | None = None,
self, series_id_list: list[str], issue_number: str, year: int | None
) -> list[GenericMetadata]:
"""
This function should return a single issue for each series id in

View File

@ -27,6 +27,7 @@ from typing import Any, Callable, Generic, TypeVar, cast
from urllib.parse import parse_qsl, urlencode, urljoin
import settngs
from pyrate_limiter import Limiter, RequestRate
from typing_extensions import Required, TypedDict
from comicapi import utils
@ -35,8 +36,7 @@ from comicapi.issuestring import IssueString
from comicapi.utils import LocationParseError, StrEnum, parse_url
from comictalker import talker_utils
from comictalker.comiccacher import ComicCacher, Issue, Series
from comictalker.comictalker import ComicTalker, RLCallBack, TalkerDataError, TalkerError, TalkerNetworkError
from comictalker.vendor.pyrate_limiter import Limiter, RequestRate
from comictalker.comictalker import ComicTalker, TalkerDataError, TalkerError, TalkerNetworkError
try:
import niquests as requests
@ -101,7 +101,7 @@ class CVSeries(TypedDict, total=False):
description: str
id: Required[int]
image: CVImage
name: Required[str]
name: str
publisher: CVPublisher
start_year: str
resource_type: str
@ -274,8 +274,6 @@ class ComicVineTalker(ComicTalker):
refresh_cache: bool = False,
literal: bool = False,
series_match_thresh: int = 90,
*,
on_rate_limit: RLCallBack | None = None,
) -> list[ComicSeries]:
# Sanitize the series name for comicvine searching, comicvine search ignore symbols
search_series_name = utils.sanitize_title(series_name, basic=literal)
@ -307,11 +305,7 @@ class ComicVineTalker(ComicTalker):
"limit": 100,
}
cv_response: CVResult[list[CVSeries]] = self._get_cv_content(
urljoin(self.api_url, "search"),
params,
on_rate_limit=on_rate_limit,
)
cv_response: CVResult[list[CVSeries]] = self._get_cv_content(urljoin(self.api_url, "search"), params)
search_results: list[CVSeries] = []
@ -356,11 +350,7 @@ class ComicVineTalker(ComicTalker):
page += 1
params["page"] = page
cv_response = self._get_cv_content(
urljoin(self.api_url, "search"),
params,
on_rate_limit=on_rate_limit,
)
cv_response = self._get_cv_content(urljoin(self.api_url, "search"), params)
search_results.extend(cv_response["results"])
current_result_count += cv_response["number_of_page_results"]
@ -383,56 +373,24 @@ class ComicVineTalker(ComicTalker):
return formatted_search_results
def fetch_comic_data(
self,
issue_id: str | None = None,
series_id: str | None = None,
issue_number: str = "",
on_rate_limit: RLCallBack | None = None,
self, issue_id: str | None = None, series_id: str | None = None, issue_number: str = ""
) -> GenericMetadata:
comic_data = GenericMetadata()
if issue_id:
comic_data = self._fetch_issue_data_by_issue_id(
issue_id,
on_rate_limit=on_rate_limit,
)
comic_data = self._fetch_issue_data_by_issue_id(issue_id)
elif issue_number and series_id:
comic_data = self._fetch_issue_data(
int(series_id),
issue_number,
on_rate_limit=on_rate_limit,
)
comic_data = self._fetch_issue_data(int(series_id), issue_number)
return comic_data
def fetch_series(
self,
series_id: str,
on_rate_limit: RLCallBack | None = None,
) -> ComicSeries:
return self._fetch_series_data(
int(series_id),
on_rate_limit=on_rate_limit,
)[0]
def fetch_series(self, series_id: str) -> ComicSeries:
return self._fetch_series_data(int(series_id))[0]
def fetch_issues_in_series(
self,
series_id: str,
on_rate_limit: RLCallBack | None = None,
) -> list[GenericMetadata]:
return [
x[0]
for x in self._fetch_issues_in_series(
series_id,
on_rate_limit=on_rate_limit,
)
]
def fetch_issues_in_series(self, series_id: str) -> list[GenericMetadata]:
return [x[0] for x in self._fetch_issues_in_series(series_id)]
def fetch_issues_by_series_issue_num_and_year(
self,
series_id_list: list[str],
issue_number: str,
year: str | int | None,
on_rate_limit: RLCallBack | None = None,
self, series_id_list: list[str], issue_number: str, year: str | int | None
) -> list[GenericMetadata]:
logger.debug("Fetching comics by series ids: %s and number: %s", series_id_list, issue_number)
# before we search online, look in our cache, since we might already have this info
@ -443,9 +401,7 @@ class ComicVineTalker(ComicTalker):
series = cvc.get_series_info(series_id, self.id, expire_stale=False)
issues = []
# Explicitly mark count_of_issues at an impossible value
cvseries = CVSeries(id=int(series_id), count_of_issues=-1) # type: ignore[typeddict-item]
# Check if we have the series cached
cvseries = CVSeries(id=int(series_id), count_of_issues=-1)
if series:
cvseries = cast(CVSeries, json.loads(series[0].data))
issues = cvc.get_series_issues_info(series_id, self.id, expire_stale=True)
@ -453,11 +409,10 @@ class ComicVineTalker(ComicTalker):
for issue, _ in issues:
cvissue = cast(CVIssue, json.loads(issue.data))
if cvissue.get("issue_number") == issue_number:
comicseries = self._fetch_series([int(cvissue["volume"]["id"])], on_rate_limit=on_rate_limit)[0][0]
cached_results.append(
self._map_comic_issue_to_metadata(
cvissue,
comicseries,
self._fetch_series([int(cvissue["volume"]["id"])])[0][0],
),
)
issue_found = True
@ -489,11 +444,7 @@ class ComicVineTalker(ComicTalker):
"filter": flt,
}
cv_response: CVResult[list[CVIssue]] = self._get_cv_content(
urljoin(self.api_url, "issues/"),
params,
on_rate_limit=on_rate_limit,
)
cv_response: CVResult[list[CVIssue]] = self._get_cv_content(urljoin(self.api_url, "issues/"), params)
current_result_count = cv_response["number_of_page_results"]
total_result_count = cv_response["number_of_total_results"]
@ -508,11 +459,7 @@ class ComicVineTalker(ComicTalker):
offset += cv_response["number_of_page_results"]
params["offset"] = offset
cv_response = self._get_cv_content(
urljoin(self.api_url, "issues/"),
params,
on_rate_limit=on_rate_limit,
)
cv_response = self._get_cv_content(urljoin(self.api_url, "issues/"), params)
filtered_issues_result.extend(cv_response["results"])
current_result_count += cv_response["number_of_page_results"]
@ -527,13 +474,7 @@ class ComicVineTalker(ComicTalker):
)
formatted_filtered_issues_result = [
self._map_comic_issue_to_metadata(
x,
self._fetch_series_data(
x["volume"]["id"],
on_rate_limit=on_rate_limit,
)[0],
)
self._map_comic_issue_to_metadata(x, self._fetch_series_data(x["volume"]["id"])[0])
for x in filtered_issues_result
]
formatted_filtered_issues_result.extend(cached_results)
@ -545,12 +486,7 @@ class ComicVineTalker(ComicTalker):
flt = "id:" + "|".join(used_issues)
return flt, used_issues
def fetch_comics(
self,
*,
issue_ids: list[str],
on_rate_limit: RLCallBack | None = None,
) -> list[GenericMetadata]:
def fetch_comics(self, *, issue_ids: list[str]) -> list[GenericMetadata]:
# before we search online, look in our cache, since we might already have this info
cvc = self.cacher()
cached_results: list[GenericMetadata] = []
@ -588,7 +524,7 @@ class ComicVineTalker(ComicTalker):
flt, used_issues = self._get_id_list(list(needed_issues))
params["filter"] = flt
cv_response: CVResult[list[CVIssue]] = self._get_cv_content(issue_url, params, on_rate_limit=on_rate_limit)
cv_response: CVResult[list[CVIssue]] = self._get_cv_content(issue_url, params)
issue_results.extend(cv_response["results"])
@ -630,11 +566,7 @@ class ComicVineTalker(ComicTalker):
return cached_results
def _fetch_series(
self,
series_ids: list[int],
on_rate_limit: RLCallBack | None,
) -> list[tuple[ComicSeries, bool]]:
def _fetch_series(self, series_ids: list[int]) -> list[tuple[ComicSeries, bool]]:
# before we search online, look in our cache, since we might already have this info
cvc = self.cacher()
cached_results: list[tuple[ComicSeries, bool]] = []
@ -661,9 +593,7 @@ class ComicVineTalker(ComicTalker):
flt, used_series = self._get_id_list(list(needed_series))
params["filter"] = flt
cv_response: CVResult[list[CVSeries]] = self._get_cv_content(
series_url, params, on_rate_limit=on_rate_limit
)
cv_response: CVResult[list[CVSeries]] = self._get_cv_content(series_url, params)
series_results.extend(cv_response["results"])
@ -686,18 +616,12 @@ class ComicVineTalker(ComicTalker):
return cached_results
def _get_cv_content(
self,
url: str,
params: dict[str, Any],
*,
on_rate_limit: RLCallBack | None,
) -> CVResult[T]:
def _get_cv_content(self, url: str, params: dict[str, Any]) -> CVResult[T]:
"""
Get the content from the CV server.
"""
cv_response: CVResult[T] = self._get_url_content(url, params, on_rate_limit=on_rate_limit)
cv_response: CVResult[T] = self._get_url_content(url, params)
if cv_response["status_code"] != 1:
logger.debug(
"%s query failed with error #%s: [%s].",
@ -709,7 +633,7 @@ class ComicVineTalker(ComicTalker):
return cv_response
def _get_url_content(self, url: str, params: dict[str, Any], on_rate_limit: RLCallBack | None = None) -> Any:
def _get_url_content(self, url: str, params: dict[str, Any]) -> Any:
# if there is a 500 error, try a few more times before giving up
limit_counter = 0
final_params = self.custom_url_parameters.copy()
@ -718,7 +642,7 @@ class ComicVineTalker(ComicTalker):
for tries in range(1, 5):
try:
ratelimit_key = self._get_ratelimit_key(url)
with self.limiter.ratelimit(ratelimit_key, delay=True, on_rate_limit=on_rate_limit):
with self.limiter.ratelimit(ratelimit_key, delay=True):
logger.debug("Requesting: %s?%s", url, urlencode(final_params))
self.total_requests_made[ratelimit_key] += 1
resp = requests.get(
@ -812,20 +736,13 @@ class ComicVineTalker(ComicTalker):
format=None,
)
def _fetch_issues_in_series(
self,
series_id: str,
on_rate_limit: RLCallBack | None,
) -> list[tuple[GenericMetadata, bool]]:
def _fetch_issues_in_series(self, series_id: str) -> list[tuple[GenericMetadata, bool]]:
logger.debug("Fetching all issues in series: %s", series_id)
# before we search online, look in our cache, since we might already have this info
cvc = self.cacher()
cached_results = cvc.get_series_issues_info(series_id, self.id)
series = self._fetch_series_data(
int(series_id),
on_rate_limit=on_rate_limit,
)[0]
series = self._fetch_series_data(int(series_id))[0]
logger.debug(
"Found %d issues cached need %d issues",
@ -841,11 +758,7 @@ class ComicVineTalker(ComicTalker):
"format": "json",
"offset": 0,
}
cv_response: CVResult[list[CVIssue]] = self._get_cv_content(
urljoin(self.api_url, "issues/"),
params,
on_rate_limit=on_rate_limit,
)
cv_response: CVResult[list[CVIssue]] = self._get_cv_content(urljoin(self.api_url, "issues/"), params)
current_result_count = cv_response["number_of_page_results"]
total_result_count = cv_response["number_of_total_results"]
@ -860,23 +773,13 @@ class ComicVineTalker(ComicTalker):
offset += cv_response["number_of_page_results"]
params["offset"] = offset
cv_response = self._get_cv_content(
urljoin(self.api_url, "issues/"),
params,
on_rate_limit=on_rate_limit,
)
cv_response = self._get_cv_content(urljoin(self.api_url, "issues/"), params)
series_issues_result.extend(cv_response["results"])
current_result_count += cv_response["number_of_page_results"]
# Format to expected output
formatted_series_issues_result = [
self._map_comic_issue_to_metadata(
x,
self._fetch_series_data(
x["volume"]["id"],
on_rate_limit=on_rate_limit,
)[0],
)
self._map_comic_issue_to_metadata(x, self._fetch_series_data(x["volume"]["id"])[0])
for x in series_issues_result
]
@ -890,11 +793,7 @@ class ComicVineTalker(ComicTalker):
)
return [(x, False) for x in formatted_series_issues_result]
def _fetch_series_data(
self,
series_id: int,
on_rate_limit: RLCallBack | None,
) -> tuple[ComicSeries, bool]:
def _fetch_series_data(self, series_id: int) -> tuple[ComicSeries, bool]:
logger.debug("Fetching series info: %s", series_id)
# before we search online, look in our cache, since we might already have this info
cvc = self.cacher()
@ -910,11 +809,7 @@ class ComicVineTalker(ComicTalker):
"api_key": self.api_key,
"format": "json",
}
cv_response: CVResult[CVSeries] = self._get_cv_content(
series_url,
params,
on_rate_limit=on_rate_limit,
)
cv_response: CVResult[CVSeries] = self._get_cv_content(series_url, params)
series_results = cv_response["results"]
@ -925,17 +820,9 @@ class ComicVineTalker(ComicTalker):
return self._format_series(series_results), True
def _fetch_issue_data(
self,
series_id: int,
issue_number: str,
on_rate_limit: RLCallBack | None,
) -> GenericMetadata:
def _fetch_issue_data(self, series_id: int, issue_number: str) -> GenericMetadata:
logger.debug("Fetching issue by series ID: %s and issue number: %s", series_id, issue_number)
issues_list_results = self._fetch_issues_in_series(
str(series_id),
on_rate_limit=on_rate_limit,
)
issues_list_results = self._fetch_issues_in_series(str(series_id))
# Loop through issue list to find the required issue info
f_record = (GenericMetadata(), False)
@ -951,17 +838,10 @@ class ComicVineTalker(ComicTalker):
return f_record[0]
if f_record[0].issue_id is not None:
return self._fetch_issue_data_by_issue_id(
f_record[0].issue_id,
on_rate_limit=on_rate_limit,
)
return self._fetch_issue_data_by_issue_id(f_record[0].issue_id)
return GenericMetadata()
def _fetch_issue_data_by_issue_id(
self,
issue_id: str,
on_rate_limit: RLCallBack | None,
) -> GenericMetadata:
def _fetch_issue_data_by_issue_id(self, issue_id: str) -> GenericMetadata:
logger.debug("Fetching issue by issue ID: %s", issue_id)
# before we search online, look in our cache, since we might already have this info
cvc = self.cacher()
@ -970,20 +850,12 @@ class ComicVineTalker(ComicTalker):
logger.debug("Issue cached: %s", bool(cached_issue and cached_issue[1]))
if cached_issue and cached_issue.complete:
return self._map_comic_issue_to_metadata(
json.loads(cached_issue[0].data),
self._fetch_series_data(
int(cached_issue[0].series_id),
on_rate_limit=on_rate_limit,
)[0],
json.loads(cached_issue[0].data), self._fetch_series_data(int(cached_issue[0].series_id))[0]
)
issue_url = urljoin(self.api_url, f"issue/{CVTypeID.Issue}-{issue_id}")
params = {"api_key": self.api_key, "format": "json"}
cv_response: CVResult[CVIssue] = self._get_cv_content(
issue_url,
params,
on_rate_limit=on_rate_limit,
)
cv_response: CVResult[CVIssue] = self._get_cv_content(issue_url, params)
issue_results = cv_response["results"]
@ -1001,11 +873,7 @@ class ComicVineTalker(ComicTalker):
# Now, map the GenericMetadata data to generic metadata
return self._map_comic_issue_to_metadata(
issue_results,
self._fetch_series_data(
int(issue_results["volume"]["id"]),
on_rate_limit=on_rate_limit,
)[0],
issue_results, self._fetch_series_data(int(issue_results["volume"]["id"]))[0]
)
def _map_comic_issue_to_metadata(self, issue: CVIssue, series: ComicSeries) -> GenericMetadata:

View File

@ -1 +0,0 @@
from __future__ import annotations

View File

@ -1,21 +0,0 @@
MIT License
Copyright (c) 2021 vutran1710
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

View File

@ -1,402 +0,0 @@
<img align="left" width="95" height="120" src="docs/_static/logo.png">
# PyrateLimiter
The request rate limiter using Leaky-bucket algorithm.
Full project documentation can be found at [pyratelimiter.readthedocs.io](https://pyratelimiter.readthedocs.io).
[![PyPI version](https://badge.fury.io/py/pyrate-limiter.svg)](https://badge.fury.io/py/pyrate-limiter)
[![PyPI - Python Versions](https://img.shields.io/pypi/pyversions/pyrate-limiter)](https://pypi.org/project/pyrate-limiter)
[![codecov](https://codecov.io/gh/vutran1710/PyrateLimiter/branch/master/graph/badge.svg?token=E0Q0YBSINS)](https://codecov.io/gh/vutran1710/PyrateLimiter)
[![Maintenance](https://img.shields.io/badge/Maintained%3F-yes-green.svg)](https://github.com/vutran1710/PyrateLimiter/graphs/commit-activity)
[![PyPI license](https://img.shields.io/pypi/l/ansicolortags.svg)](https://pypi.python.org/pypi/pyrate-limiter/)
<br>
## Contents
- [PyrateLimiter](#pyratelimiter)
- [Contents](#contents)
- [Features](#features)
- [Installation](#installation)
- [Basic usage](#basic-usage)
- [Defining rate limits](#defining-rate-limits)
- [Applying rate limits](#applying-rate-limits)
- [Identities](#identities)
- [Handling exceeded limits](#handling-exceeded-limits)
- [Bucket analogy](#bucket-analogy)
- [Rate limit exceptions](#rate-limit-exceptions)
- [Rate limit delays](#rate-limit-delays)
- [Additional usage options](#additional-usage-options)
- [Decorator](#decorator)
- [Contextmanager](#contextmanager)
- [Async decorator/contextmanager](#async-decoratorcontextmanager)
- [Backends](#backends)
- [Memory](#memory)
- [SQLite](#sqlite)
- [Redis](#redis)
- [Custom backends](#custom-backends)
- [Additional features](#additional-features)
- [Time sources](#time-sources)
- [Examples](#examples)
## Features
* Tracks any number of rate limits and intervals you want to define
* Independently tracks rate limits for multiple services or resources
* Handles exceeded rate limits by either raising errors or adding delays
* Several usage options including a normal function call, a decorator, or a contextmanager
* Async support
* Includes optional SQLite and Redis backends, which can be used to persist limit tracking across
multiple threads, processes, or application restarts
## Installation
Install using pip:
```
pip install pyrate-limiter
```
Or using conda:
```
conda install --channel conda-forge pyrate-limiter
```
## Basic usage
### Defining rate limits
Consider some public API (like LinkedIn, GitHub, etc.) that has rate limits like the following:
```
- 500 requests per hour
- 1000 requests per day
- 10000 requests per month
```
You can define these rates using the `RequestRate` class, and add them to a `Limiter`:
``` python
from pyrate_limiter import Duration, RequestRate, Limiter
hourly_rate = RequestRate(500, Duration.HOUR) # 500 requests per hour
daily_rate = RequestRate(1000, Duration.DAY) # 1000 requests per day
monthly_rate = RequestRate(10000, Duration.MONTH) # 10000 requests per month
limiter = Limiter(hourly_rate, daily_rate, monthly_rate)
```
or
``` python
from pyrate_limiter import Duration, RequestRate, Limiter
rate_limits = (
RequestRate(500, Duration.HOUR), # 500 requests per hour
RequestRate(1000, Duration.DAY), # 1000 requests per day
RequestRate(10000, Duration.MONTH), # 10000 requests per month
)
limiter = Limiter(*rate_limits)
```
Note that these rates need to be ordered by interval length; in other words, an hourly rate must
come before a daily rate, etc.
### Applying rate limits
Then, use `Limiter.try_acquire()` wherever you are making requests (or other rate-limited operations).
This will raise an exception if the rate limit is exceeded.
```python
import requests
def request_function():
limiter.try_acquire('identity')
requests.get('https://example.com')
while True:
request_function()
```
Alternatively, you can use `Limiter.ratelimit()` as a function decorator:
```python
@limiter.ratelimit('identity')
def request_function():
requests.get('https://example.com')
```
See [Additional usage options](#additional-usage-options) below for more details.
### Identities
Note that both `try_acquire()` and `ratelimit()` take one or more `identity` arguments. Typically this is
the name of the service or resource that is being rate-limited. This allows you to track rate limits
for these resources independently. For example, if you have a service that is rate-limited by user:
```python
def request_function(user_ids):
limiter.try_acquire(*user_ids)
for user_id in user_ids:
requests.get(f'https://example.com?user_id={user_id}')
```
## Handling exceeded limits
When a rate limit is exceeded, you have two options: raise an exception, or add delays.
### Bucket analogy
<img height="300" align="right" src="https://upload.wikimedia.org/wikipedia/commons/c/c4/Leaky_bucket_analogy.JPG">
At this point it's useful to introduce the analogy of "buckets" used for rate-limiting. Here is a
quick summary:
* This library implements the [Leaky Bucket algorithm](https://en.wikipedia.org/wiki/Leaky_bucket).
* It is named after the idea of representing some kind of fixed capacity -- like a network or service -- as a bucket.
* The bucket "leaks" at a constant rate. For web services, this represents the **ideal or permitted request rate**.
* The bucket is "filled" at an intermittent, unpredictable rate, representing the **actual rate of requests**.
* When the bucket is "full", it will overflow, representing **canceled or delayed requests**.
### Rate limit exceptions
By default, a `BucketFullException` will be raised when a rate limit is exceeded.
The error contains a `meta_info` attribute with the following information:
* `identity`: The identity it received
* `rate`: The specific rate that has been exceeded
* `remaining_time`: The remaining time until the next request can be sent
Here's an example that will raise an exception on the 4th request:
```python
from pyrate_limiter import (Duration, RequestRate,
Limiter, BucketFullException)
rate = RequestRate(3, Duration.SECOND)
limiter = Limiter(rate)
for _ in range(4):
try:
limiter.try_acquire('vutran')
except BucketFullException as err:
print(err)
# Output: Bucket for vutran with Rate 3/1 is already full
print(err.meta_info)
# Output: {'identity': 'vutran', 'rate': '3/1', 'remaining_time': 2.9,
# 'error': 'Bucket for vutran with Rate 3/1 is already full'}
```
The rate part of the output is constructed as: `limit / interval`. On the above example, the limit
is 3 and the interval is 1, hence the `Rate 3/1`.
### Rate limit delays
You may want to simply slow down your requests to stay within the rate limits instead of canceling
them. In that case you can use the `delay` argument. Note that this is only available for
`Limiter.ratelimit()`:
```python
@limiter.ratelimit('identity', delay=True)
def my_function():
do_stuff()
```
If you exceed a rate limit with a long interval (daily, monthly, etc.), you may not want to delay
that long. In this case, you can set a `max_delay` (in seconds) that you are willing to wait in
between calls:
```python
@limiter.ratelimit('identity', delay=True, max_delay=360)
def my_function():
do_stuff()
```
In this case, calls may be delayed by at most 360 seconds to stay within the rate limits; any longer
than that, and a `BucketFullException` will be raised instead. Without specifying `max_delay`, calls
will be delayed as long as necessary.
## Additional usage options
Besides `Limiter.try_acquire()`, some additional usage options are available using `Limiter.ratelimit()`:
### Decorator
`Limiter.ratelimit()` can be used as a decorator:
```python
@limiter.ratelimit('identity')
def my_function():
do_stuff()
```
As with `Limiter.try_acquire()`, if calls to the wrapped function exceed the rate limits you
defined, a `BucketFullException` will be raised.
### Contextmanager
`Limiter.ratelimit()` also works as a contextmanager:
```python
def my_function():
with limiter.ratelimit('identity', delay=True):
do_stuff()
```
### Async decorator/contextmanager
`Limiter.ratelimit()` also support async functions, either as a decorator or contextmanager:
```python
@limiter.ratelimit('identity', delay=True)
async def my_function():
await do_stuff()
async def my_function():
async with limiter.ratelimit('identity'):
await do_stuff()
```
When delays are enabled for an async function, `asyncio.sleep()` will be used instead of `time.sleep()`.
## Backends
A few different bucket backends are available, which can be selected using the `bucket_class`
argument for `Limiter`. Any additional backend-specific arguments can be passed
via `bucket_kwargs`.
### Memory
The default bucket is stored in memory, backed by a `queue.Queue`. A list implementation is also available:
```python
from pyrate_limiter import Limiter, MemoryListBucket
limiter = Limiter(bucket_class=MemoryListBucket)
```
### SQLite
If you need to persist the bucket state, a SQLite backend is available.
By default it will store the state in the system temp directory, and you can use
the `path` argument to use a different location:
```python
from pyrate_limiter import Limiter, SQLiteBucket
limiter = Limiter(bucket_class=SQLiteBucket)
```
By default, the database will be stored in the system temp directory. You can specify a different
path via `bucket_kwargs`:
```python
limiter = Limiter(
bucket_class=SQLiteBucket,
bucket_kwargs={'path': '/path/to/db.sqlite'},
)
```
#### Concurrency
This backend is thread-safe.
If you want to use SQLite with multiprocessing, some additional protections are needed. For
these cases, a separate `FileLockSQLiteBucket` class is available. This requires installing the
[py-filelock](https://py-filelock.readthedocs.io) library.
```python
limiter = Limiter(bucket_class=FileLockSQLiteBucket)
```
### Redis
If you have a larger, distributed application, Redis is an ideal backend. This
option requires [redis-py](https://github.com/andymccurdy/redis-py).
Note that this backend requires a `bucket_name` argument, which will be used as a prefix for the
Redis keys created. This can be used to disambiguate between multiple services using the same Redis
instance with pyrate-limiter.
**Important**: you might want to consider adding an `expire_time` for each bucket. In a scenario where some `identity` produces a request rate that is too sparse, it is good practice to expire the bucket which holds that identity's info, to save memory.
```python
from pyrate_limiter import Limiter, RedisBucket, Duration, RequestRate
rates = [
RequestRate(5, 10 * Duration.SECOND),
RequestRate(8, 20 * Duration.SECOND),
]
limiter = Limiter(
    *rates,
bucket_class=RedisBucket,
bucket_kwargs={
'bucket_name':
'my_service',
'expire_time': rates[-1].interval,
},
)
```
#### Connection settings
If you need to pass additional connection settings, you can use the `redis_pool` bucket argument:
```python
from redis import ConnectionPool
redis_pool = ConnectionPool(host='localhost', port=6379, db=0)
rate = RequestRate(5, 10 * Duration.SECOND)
limiter = Limiter(
rate,
bucket_class=RedisBucket,
bucket_kwargs={'redis_pool': redis_pool, 'bucket_name': 'my_service'},
)
```
#### Redis clusters
Redis clusters are also supported, which requires
[redis-py-cluster](https://github.com/Grokzen/redis-py-cluster):
```python
from pyrate_limiter import Limiter, RedisClusterBucket
limiter = Limiter(bucket_class=RedisClusterBucket)
```
### Custom backends
If these don't suit your needs, you can also create your own bucket backend by extending `pyrate_limiter.bucket.AbstractBucket`.
## Additional features
### Time sources
By default, monotonic time is used, to ensure requests are always logged in the correct order.
You can specify a custom time source with the `time_function` argument. For example, you may want to
use the current UTC time for consistency across a distributed application using a Redis backend.
```python
from datetime import datetime
from pyrate_limiter import Duration, Limiter, RequestRate
rate = RequestRate(5, Duration.SECOND)
limiter_datetime = Limiter(rate, time_function=lambda: datetime.utcnow().timestamp())
```
Or simply use the basic `time.time()` function:
```python
from time import time
rate = RequestRate(5, Duration.SECOND)
limiter_time = Limiter(rate, time_function=time)
```
## Examples
To prove that pyrate-limiter is working as expected, here is a complete example to demonstrate
rate-limiting with delays:
```python
from time import perf_counter as time
from pyrate_limiter import Duration, Limiter, RequestRate
limiter = Limiter(RequestRate(5, Duration.SECOND))
n_requests = 27
@limiter.ratelimit("test", delay=True)
def limited_function(start_time):
print(f"t + {(time() - start_time):.5f}")
start_time = time()
for _ in range(n_requests):
limited_function(start_time)
print(f"Ran {n_requests} requests in {time() - start_time:.5f} seconds")
```
And an equivalent example for async usage:
```python
import asyncio
from time import perf_counter as time
from pyrate_limiter import Duration, Limiter, RequestRate
limiter = Limiter(RequestRate(5, Duration.SECOND))
n_requests = 27
@limiter.ratelimit("test", delay=True)
async def limited_function(start_time):
print(f"t + {(time() - start_time):.5f}")
async def test_ratelimit():
start_time = time()
tasks = [limited_function(start_time) for _ in range(n_requests)]
await asyncio.gather(*tasks)
print(f"Ran {n_requests} requests in {time() - start_time:.5f} seconds")
asyncio.run(test_ratelimit())
```

View File

@ -1,9 +0,0 @@
# flake8: noqa
"""PyrateLimiter"""
from __future__ import annotations
from .bucket import *
from .constants import *
from .exceptions import *
from .limiter import *
from .request_rate import *

View File

@ -1,134 +0,0 @@
"""Implement this class to create
a workable bucket for Limiter to use
"""
from __future__ import annotations
from abc import ABC, abstractmethod
from queue import Queue
from threading import RLock
class AbstractBucket(ABC):
    """Interface that every bucket backend must implement for the Limiter."""

    def __init__(self, maxsize: int = 0, **_kwargs):
        self._maxsize = maxsize

    def maxsize(self) -> int:
        """Return the maximum number of items this bucket can hold."""
        return self._maxsize

    @abstractmethod
    def size(self) -> int:
        """Return the number of items currently held by the bucket."""

    @abstractmethod
    def put(self, item: float) -> int:
        """Store an item (typically the current time); return 1 on success, else 0."""

    @abstractmethod
    def get(self, number: int) -> int:
        """Remove up to ``number`` items in FIFO order; return how many were removed."""

    @abstractmethod
    def all_items(self) -> list[float]:
        """Return copies of every item currently in the bucket."""

    @abstractmethod
    def flush(self) -> None:
        """Empty the bucket entirely."""

    def inspect_expired_items(self, time: float) -> tuple[int, float]:
        """Count items still inside the time window starting at ``time``.

        Returns:
            The number of unexpired items, and the seconds until the next one expires.
        """
        volume = self.size()
        for position, stamp in enumerate(self.all_items()):
            if stamp > time:
                # First unexpired entry found: everything from here on is live.
                return volume - position, round(stamp - time, 3)
        return 0, 0.0

    def lock_acquire(self):
        """Acquire a lock prior to beginning a new transaction, if needed"""

    def lock_release(self):
        """Release lock following a transaction, if needed"""
class MemoryQueueBucket(AbstractBucket):
    """A bucket that resides in memory using python's built-in Queue class."""

    def __init__(self, maxsize: int = 0, **_kwargs):
        # Forward maxsize to the base class so maxsize() reports the real
        # capacity; previously it was dropped and maxsize() always returned 0,
        # unlike MemoryListBucket which forwards it.
        super().__init__(maxsize=maxsize)
        self._q: Queue = Queue(maxsize=maxsize)

    def size(self) -> int:
        return self._q.qsize()

    def put(self, item: float) -> int:
        # Queue.put returns None; the AbstractBucket contract requires
        # "1 if successful", so return it explicitly.
        self._q.put(item)
        return 1

    def get(self, number: int) -> int:
        # Remove up to `number` items in FIFO order and report how many went.
        counter = 0
        for _ in range(number):
            self._q.get()
            counter += 1
        return counter

    def all_items(self) -> list[float]:
        return list(self._q.queue)

    def flush(self) -> None:
        while not self._q.empty():
            self._q.get()
class MemoryListBucket(AbstractBucket):
    """A bucket that resides in memory using python's List"""

    def __init__(self, maxsize: int = 0, **_kwargs):
        super().__init__(maxsize=maxsize)
        self._q: list[float] = []
        # Reentrant lock guards every mutation of the backing list.
        self._lock = RLock()

    def size(self) -> int:
        return len(self._q)

    def put(self, item: float):
        with self._lock:
            # Reject the item when the bucket is already at capacity.
            if self.size() >= self.maxsize():
                return 0
            self._q.append(item)
            return 1

    def get(self, number: int) -> int:
        with self._lock:
            removed = 0
            while removed < number:
                # FIFO: always evict the oldest entry first.
                self._q.pop(0)
                removed += 1
            return removed

    def all_items(self) -> list[float]:
        return self._q.copy()

    def flush(self):
        self._q = []

View File

@ -1,9 +0,0 @@
from __future__ import annotations
class Duration:
    """Common time intervals, expressed in seconds."""

    SECOND = 1
    MINUTE = 60 * SECOND
    HOUR = 60 * MINUTE
    DAY = 24 * HOUR
    MONTH = 30 * DAY

View File

@ -1,32 +0,0 @@
# pylint: disable=C0114,C0115
from __future__ import annotations
from typing import TYPE_CHECKING, Any
if TYPE_CHECKING:
from .request_rate import RequestRate
class BucketFullException(Exception):
    """Raised when a bucket has no capacity left for a new request."""

    def __init__(self, identity: str, rate: RequestRate, remaining_time: float):
        message = f"Bucket for {identity} with Rate {rate} is already full"
        super().__init__(message)
        # Structured details so callers can inspect the failure or compute a delay.
        self.meta_info: dict[str, str | float] = {
            "error": message,
            "identity": identity,
            "rate": str(rate),
            "remaining_time": remaining_time,
        }
class InvalidParams(Exception):
    """Raised when limiter parameters are missing or malformed."""

    def __init__(self, param_name: str):
        msg = "Parameters missing or invalid:" + param_name
        self.message = msg
        super().__init__(msg)
class ImmutableClassProperty(Exception):
    """Raised on an attempt to mutate a read-only class property."""

    def __init__(self, class_instance: Any, prop: str):
        # Mutating class property is forbidden
        self.message = "{}.{} must not be mutated".format(class_instance, prop)
        super().__init__(self.message)

View File

@ -1,132 +0,0 @@
from __future__ import annotations
import asyncio
from functools import partial, wraps
from inspect import iscoroutinefunction
from logging import getLogger
from time import sleep
from typing import TYPE_CHECKING
from comictalker.comictalker import RLCallBack
from .exceptions import BucketFullException
logger = getLogger("pyrate_limiter")
if TYPE_CHECKING:
from .limiter import Limiter
class LimitContextDecorator:
    """A class that can be used as a:

    * decorator
    * async decorator
    * contextmanager
    * async contextmanager

    Intended to be used via :py:meth:`.Limiter.ratelimit`. Depending on arguments, calls that exceed
    the rate limit will either raise an exception, or sleep until space is available in the bucket.

    Args:
        limiter: Limiter object
        identities: Bucket identities
        delay: Delay until the next request instead of raising an exception
        max_delay: The maximum allowed delay time (in seconds); anything over this will raise
            an exception
        on_rate_limit: Optional callback invoked each time a delay is about to happen
    """

    def __init__(
        self,
        limiter: Limiter,
        *identities: str,
        delay: bool = False,
        max_delay: int | float | None = None,
        on_rate_limit: RLCallBack | None = None,
    ):
        self.delay = delay
        # 0 means "no cap": delay as long as necessary (falsy max_delay collapses to 0).
        self.max_delay = max_delay or 0
        # Pre-bind the identities so each retry is a zero-argument call.
        self.try_acquire = partial(limiter.try_acquire, *identities)
        self.on_rate_limit = on_rate_limit

    def __call__(self, func):
        """Allows usage as a decorator for both normal and async functions"""

        @wraps(func)
        def wrapper(*args, **kwargs):
            self.delayed_acquire()
            return func(*args, **kwargs)

        @wraps(func)
        async def async_wrapper(*args, **kwargs):
            await self.async_delayed_acquire()
            return await func(*args, **kwargs)

        # Return either an async or normal wrapper, depending on the type of the wrapped function
        return async_wrapper if iscoroutinefunction(func) else wrapper

    def __enter__(self):
        """Allows usage as a contextmanager"""
        self.delayed_acquire()

    def __exit__(self, *exc):
        pass

    async def __aenter__(self):
        """Allows usage as an async contextmanager"""
        await self.async_delayed_acquire()

    async def __aexit__(self, *exc):
        pass

    def delayed_acquire(self):
        """Delay and retry until we can successfully acquire an available bucket item"""
        while True:
            try:
                self.try_acquire()
            except BucketFullException as err:
                # delay_or_reraise re-raises unless delays are enabled and within max_delay.
                delay_time = full_delay_time = self.delay_or_reraise(err)
            else:
                break
            if self.on_rate_limit:
                # Cap each sleep at the callback's interval so the callback can be
                # invoked repeatedly while waiting out a long rate-limit window.
                if self.on_rate_limit.interval > 0 and delay_time > self.on_rate_limit.interval:
                    delay_time = self.on_rate_limit.interval
                self.on_rate_limit.callback(full_delay_time, delay_time)
            logger.warning(
                "Rate limit reached; %.0f seconds remaining before next request. Sleeping for %.0f seconds",
                full_delay_time,
                delay_time,
            )
            sleep(delay_time)

    async def async_delayed_acquire(self):
        """Delay and retry until we can successfully acquire an available bucket item"""
        while True:
            try:
                self.try_acquire()
            except BucketFullException as err:
                delay_time = full_delay_time = self.delay_or_reraise(err)
                if self.on_rate_limit:
                    # Same interval-capping behaviour as the sync path.
                    if self.on_rate_limit.interval > 0 and delay_time > self.on_rate_limit.interval:
                        delay_time = self.on_rate_limit.interval
                    self.on_rate_limit.callback(full_delay_time, delay_time)
                logger.warning(
                    "Rate limit reached; %.0f seconds remaining before next request. Sleeping for %.0f seconds",
                    full_delay_time,
                    delay_time,
                )
                # Non-blocking sleep so other tasks keep running while we wait.
                await asyncio.sleep(delay_time)
            else:
                break

    def delay_or_reraise(self, err: BucketFullException) -> float:
        """Determine if we should delay after exceeding a rate limit. If so, return the delay time,
        otherwise re-raise the exception.
        """
        delay_time = float(err.meta_info["remaining_time"])
        # A max_delay of 0 means uncapped, so it never triggers this check.
        exceeded_max_delay = bool(self.max_delay) and (delay_time > self.max_delay)
        if self.delay and not exceeded_max_delay:
            return delay_time
        raise err

View File

@ -1,163 +0,0 @@
from __future__ import annotations
from time import monotonic
from typing import Any, Callable
from comictalker.comictalker import RLCallBack
from .bucket import AbstractBucket, MemoryQueueBucket
from .exceptions import BucketFullException, InvalidParams
from .limit_context_decorator import LimitContextDecorator
from .request_rate import RequestRate
class Limiter:
    """Main rate-limiter class.

    Args:
        rates: Request rate definitions
        bucket_class: Bucket backend to use; may be any subclass of :py:class:`.AbstractBucket`.
            See :py:mod:`pyrate_limiter.bucket` for available bucket classes.
        bucket_kwargs: Extra keyword arguments to pass to the bucket class constructor.
        time_function: Time function that returns the current time as a float, in seconds
    """

    def __init__(
        self,
        *rates: RequestRate,
        on_rate_limit: RLCallBack | None = None,
        bucket_class: type[AbstractBucket] = MemoryQueueBucket,
        bucket_kwargs: dict[str, Any] | None = None,
        time_function: Callable[[], float] | None = None,
    ):
        # Rates must be provided and ordered from smallest to largest limit/interval.
        self._validate_rate_list(rates)
        # Optional callback invoked when a rate limit is reached (used by ratelimit()).
        self.on_rate_limit = on_rate_limit
        self._rates = rates
        self._bkclass = bucket_class
        self._bucket_args = bucket_kwargs or {}
        # Fail fast if the bucket backend can't be constructed with these args.
        self._validate_bucket()
        # One bucket per identity, created lazily in _init_buckets().
        self.bucket_group: dict[str, AbstractBucket] = {}
        self.time_function = monotonic
        if time_function is not None:
            self.time_function = time_function
        # Call for time_function to make an anchor if required.
        self.time_function()

    def _validate_rate_list(self, rates):  # pylint: disable=no-self-use
        """Raise :py:exc:`InvalidParams` if rates are missing or incorrectly ordered."""
        if not rates:
            raise InvalidParams("Rate(s) must be provided")
        for idx, rate in enumerate(rates[1:]):
            prev_rate = rates[idx]
            # Each successive rate must have both a strictly larger limit and interval.
            invalid = rate.limit <= prev_rate.limit or rate.interval <= prev_rate.interval
            if invalid:
                msg = f"{prev_rate} cannot come before {rate}"
                raise InvalidParams(msg)

    def _validate_bucket(self):
        """Try initialize a bucket to check if ok"""
        bucket = self._bkclass(maxsize=self._rates[-1].limit, identity="_", **self._bucket_args)
        del bucket

    def _init_buckets(self, identities) -> None:
        """Initialize a bucket for each identity, if needed.

        The bucket's maxsize equals the max limit of request-rates.
        """
        maxsize = self._rates[-1].limit
        # Sorted iteration gives a deterministic lock-acquisition order,
        # which avoids lock-ordering deadlocks between concurrent callers.
        for item_id in sorted(identities):
            if not self.bucket_group.get(item_id):
                self.bucket_group[item_id] = self._bkclass(
                    maxsize=maxsize,
                    identity=item_id,
                    **self._bucket_args,
                )
            self.bucket_group[item_id].lock_acquire()

    def _release_buckets(self, identities) -> None:
        """Release locks after bucket transactions, if applicable"""
        for item_id in sorted(identities):
            self.bucket_group[item_id].lock_release()

    def try_acquire(self, *identities: str) -> None:
        """Attempt to acquire an item, or raise an error if a rate limit has been exceeded.

        Args:
            identities: One or more identities to acquire. Typically this is the name of a service
                or resource that is being rate-limited.

        Raises:
            :py:exc:`BucketFullException`: If the bucket is full and the item cannot be acquired
        """
        # Locks every identity's bucket; every exit path below must release them.
        self._init_buckets(identities)
        now = round(self.time_function(), 3)

        for rate in self._rates:
            for item_id in identities:
                bucket = self.bucket_group[item_id]
                volume = bucket.size()

                # Bucket holds fewer items than this rate's limit; can't be exceeded yet.
                if volume < rate.limit:
                    continue

                # Determine rate's starting point, and check requests made during its time window
                item_count, remaining_time = bucket.inspect_expired_items(now - rate.interval)
                if item_count >= rate.limit:
                    # Unlock before raising so other callers aren't blocked.
                    self._release_buckets(identities)
                    raise BucketFullException(item_id, rate, remaining_time)

                # Remove expired bucket items beyond the last (maximum) rate limit,
                if rate is self._rates[-1]:
                    bucket.get(volume - item_count)

        # If no buckets are full, add another item to each bucket representing the next request
        for item_id in identities:
            self.bucket_group[item_id].put(now)

        self._release_buckets(identities)

    def ratelimit(
        self,
        *identities: str,
        delay: bool = False,
        max_delay: int | float | None = None,
        on_rate_limit: RLCallBack | None = None,
    ):
        """A decorator and contextmanager that applies rate-limiting, with async support.

        Depending on arguments, calls that exceed the rate limit will either raise an exception, or
        sleep until space is available in the bucket.

        Args:
            identities: One or more identities to acquire. Typically this is the name of a service
                or resource that is being rate-limited.
            delay: Delay until the next request instead of raising an exception
            max_delay: The maximum allowed delay time (in seconds); anything over this will raise
                an exception

        Raises:
            :py:exc:`BucketFullException`: If the rate limit is reached, and ``delay=False`` or the
                delay exceeds ``max_delay``
        """
        return LimitContextDecorator(
            self,
            *identities,
            delay=delay,
            max_delay=max_delay,
            # The limiter-wide callback, when set, takes precedence over the per-call one.
            on_rate_limit=self.on_rate_limit or on_rate_limit,
        )

    def get_current_volume(self, identity) -> int:
        """Get current bucket volume for a specific identity"""
        bucket = self.bucket_group[identity]
        return bucket.size()

    def flush_all(self) -> int:
        """Empty every known bucket and return the number of buckets flushed."""
        cnt = 0
        for _, bucket in self.bucket_group.items():
            bucket.flush()
            cnt += 1
        return cnt

View File

@ -1,52 +0,0 @@
"""Initialize this class to define request-rates for limiter"""
from __future__ import annotations
from enum import Enum
from typing import Any
from .exceptions import ImmutableClassProperty
class ResetTypes(Enum):
    """Enumeration of supported rate-limit reset behaviors."""

    SCHEDULED = 1
    INTERVAL = 2
class RequestRate:
    """Request rate definition: ``limit`` requests allowed per ``interval`` seconds.

    Args:
        limit: Number of requests allowed within ``interval``
        interval: Time interval, in seconds
    """

    def __init__(
        self,
        limit: int,
        interval: int,
        reset: ResetTypes = ResetTypes.INTERVAL,
    ):
        # Stored privately; the properties below expose them read-only.
        self._limit = limit
        self._interval = interval
        self._reset = reset
        self._log: dict[Any, Any] = {}

    @property
    def limit(self) -> int:
        """Number of requests allowed per interval (read-only)."""
        return self._limit

    @limit.setter
    def limit(self, value):
        # Rates are immutable once constructed.
        raise ImmutableClassProperty(self, "limit")

    @property
    def interval(self) -> int:
        """Length of the rate window, in seconds (read-only)."""
        return self._interval

    @interval.setter
    def interval(self, value):
        # Rates are immutable once constructed.
        raise ImmutableClassProperty(self, "interval")

    def __str__(self):
        # Rendered as "<limit>/<interval>", e.g. "100/60".
        return f"{self.limit}/{self.interval}"

View File

@ -134,7 +134,6 @@ description = run the tests with pytest
package = wheel
deps =
pytest>=7
gui,all: pytest-qt
extras =
7z: 7Z
cbr: CBR
@ -240,7 +239,7 @@ commands =
description = Generate appimage executable
skip_install = true
platform = linux
base = testenv
base = {env:tox_env:testenv}
labels =
build
depends =
@ -298,7 +297,6 @@ per-file-ignores =
build-tools/generate_settngs.py: T20
build-tools/oidc-exchange.py: T20
tests/*: L
tests/pyqttoast_test.py: E402
[mypy]
exclude = comictaggerlib/graphics/resources.py
@ -321,15 +319,5 @@ disallow_untyped_defs = false
disallow_incomplete_defs = false
check_untyped_defs = false
[mypy-comictaggerlib.ui.pyqttoast.tests.*]
disallow_untyped_defs = false
disallow_incomplete_defs = false
check_untyped_defs = false
[mypy-comictaggerlib.graphics.resources]
ignore_errors = true
follow_imports = skip
[mypy-comictalker.vendor.*]
ignore_errors = true
follow_imports = skip
ignore_errors = True

View File

@ -4,6 +4,7 @@ import os
import pathlib
import platform
import shutil
from contextlib import nullcontext as does_not_raise
import pytest
from importlib_metadata import entry_points
@ -44,11 +45,13 @@ def test_read_tags(cbz, md_saved):
assert md == md_saved
def test_write_cr(tmp_comic):
def test_write_cr(tmp_comic_path):
tmp_comic = comicapi.comicarchive.ComicArchive(tmp_comic_path)
md = tmp_comic.read_tags("cr")
md.apply_default_page_list(tmp_comic.get_page_name_list())
assert tmp_comic.write_tags(md, "cr")
with does_not_raise():
tmp_comic.write_tags(md, "cr")
md = tmp_comic.read_tags("cr")
@ -60,7 +63,8 @@ def test_save_cr_rar(tmp_path, md_saved):
tmp_comic = comicapi.comicarchive.ComicArchive(tmp_path / cbr_path.name)
assert tmp_comic.seems_to_be_a_comic_archive()
assert tmp_comic.write_tags(comicapi.genericmetadata.md_test, "cr")
with does_not_raise():
tmp_comic.write_tags(comicapi.genericmetadata.md_test, "cr")
md = tmp_comic.read_tags("cr")
@ -70,24 +74,27 @@ def test_save_cr_rar(tmp_path, md_saved):
assert md == md_saved
def test_page_type_write(tmp_comic):
def test_page_type_write(tmp_comic_path):
tmp_comic = comicapi.comicarchive.ComicArchive(tmp_comic_path)
md = tmp_comic.read_tags("cr")
t = md.pages[0]
t.type = ""
assert tmp_comic.write_tags(md, "cr")
with does_not_raise():
tmp_comic.write_tags(md, "cr")
md = tmp_comic.read_tags("cr")
def test_invalid_zip(tmp_comic: comicapi.comicarchive.ComicArchive):
with open(tmp_comic.path, mode="b+r") as f:
def test_invalid_zip(tmp_comic_path):
with open(tmp_comic_path, mode="b+r") as f:
# Corrupting the first file only breaks the first file. If it is never read then no exception will be raised
f.seek(-10, os.SEEK_END) # seek to a probably bad place in the Central Directory and write some bytes
f.write(b"PK\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000")
result = tmp_comic.write_tags(comicapi.genericmetadata.md_test, "cr") # This is not the first file
assert result
tmp_comic = comicapi.comicarchive.ComicArchive(tmp_comic_path)
with pytest.raises(OSError, match="^Error listing files in zip archive"):
tmp_comic.write_tags(comicapi.genericmetadata.md_test, "cr") # This is not the first file
assert not tmp_comic.seems_to_be_a_comic_archive() # Calls archiver.is_valid
@ -110,7 +117,8 @@ def test_copy_from_archive(archiver, tmp_path, cbz, md_saved):
archive = archiver.open(comic_path)
assert archive.copy_from_archive(cbz.archiver)
with does_not_raise():
archive.copy_from_archive(cbz.archiver)
comic_archive = comicapi.comicarchive.ComicArchive(comic_path)
@ -121,7 +129,8 @@ def test_copy_from_archive(archiver, tmp_path, cbz, md_saved):
assert md == md_saved
def test_rename(tmp_comic, tmp_path):
def test_rename(tmp_comic_path, tmp_path):
tmp_comic = comicapi.comicarchive.ComicArchive(tmp_comic_path)
old_path = tmp_comic.path
tmp_comic.rename(tmp_path / "test.cbz")
assert not old_path.exists()
@ -129,8 +138,9 @@ def test_rename(tmp_comic, tmp_path):
assert tmp_comic.path != old_path
def test_rename_ro_dest(tmp_comic, tmp_path):
old_path = tmp_comic.path
def test_rename_ro_dest(tmp_comic_path, tmp_path):
tmp_comic = comicapi.comicarchive.ComicArchive(tmp_comic_path)
dest = tmp_path / "tmp"
dest.mkdir(mode=0o000)
with pytest.raises(OSError):
@ -138,6 +148,6 @@ def test_rename_ro_dest(tmp_comic, tmp_path):
raise OSError("Windows sucks")
tmp_comic.rename(dest / "test.cbz")
dest.chmod(mode=0o777)
assert old_path.exists()
assert tmp_comic_path.exists()
assert tmp_comic.path.exists()
assert tmp_comic.path == old_path
assert tmp_comic.path == tmp_comic_path

View File

@ -9,12 +9,9 @@ import testing.comicvine
def test_search_for_series(comicvine_api, comic_cache):
results = comicvine_api.search_for_series(
"cory doctorows futuristic tales of the here and now", on_rate_limit=None
)[0]
results = comicvine_api.search_for_series("cory doctorows futuristic tales of the here and now")[0]
cache_series = comic_cache.get_search_results(
comicvine_api.id,
"cory doctorows futuristic tales of the here and now",
comicvine_api.id, "cory doctorows futuristic tales of the here and now"
)[0][0]
series_results = comicvine_api._format_series(json.loads(cache_series.data))
assert results == series_results
@ -43,7 +40,7 @@ def test_fetch_issues_in_series(comicvine_api, comic_cache):
def test_fetch_issue_data_by_issue_id(comicvine_api):
result = comicvine_api.fetch_comic_data(140529, on_rate_limit=None)
result = comicvine_api.fetch_comic_data(140529)
result.notes = None
assert result == testing.comicvine.cv_md
@ -78,6 +75,6 @@ cv_issue = [
@pytest.mark.parametrize("series_id, issue_number, expected", cv_issue)
def test_fetch_issue_data(comicvine_api, series_id, issue_number, expected):
results = comicvine_api._fetch_issue_data(series_id, issue_number, on_rate_limit=None)
results = comicvine_api._fetch_issue_data(series_id, issue_number)
results.notes = None
assert results == expected

View File

@ -3,6 +3,7 @@ from __future__ import annotations
import copy
import datetime
import io
import pathlib
import shutil
import unittest.mock
from argparse import Namespace
@ -12,6 +13,7 @@ from typing import Any
import pytest
import settngs
from PIL import Image
from pyrate_limiter import Limiter, RequestRate
import comicapi.comicarchive
import comicapi.genericmetadata
@ -21,7 +23,6 @@ import comictalker
import comictalker.comiccacher
import comictalker.talkers.comicvine
from comicapi import utils
from comictalker.vendor.pyrate_limiter import Limiter, RequestRate
from testing import comicvine, filenames
from testing.comicdata import all_seed_imprints, seed_imprints
@ -33,17 +34,20 @@ except ImportError:
@pytest.fixture
def cbz():
yield comicapi.comicarchive.ComicArchive(filenames.cbz_path)
yield comicapi.comicarchive.ComicArchive(
str(filenames.cbz_path)
) # When testing these always refer to a file on a filesystem
@pytest.fixture
def tmp_comic(tmp_path):
shutil.copy(filenames.cbz_path, tmp_path)
yield comicapi.comicarchive.ComicArchive(tmp_path / filenames.cbz_path.name)
def tmp_comic_path(tmp_path: pathlib.Path):
shutil.copy(str(filenames.cbz_path), str(tmp_path)) # When testing these always refer to a file on a filesystem
yield (tmp_path / filenames.cbz_path.name)
@pytest.fixture
def cbz_double_cover(tmp_path, tmp_comic):
def cbz_double_cover(tmp_path, tmp_comic_path):
tmp_comic = comicapi.comicarchive.ComicArchive(tmp_comic_path)
cover = Image.open(io.BytesIO(tmp_comic.get_page(0)))
other_page = Image.open(io.BytesIO(tmp_comic.get_page(tmp_comic.get_number_of_pages() - 1)))
@ -53,7 +57,6 @@ def cbz_double_cover(tmp_path, tmp_comic):
double_cover.paste(cover, (cover.width, 0))
tmp_comic.archiver.write_file("double_cover.jpg", double_cover.tobytes("jpeg", "RGB"))
yield tmp_comic
@pytest.fixture(autouse=True)

View File

@ -12,10 +12,11 @@ from comictalker.comictalker import ComicTalker
def test_save(
plugin_config: tuple[settngs.Config[ctsettings.ct_ns], dict[str, ComicTalker]],
tmp_comic,
tmp_comic_path,
md_saved,
mock_now,
) -> None:
tmp_comic = comicapi.comicarchive.ComicArchive(tmp_comic_path)
# Overwrite the series so it has definitely changed
tmp_comic.write_tags(md_saved.replace(series="nothing"), "cr")
@ -24,9 +25,6 @@ def test_save(
# Check that it changed
assert md != md_saved
# Clear the cached tags
tmp_comic.reset_cache()
# Setup the app
config = plugin_config[0]
talkers = plugin_config[1]
@ -37,7 +35,7 @@ def test_save(
# Check online, should be intercepted by comicvine_api
config[0].Auto_Tag__online = True
# Use the temporary comic we created
config[0].Runtime_Options__files = [tmp_comic.path]
config[0].Runtime_Options__files = [tmp_comic_path]
# Read and save ComicRack tags
config[0].Runtime_Options__tags_read = ["cr"]
config[0].Runtime_Options__tags_write = ["cr"]
@ -46,6 +44,9 @@ def test_save(
# Run ComicTagger
CLI(config[0], talkers).run()
# tmp_comic is invalid it can't handle outside changes so we need a new one
tmp_comic = comicapi.comicarchive.ComicArchive(tmp_comic_path)
# Read the CBZ
md = tmp_comic.read_tags("cr")
@ -68,18 +69,16 @@ def test_save(
def test_delete(
plugin_config: tuple[settngs.Config[ctsettings.ct_ns], dict[str, ComicTalker]],
tmp_comic,
tmp_comic_path,
md_saved,
mock_now,
) -> None:
tmp_comic = comicapi.comicarchive.ComicArchive(tmp_comic_path)
md = tmp_comic.read_tags("cr")
# Check that the metadata starts correct
assert md == md_saved
# Clear the cached metadata
tmp_comic.reset_cache()
# Setup the app
config = plugin_config[0]
talkers = plugin_config[1]
@ -88,12 +87,15 @@ def test_delete(
config[0].Commands__command = comictaggerlib.resulttypes.Action.delete
# Use the temporary comic we created
config[0].Runtime_Options__files = [tmp_comic.path]
config[0].Runtime_Options__files = [tmp_comic_path]
# Delete ComicRack tags
config[0].Runtime_Options__tags_write = ["cr"]
# Run ComicTagger
CLI(config[0], talkers).run()
# tmp_comic is invalid it can't handle outside changes so we need a new one
tmp_comic = comicapi.comicarchive.ComicArchive(tmp_comic_path)
# Read the CBZ
md = tmp_comic.read_tags("cr")
@ -106,10 +108,11 @@ def test_delete(
def test_rename(
plugin_config: tuple[settngs.Config[ctsettings.ct_ns], dict[str, ComicTalker]],
tmp_comic,
tmp_comic_path,
md_saved,
mock_now,
) -> None:
tmp_comic = comicapi.comicarchive.ComicArchive(tmp_comic_path)
md = tmp_comic.read_tags("cr")
# Check that the metadata starts correct
@ -126,7 +129,7 @@ def test_rename(
config[0].Commands__command = comictaggerlib.resulttypes.Action.rename
# Use the temporary comic we created
config[0].Runtime_Options__files = [tmp_comic.path]
config[0].Runtime_Options__files = [tmp_comic_path]
# Set the template
config[0].File_Rename__template = "{series}"
@ -135,8 +138,11 @@ def test_rename(
# Run ComicTagger
CLI(config[0], talkers).run()
# tmp_comic is invalid it can't handle outside changes so we need a new one
tmp_comic = comicapi.comicarchive.ComicArchive(tmp_comic_path)
# Update the comic path
tmp_comic.path = tmp_comic.path.parent / (md.series + ".cbz")
tmp_comic.path = tmp_comic.path.parent / ((md.series or "comic") + ".cbz")
# Read the CBZ
md = tmp_comic.read_tags("cr")

View File

@ -5,6 +5,7 @@ import io
import pytest
from PIL import Image
import comicapi.comicarchive
import comictaggerlib.imagehasher
import comictaggerlib.issueidentifier
import testing.comicdata
@ -13,24 +14,16 @@ from comicapi.genericmetadata import ImageHash
from comictaggerlib.resulttypes import IssueResult
def test_crop(cbz_double_cover, config, tmp_path, comicvine_api):
def test_crop(cbz_double_cover, config, tmp_path, comicvine_api, tmp_comic_path):
tmp_comic = comicapi.comicarchive.ComicArchive(tmp_comic_path)
config, definitions = config
iio = comictaggerlib.issueidentifier.IssueIdentifierOptions(
series_match_search_thresh=config.Issue_Identifier__series_match_search_thresh,
series_match_identify_thresh=config.Issue_Identifier__series_match_identify_thresh,
use_publisher_filter=config.Auto_Tag__use_publisher_filter,
publisher_filter=config.Auto_Tag__publisher_filter,
quiet=config.Runtime_Options__quiet,
cache_dir=config.Runtime_Options__config.user_cache_dir,
border_crop_percent=config.Issue_Identifier__border_crop_percent,
talker=comicvine_api,
)
ii = comictaggerlib.issueidentifier.IssueIdentifier(iio, None)
im = Image.open(io.BytesIO(cbz_double_cover.archiver.read_file("double_cover.jpg")))
ii = comictaggerlib.issueidentifier.IssueIdentifier(tmp_comic, config, comicvine_api)
im = Image.open(io.BytesIO(tmp_comic.archiver.read_file("double_cover.jpg")))
cropped = ii._crop_double_page(im)
original = cbz_double_cover.get_page(0)
original = tmp_comic.get_page(0)
original_hash = comictaggerlib.imagehasher.ImageHasher(data=original).average_hash()
cropped_hash = comictaggerlib.imagehasher.ImageHasher(image=cropped).average_hash()
@ -41,17 +34,7 @@ def test_crop(cbz_double_cover, config, tmp_path, comicvine_api):
@pytest.mark.parametrize("additional_md, expected", testing.comicdata.metadata_keys)
def test_get_search_keys(cbz, config, additional_md, expected, comicvine_api):
config, definitions = config
iio = comictaggerlib.issueidentifier.IssueIdentifierOptions(
series_match_search_thresh=config.Issue_Identifier__series_match_search_thresh,
series_match_identify_thresh=config.Issue_Identifier__series_match_identify_thresh,
use_publisher_filter=config.Auto_Tag__use_publisher_filter,
publisher_filter=config.Auto_Tag__publisher_filter,
quiet=config.Runtime_Options__quiet,
cache_dir=config.Runtime_Options__config.user_cache_dir,
border_crop_percent=config.Issue_Identifier__border_crop_percent,
talker=comicvine_api,
)
ii = comictaggerlib.issueidentifier.IssueIdentifier(iio, None)
ii = comictaggerlib.issueidentifier.IssueIdentifier(cbz, config, comicvine_api)
assert expected == ii._get_search_keys(additional_md)
@ -65,17 +48,7 @@ def test_get_issue_cover_match_score(
expected: comictaggerlib.issueidentifier.Score,
):
config, definitions = config
iio = comictaggerlib.issueidentifier.IssueIdentifierOptions(
series_match_search_thresh=config.Issue_Identifier__series_match_search_thresh,
series_match_identify_thresh=config.Issue_Identifier__series_match_identify_thresh,
use_publisher_filter=config.Auto_Tag__use_publisher_filter,
publisher_filter=config.Auto_Tag__publisher_filter,
quiet=config.Runtime_Options__quiet,
cache_dir=config.Runtime_Options__config.user_cache_dir,
border_crop_percent=config.Issue_Identifier__border_crop_percent,
talker=comicvine_api,
)
ii = comictaggerlib.issueidentifier.IssueIdentifier(iio, None)
ii = comictaggerlib.issueidentifier.IssueIdentifier(cbz, config, comicvine_api)
score = ii._get_issue_cover_match_score(
primary_img_url=data[0],
alt_urls=data[1],
@ -87,17 +60,7 @@ def test_get_issue_cover_match_score(
def test_search(cbz, config, comicvine_api):
config, definitions = config
iio = comictaggerlib.issueidentifier.IssueIdentifierOptions(
series_match_search_thresh=config.Issue_Identifier__series_match_search_thresh,
series_match_identify_thresh=config.Issue_Identifier__series_match_identify_thresh,
use_publisher_filter=config.Auto_Tag__use_publisher_filter,
publisher_filter=config.Auto_Tag__publisher_filter,
quiet=config.Runtime_Options__quiet,
cache_dir=config.Runtime_Options__config.user_cache_dir,
border_crop_percent=config.Issue_Identifier__border_crop_percent,
talker=comicvine_api,
)
ii = comictaggerlib.issueidentifier.IssueIdentifier(iio, None)
ii = comictaggerlib.issueidentifier.IssueIdentifier(cbz, config, comicvine_api)
result, issues = ii.identify(cbz, cbz.read_tags("cr"))
cv_expected = IssueResult(
series=f"{testing.comicvine.cv_volume_result['results']['name']} ({testing.comicvine.cv_volume_result['results']['start_year']})",
@ -121,17 +84,7 @@ def test_search(cbz, config, comicvine_api):
def test_crop_border(cbz, config, comicvine_api):
config, definitions = config
iio = comictaggerlib.issueidentifier.IssueIdentifierOptions(
series_match_search_thresh=config.Issue_Identifier__series_match_search_thresh,
series_match_identify_thresh=config.Issue_Identifier__series_match_identify_thresh,
use_publisher_filter=config.Auto_Tag__use_publisher_filter,
publisher_filter=config.Auto_Tag__publisher_filter,
quiet=config.Runtime_Options__quiet,
cache_dir=config.Runtime_Options__config.user_cache_dir,
border_crop_percent=config.Issue_Identifier__border_crop_percent,
talker=comicvine_api,
)
ii = comictaggerlib.issueidentifier.IssueIdentifier(iio, None)
ii = comictaggerlib.issueidentifier.IssueIdentifier(cbz, config, comicvine_api)
# This creates a white square centered on a black background
bg = Image.new("RGBA", (100, 100), (0, 0, 0, 255))

File diff suppressed because it is too large Load Diff

View File

@ -5,6 +5,7 @@ from importlib_metadata import entry_points
import comicapi.genericmetadata
import testing.comicdata
from comicapi.archivers.zip import ZipArchiver
from comictaggerlib.md import prepare_metadata
tags = []
@ -20,11 +21,12 @@ if not tags:
@pytest.mark.parametrize("tag_type", tags)
def test_metadata(mock_version, tmp_comic, md_saved, tag_type):
def test_metadata(mock_version, tmp_comic_path, md_saved, tag_type):
archiver = ZipArchiver.open(tmp_comic_path)
tag = tag_type(mock_version[0])
supported_attributes = tag.supported_attributes
tag.write_tags(comicapi.genericmetadata.md_test, tmp_comic.archiver)
written_metadata = tag.read_tags(tmp_comic.archiver)
tag.write_tags(comicapi.genericmetadata.md_test, archiver)
written_metadata = tag.read_tags(archiver)
md = md_saved._get_clean_metadata(*supported_attributes)
# Hack back in the pages variable because CoMet supports identifying the cover by the filename