Merge pull request #3 from ajslater/pre-release

v0.2.0a0
AJ Slater 2024-02-28 13:09:44 -08:00 committed by GitHub
commit 1aa1a6d6b7
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
22 changed files with 1241 additions and 442 deletions

.circleci/config.yml (new file, +60)

@@ -0,0 +1,60 @@
jobs:
build:
machine:
image: ubuntu-2204:current
environment:
DOCKER_CLI_EXPERIMENTAL: enabled
DOCKER_BUILDKIT: 1
steps:
- checkout
- run:
command: docker compose build comicfn2dict-builder
name: Build Builder
- run:
command: ./bin/docker-compose-exit.sh comicfn2dict-lint
name: comicfn2dict Lint
- run:
command: ./bin/docker-compose-exit.sh comicfn2dict-test
name: comicfn2dict Test
- store_test_results:
path: test-results/pytest
- store_artifacts:
path: test-results/coverage
- run:
command: ./bin/docker-compose-exit.sh comicfn2dict-build
name: Build comicfn2dict Dist
- persist_to_workspace:
paths:
- ./README.md
- ./bin
- ./dist
- ./pyproject.toml
root: .
deploy:
docker:
- image: cimg/python:3.12.1
steps:
- attach_workspace:
at: .
- run:
command: ./bin/publish-pypi.sh
version: 2.1
workflows:
main:
jobs:
- build:
filters:
branches:
only:
- develop
- pre-release
- main
- deploy:
filters:
branches:
only:
- pre-release
- main
requires:
- build
version: 2.1

Dockerfile (new file, +20)

@@ -0,0 +1,20 @@
FROM python:3.12.1-bookworm
LABEL maintainer="AJ Slater <aj@slater.net>"
COPY debian.sources /etc/apt/sources.list.d/
# hadolint ignore=DL3008
RUN apt-get clean \
&& apt-get update \
&& apt-get install --no-install-recommends -y \
bash \
npm \
&& apt-get clean \
&& rm -rf /var/lib/apt/lists/*
WORKDIR /app
COPY bin ./bin
COPY package.json package-lock.json pyproject.toml poetry.lock Makefile ./
RUN make install-all
COPY . .

Makefile

@@ -1,28 +1,28 @@
.PHONY: install-deps
## Update pip and install poetry
## @category Install
install-deps:
pip install --upgrade pip
pip install --upgrade poetry
npm install
.PHONY: install
## Install for production
## @category Install
install-prod: install-deps
poetry install --no-root --only-root
npm install
.PHONY: install-dev
## Install dev requirements
## @category Install
install-dev: install-deps
poetry install --no-root --only-root --with dev
npm install
.PHONY: install-all
## Install with all extras
## @category Install
install-all: install-deps
poetry install --no-root --all-extras
npm install
.PHONY: clean
## Clean pycaches

NEWS.md (+15)

@@ -1,5 +1,20 @@
# 📰 comicfn2dict News

## v0.2.0

- Titles are now parsed only if they occur after the series token AND after
  either the issue, year, or volume token.
- A more sophisticated date parser.
- Issue numbers that lead with a '#' character may start with alphabetical
  characters.
- If a volume is parsed but an issue number is not, the issue number is copied
  from the volume number (see the sketch below).
- ComicFilenameParser and ComicFilenameSerializer classes are available as well
  as the old function API.
- New test cases thanks to @lordwelch & @bpepple
- Titles must come after the series and one other token, but before format and
  scan info.
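
A minimal sketch of the new volume-to-issue behavior, assuming a hypothetical
filename (the exact output is illustrative, not verified):

```python
from comicfn2dict import ComicFilenameParser

# "v2" parses as the volume and no issue token is present,
# so the issue number is copied from the volume number.
metadata = ComicFilenameParser("Comic Series v2 (2024).cbz").parse()
# e.g. {"series": "Comic Series", "volume": "2", "issue": "2",
#       "year": "2024", "ext": "cbz"}
```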

## v0.1.4

- Require Python 3.10

README.md

@@ -12,7 +12,17 @@ pip install comicfn2dict
## API

look at `comicfn2dict/comicfn2dict.py`

<!-- eslint-skip -->

```python
from comicfn2dict import comicfn2dict, dict2comicfn

path = "Comic Series #001 Title (2024).cbz"
metadata: dict[str, str | tuple[str, ...]] = comicfn2dict(path, verbose=0)
filename: str = dict2comicfn(metadata, ext=True, verbose=0)
```
## CLI

bin/docker-compose-exit.sh (new executable file, +6)

@@ -0,0 +1,6 @@
#!/bin/bash
# Run a docker compose service and return its exit code
set -euo pipefail
SERVICE=$1
# docker compose without the dash doesn't have the exit-code-from param
docker compose up --exit-code-from "$SERVICE" "$SERVICE"
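# Usage (as invoked by .circleci/config.yml): ./bin/docker-compose-exit.sh comicfn2dict-test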

bin/publish-pypi.sh (new executable file, +7)

@@ -0,0 +1,7 @@
#!/bin/bash
# Publish the created package
set -euo pipefail
cd "$(dirname "$0")/.."
pip3 install --upgrade pip
pip3 install --upgrade poetry
poetry publish -u "$PYPI_USER" -p "$PYPI_PASS"
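# Assumes PYPI_USER and PYPI_PASS are provided by the environment (e.g. CI secrets).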

comicfn2dict/__init__.py

@@ -1,3 +1,3 @@
"""Comic Filename to Dict parser and unparser."""
from .parse import comicfn2dict # noqa: F401
from .unparse import dict2comicfn # noqa: F401
from .parse import ComicFilenameParser, comicfn2dict # noqa: F401
from .unparse import ComicFilenameSerializer, dict2comicfn # noqa: F401

comicfn2dict/cli.py

@@ -4,17 +4,27 @@ from argparse import ArgumentParser
from pathlib import Path
from pprint import pprint
from comicfn2dict.parse import comicfn2dict
from comicfn2dict.parse import ComicFilenameParser
def main():
def main() -> None:
"""Test parser."""
description = "Comic book archive read/write tool."
parser = ArgumentParser(description=description)
parser.add_argument("path", help="Path of comic filename to parse", type=Path)
parser.add_argument(
"-v",
"--verbose",
default=0,
action="count",
help="Display intermediate parsing steps. Good for debugging.",
)
args = parser.parse_args()
name = args.path.name
metadata = comicfn2dict(name)
cfnparser = ComicFilenameParser(name, verbose=args.verbose)
metadata = cfnparser.parse()
if args.verbose:
print("=" * 80) # noqa:T201
pprint(metadata) # noqa:T203

comicfn2dict/comicfn2dict.py (deleted)

@@ -1,3 +0,0 @@
"""API import source."""
from comicfn2dict.parse import comicfn2dict # noqa: F401
from comicfn2dict.unparse import dict2comicfn # noqa: F401

comicfn2dict/log.py (new file, +9)

@@ -0,0 +1,9 @@
"""Print log header."""
def print_log_header(label: str) -> None:
"""Print log header."""
prefix = "-" * 3 + label
suffix_len = 80 - len(prefix)
suffix = "-" * suffix_len
print(prefix + suffix) # noqa: T201
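# For example (a sketch): print_log_header("After Issue") prints
# "---After Issue" padded with dashes out to an 80-character rule.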

comicfn2dict/parse.py

@@ -1,234 +1,364 @@
"""Parse comic book archive names using the simple 'parse' parser."""
from calendar import month_abbr
from copy import copy
from pathlib import Path
from pprint import pformat
from re import Match, Pattern
from typing import Any
from sys import maxsize
from comicfn2dict.log import print_log_header
from comicfn2dict.regex import (
DASH_SPLIT_RE,
EXTRA_SPACES_RE,
ISSUE_ANYWHERE_RE,
ALPHA_MONTH_RANGE_RE,
BOOK_VOLUME_RE,
ISSUE_BEGIN_RE,
ISSUE_COUNT_RE,
ISSUE_END_RE,
ISSUE_NUMBER_RE,
ISSUE_TOKEN_RE,
NON_SPACE_DIVIDER_RE,
ORIGINAL_FORMAT_RE,
ISSUE_WITH_COUNT_RE,
MONTH_FIRST_DATE_RE,
NON_NUMBER_DOT_RE,
ORIGINAL_FORMAT_SCAN_INFO_RE,
ORIGINAL_FORMAT_SCAN_INFO_SEPARATE_RE,
PUBLISHER_AMBIGUOUS_RE,
PUBLISHER_AMBIGUOUS_TOKEN_RE,
PUBLISHER_UNAMBIGUOUS_RE,
PUBLISHER_UNAMBIGUOUS_TOKEN_RE,
REGEX_SUBS,
REMAINING_GROUP_RE,
SCAN_INFO_RE,
SCAN_INFO_SECONDARY_RE,
TOKEN_DELIMETER,
VOLUME_RE,
YEAR_BEGIN_RE,
VOLUME_WITH_COUNT_RE,
YEAR_END_RE,
YEAR_FIRST_DATE_RE,
YEAR_TOKEN_RE,
)
_DATE_KEYS = frozenset({"year", "month", "day"})
_REMAINING_GROUP_KEYS = ("series", "title")
# Ordered by commonness.
_TITLE_PRECEDING_KEYS = ("issue", "year", "volume", "month")
def _parse_ext(name: str, suffix: str, metadata: dict) -> str:
class ComicFilenameParser:
"""Parse a filename metadata into a dict."""
def path_index(self, key: str, default: int = -1) -> int:
"""Lazily retrieve and memoize the key's location in the path."""
if key == "remainders":
return default
value: str = self.metadata.get(key, "") # type: ignore
if not value:
return default
if value not in self._path_indexes:
# XXX This is fragile, but it's difficult to calculate the original
# position at match time from the ever changing _unparsed_path.
index = self.path.rfind(value) if key == "ext" else self.path.find(value)
self._path_indexes[value] = index
return self._path_indexes[value]
def _log(self, label: str) -> None:
if not self._debug:
return
print_log_header(label)
combined = {}
for key in self.metadata:
combined[key] = (self.metadata.get(key), self.path_index(key))
print(" " + self._unparsed_path) # noqa: T201
print(" " + pformat(combined)) # noqa: T201
def _parse_ext(self) -> None:
"""Pop the extension from the pathname."""
data = name.removesuffix(suffix)
path = Path(self._unparsed_path)
suffix = path.suffix
if not suffix:
return
data = path.name.removesuffix(suffix)
ext = suffix.lstrip(".")
if ext:
metadata["ext"] = ext
return data
self.metadata["ext"] = ext
self._unparsed_path = data
def _clean_dividers(data: str) -> str:
def _clean_dividers(self) -> None:
"""Replace non space dividers and clean extra spaces out of string."""
data = NON_SPACE_DIVIDER_RE.sub(" ", data)
return EXTRA_SPACES_RE.sub(" ", data)
data = self._unparsed_path
# Simple substitutions
for regex, pair in REGEX_SUBS.items():
replacement, count = pair
data = regex.sub(replacement, data, count=count).strip()
self._unparsed_path = data.strip()
self._log("After Clean Path")
def _get_data_list(path: str | Path, metadata: dict) -> list[str]:
"""Prepare data list from a path or string."""
def _parse_items_update_metadata(
self, matches: Match, exclude: str, require_all: bool, first_only: bool
) -> bool:
"""Update Metadata."""
matched_metadata = {}
for key, value in matches.groupdict().items():
if value == exclude:
continue
if not value:
if require_all:
return False
continue
matched_metadata[key] = value
if first_only:
break
if not matched_metadata:
return False
self.metadata.update(matched_metadata)
return True
def _parse_items_pop_tokens(self, regex: Pattern, first_only: bool) -> None:
"""Pop tokens from unparsed path."""
count = 1 if first_only else 0
marked_str = regex.sub(TOKEN_DELIMETER, self._unparsed_path, count=count)
parts = []
for part in marked_str.split(TOKEN_DELIMETER):
if token := part.strip():
parts.append(token)
self._unparsed_path = TOKEN_DELIMETER.join(parts)
def _parse_items( # noqa: PLR0913
self,
regex: Pattern,
require_all: bool = False,
exclude: str = "",
first_only: bool = False,
pop: bool = True,
) -> None:
"""Parse a value from the data list into metadata and alter the data list."""
# Match
matches = regex.search(self._unparsed_path)
if not matches:
return
if not self._parse_items_update_metadata(
matches, exclude, require_all, first_only
):
return
if pop:
self._parse_items_pop_tokens(regex, first_only)
def _parse_issue(self) -> None:
"""Parse Issue."""
self._parse_items(ISSUE_NUMBER_RE)
if "issue" not in self.metadata:
self._parse_items(ISSUE_WITH_COUNT_RE)
self._log("After Issue")
def _parse_volume(self) -> None:
"""Parse Volume."""
self._parse_items(VOLUME_RE)
if "volume" not in self.metadata:
self._parse_items(VOLUME_WITH_COUNT_RE)
self._log("After Volume")
def _alpha_month_to_numeric(self) -> None:
"""Translate alpha_month to numeric month."""
if alpha_month := self.metadata.pop("alpha_month", ""):
alpha_month = alpha_month.capitalize() # type: ignore
for index, abbr in enumerate(month_abbr):
if abbr and alpha_month.startswith(abbr):
month = f"{index:02d}"
self.metadata["month"] = month
break
def _parse_dates(self) -> None:
"""Parse date schemes."""
# Discard second month of alpha month ranges.
self._unparsed_path = ALPHA_MONTH_RANGE_RE.sub(r"\1", self._unparsed_path)
# Month first date
self._parse_items(MONTH_FIRST_DATE_RE)
self._alpha_month_to_numeric()
# Year first date
if _DATE_KEYS - self.metadata.keys():
self._parse_items(YEAR_FIRST_DATE_RE)
self._alpha_month_to_numeric()
if "year" not in self.metadata:
self._parse_items(YEAR_TOKEN_RE, first_only=True)
if "volume" in self.metadata:
return
# A second year will be the real year.
# Move the first year to volume
if volume := self.metadata.get("year", ""):
self._parse_items(YEAR_TOKEN_RE)
if self.metadata.get("year", "") != volume:
self.metadata["volume"] = volume
self._log("After Date")
def _parse_format_and_scan_info(self) -> None:
"""Format & Scan Info."""
self._parse_items(
ORIGINAL_FORMAT_SCAN_INFO_RE,
require_all=True,
)
if "original_format" not in self.metadata:
self._parse_items(
ORIGINAL_FORMAT_SCAN_INFO_SEPARATE_RE,
)
self._parse_items(SCAN_INFO_SECONDARY_RE)
if (
scan_info_secondary := self.metadata.pop("secondary_scan_info", "")
) and "scan_info" not in self.metadata:
self.metadata["scan_info"] = scan_info_secondary # type: ignore
self._log("After original_format & scan_info")
def _parse_ends_of_remaining_tokens(self):
# Volume left on the end of string tokens
if "volume" not in self.metadata:
self._parse_items(BOOK_VOLUME_RE)
self._log("After original_format & scan_info")
# Years left on the end of string tokens
year_end_matched = False
if "year" not in self.metadata:
self._parse_items(YEAR_END_RE, pop=False)
year_end_matched = "year" in self.metadata
self._log("After Year on end of token")
# Issue left on the end of string tokens
if "issue" not in self.metadata and not year_end_matched:
exclude: str = self.metadata.get("year", "") # type: ignore
self._parse_items(ISSUE_END_RE, exclude=exclude)
if "issue" not in self.metadata:
self._parse_items(ISSUE_BEGIN_RE)
self._log("After Issue on ends of tokens")
def _parse_publisher(self) -> None:
"""Parse Publisher."""
# Pop single tokens so they don't end up titles.
self._parse_items(PUBLISHER_UNAMBIGUOUS_TOKEN_RE, first_only=True)
if "publisher" not in self.metadata:
self._parse_items(PUBLISHER_AMBIGUOUS_TOKEN_RE, first_only=True)
if "publisher" not in self.metadata:
self._parse_items(PUBLISHER_UNAMBIGUOUS_RE, pop=False, first_only=True)
if "publisher" not in self.metadata:
self._parse_items(PUBLISHER_AMBIGUOUS_RE, pop=False, first_only=True)
self._log("After publisher")
def _is_at_title_position(self, value: str) -> bool:
"""Title is in correct position."""
title_index = self.path.find(value)
# Titles must come after series but before format and scan_info
if (
title_index < self.path_index("series")
or title_index > self.path_index("original_format", maxsize)
or title_index > self.path_index("scan_info", maxsize)
):
return False
# Titles must be after the series and one other token.
title_ok = False
other_tokens_exist = False
for preceding_key in _TITLE_PRECEDING_KEYS:
other_tokens_exist = True
if title_index > self.path_index(preceding_key):
title_ok = True
break
return title_ok or not other_tokens_exist
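# A sketch of the position rule: in a hypothetical "Series #001 Title (2024)",
# "Title" follows the series and the issue token and precedes any format or
# scan_info group, so it is accepted as the title.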
def _grouping_operators_strip(self, value: str) -> str:
"""Strip spaces and parens."""
value = value.strip()
value = value.strip("()").strip()
value = value.strip("-").strip()
value = value.strip(",").strip()
value = value.strip("'").strip()
return value.strip('"').strip()
def _parse_series_and_title_token(
self, remaining_key_index: int, tokens: list[str]
) -> str:
"""Parse one series or title token."""
key = _REMAINING_GROUP_KEYS[remaining_key_index]
if key in self.metadata:
return ""
token = tokens.pop(0)
match = REMAINING_GROUP_RE.search(token)
if not match:
return token
value = match.group()
if key == "title" and not self._is_at_title_position(value):
return token
value = NON_NUMBER_DOT_RE.sub(r"\1 \2", value)
value = self._grouping_operators_strip(value)
if value:
self.metadata[key] = value
return ""
def _parse_series_and_title(self) -> None:
"""Assign series and title."""
if not self._unparsed_path:
return
remaining_key_index = 0
unused_tokens = []
tokens = self._unparsed_path.split(TOKEN_DELIMETER)
while tokens and remaining_key_index < len(_REMAINING_GROUP_KEYS):
unused_token = self._parse_series_and_title_token(
remaining_key_index, tokens
)
if unused_token:
unused_tokens.append(unused_token)
remaining_key_index += 1
self._unparsed_path = " ".join(unused_tokens) if unused_tokens else ""
self._log("After Series & Title")
def _add_remainders(self) -> None:
"""Add Remainders."""
remainders = []
for token in self._unparsed_path.split(TOKEN_DELIMETER):
if remainder := token.strip():
remainders.append(remainder)
if remainders:
self.metadata["remainders"] = tuple(remainders)
def parse(self) -> dict[str, str | tuple[str, ...]]:
"""Parse the filename with a hierarchy of regexes."""
self._log("Init")
self._parse_ext()
self._clean_dividers()
self._parse_issue()
self._parse_volume()
self._parse_dates()
self._parse_format_and_scan_info()
self._parse_ends_of_remaining_tokens()
self._parse_publisher()
self._parse_series_and_title()
# Copy volume into issue if it's all we have.
if "issue" not in self.metadata and "volume" in self.metadata:
self.metadata["issue"] = self.metadata["volume"]
self._log("After issue can be volume")
self._add_remainders()
return self.metadata
def __init__(self, path: str | Path, verbose: int = 0):
"""Initialize."""
self._debug: bool = verbose > 0
# munge path
if isinstance(path, str):
path = path.strip()
path = Path(path)
data = _parse_ext(path.name, path.suffix, metadata)
data = _clean_dividers(data)
return DASH_SPLIT_RE.split(data)
p_path = Path(path)
self.path = str(p_path.name).strip()
self.metadata: dict[str, str | tuple[str, ...]] = {}
self._unparsed_path = copy(self.path)
self._path_indexes: dict[str, int] = {}
def _paren_strip(value: str) -> str:
"""Strip spaces and parens."""
return value.strip().strip("()").strip()
def _splicey_dicey(
data_list: list[str], index: int, match: Match, match_group: int | str = 0
) -> str:
"""Replace a string token from a list with two strings and the value removed.
And return the value.
"""
value = match.group(match_group)
data = data_list.pop(index)
data_ends = []
if data_before := data[: match.start()].strip():
data_ends.append(data_before)
if data_after := data[match.end() :].strip():
data_ends.append(data_after)
data_list[index:index] = data_ends
return _paren_strip(value)
def _match_original_format_and_scan_info(
match: Match, metadata: dict[str, Any], data_list: list[str], index: int
) -> None:
"""Match (ORIGINAL_FORMAT-SCAN_INFO)."""
original_format = match.group("original_format")
try:
scan_info = match.group("scan_info")
except IndexError:
scan_info = None
metadata["original_format"] = _paren_strip(original_format)
match_group = 1
if scan_info:
metadata["scan_info"] = _paren_strip(scan_info)
match_group = 0
_splicey_dicey(data_list, index, match, match_group=match_group)
def _parse_original_format_and_scan_info(data_list: list[str], metadata: dict) -> int:
"""Parse (ORIGINAL_FORMAT-SCAN_INFO)."""
index = 0
match = None
for data in data_list:
match = ORIGINAL_FORMAT_SCAN_INFO_RE.search(data)
if match:
_match_original_format_and_scan_info(match, metadata, data_list, index)
break
index += 1
else:
index = 0
return index
def _pop_value_from_token(
data_list: list,
metadata: dict,
regex: Pattern,
key: str,
index: int = 0,
) -> Match:
"""Search token for value, splice and assign to metadata."""
data = data_list[index]
match = regex.search(data)
if match:
value = _splicey_dicey(data_list, index, match, key)
metadata[key] = value
return match
def _parse_item(
data_list: list[str],
metadata: dict,
regex: Pattern,
key: str,
start_index: int = 0,
) -> int:
"""Parse a value from the data list into metadata and alter the data list."""
index = start_index
dl_len = end_index = len(data_list)
if index >= end_index:
index = 0
while index < end_index:
match = _pop_value_from_token(data_list, metadata, regex, key, index)
if match:
break
index += 1
if index > dl_len and start_index > 0:
index = 0
end_index = start_index
return index
def _pop_issue_from_text_fields(
data_list: list[str], metadata: dict, index: int
) -> str:
"""Search issue from ends of text fields."""
if "issue" not in metadata:
_pop_value_from_token(data_list, metadata, ISSUE_END_RE, "issue", index=index)
if "issue" not in metadata:
_pop_value_from_token(data_list, metadata, ISSUE_BEGIN_RE, "issue", index=index)
return data_list.pop(index)
def _assign_remaining_groups(data_list: list[str], metadata: dict):
"""Assign series and title."""
index = 0
for key in _REMAINING_GROUP_KEYS:
try:
data = data_list[index]
except (IndexError, TypeError):
break
match = REMAINING_GROUP_RE.search(data) if data else None
if match:
value = _pop_issue_from_text_fields(data_list, metadata, index)
value = _paren_strip(value)
if value:
metadata[key] = value
else:
index += 1
def _pickup_issue(remainders: list[str], metadata: dict) -> None:
"""Get issue from remaining tokens or anywhere in a pinch."""
if "issue" in metadata:
return
_parse_item(remainders, metadata, ISSUE_TOKEN_RE, "issue")
if "issue" in metadata:
return
_parse_item(remainders, metadata, ISSUE_ANYWHERE_RE, "issue")
def comicfn2dict(path: str | Path) -> dict[str, Any]:
"""Parse the filename with a hierarchy of regexes."""
metadata = {}
data_list = _get_data_list(path, metadata)
# Parse paren tokens
_parse_item(data_list, metadata, ISSUE_COUNT_RE, "issue_count")
_parse_item(data_list, metadata, YEAR_TOKEN_RE, "year")
of_index = _parse_original_format_and_scan_info(data_list, metadata)
if "original_format" not in metadata:
of_index = _parse_item(
data_list, metadata, ORIGINAL_FORMAT_RE, "original_format"
)
if "scan_info" not in metadata:
# Start searching for scan_info after original format.
_parse_item(
data_list,
metadata,
SCAN_INFO_RE,
"scan_info",
start_index=of_index + 1,
)
# Parse regular tokens
_parse_item(data_list, metadata, VOLUME_RE, "volume")
_parse_item(data_list, metadata, ISSUE_NUMBER_RE, "issue")
# Pickup year if not gotten.
if "year" not in metadata:
_parse_item(data_list, metadata, YEAR_BEGIN_RE, "year")
if "year" not in metadata:
_parse_item(data_list, metadata, YEAR_END_RE, "year")
# Pickup issue if it's a standalone token
if "issue" not in metadata:
_parse_item(data_list, metadata, ISSUE_TOKEN_RE, "issue")
# Series and Title. Also looks for issue.
_assign_remaining_groups(data_list, metadata)
# Final try for issue number.
_pickup_issue(data_list, metadata)
# Add Remainders
if data_list:
metadata["remainders"] = tuple(data_list)
return metadata
def comicfn2dict(
path: str | Path, verbose: int = 0
) -> dict[str, str | tuple[str, ...]]:
"""Simplfily the API."""
parser = ComicFilenameParser(path, verbose=verbose)
return parser.parse()

comicfn2dict/regex.py

@@ -1,15 +1,32 @@
"""Parsing regexes."""
import re
from re import IGNORECASE, Pattern, compile
from types import MappingProxyType
PUBLISHERS_UNAMBIGUOUS: tuple[str, ...] = (
r"Abrams ComicArts",
r"BOOM! Studios",
r"DC(\sComics)?",
r"Dark Horse Comics",
r"Drawn & Quarterly",
r"Dynamite Entertainment",
r"IDW Publishing",
r"Icon Comics",
r"Kodansha",
r"Oni Press",
r"Pantheon Books",
r"SLG Publishing",
r"SelfMadeHero",
r"Titan Comics",
)
PUBLISHERS_AMBIGUOUS: tuple[str, ...] = (
r"(?<!Capt\.\s)(?<!Capt\s)(?<!Captain\s)Marvel",
r"Heavy Metal",
r"Epic",
r"Image",
r"Mirage",
)
def re_compile(exp, parenthify=False):
"""Compile regex with options."""
if parenthify:
exp = r"\(" + exp + r"\)"
return re.compile(exp, flags=re.IGNORECASE)
ORIGINAL_FORMAT_PATTERNS = (
ORIGINAL_FORMAT_PATTERNS: tuple[str, ...] = (
r"Anthology",
r"(One|1)[-\s]Shot",
r"Annual",
@@ -35,41 +52,160 @@ ORIGINAL_FORMAT_PATTERNS = (
r"Sketch",
r"TPB",
r"Trade[-\s]Paper[-\s]?Back",
r"Web([-\s]?Comic)?",
r"Web([-\s]?(Comic|Rip))?",
)
MONTHS: tuple[str, ...] = (
r"Jan(uary)?",
r"Feb(ruary)?",
r"Mar(ch)?",
r"Apr(il)?",
r"May",
r"Jun(e)?",
r"Jul(y)?",
r"Aug(ust)?",
r"Sep(tember)?",
r"Oct(ober)?",
r"Nov(ember)?",
r"Dec(ember)?",
)
TOKEN_DELIMETER: str = r"/"
def re_compile(exp: str, parenthify: bool = False) -> Pattern:
"""Compile regex with options."""
if parenthify:
exp = r"\(" + exp + r"\)"
return compile(exp, flags=IGNORECASE)
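# e.g. re_compile(r"(?P<year>[12]\d{3})", parenthify=True) matches "(2024)"
# but not a bare "2024"; every pattern compiles case-insensitively.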
# CLEAN
NON_SPACE_DIVIDER_RE = re_compile(r"[_\+]")
DASH_SPLIT_RE = re_compile(r"\s-\s")
EXTRA_SPACES_RE = re_compile(r"\s\s+")
_TOKEN_DIVIDERS_RE = re_compile(r":")
_SPACE_EQUIVALENT_RE = re_compile(r"_")
_EXTRA_SPACES_RE = re_compile(r"\s\s+")
_LEFT_PAREN_EQUIVALENT_RE = re_compile(r"\[")
_RIGHT_PAREN_EQUIVALENT_RE = re_compile(r"\]")
_DOUBLE_UNDERSCORE_RE = re_compile(r"__(.*)__")
REGEX_SUBS: MappingProxyType[Pattern, tuple[str, int]] = MappingProxyType(
{
_DOUBLE_UNDERSCORE_RE: (r"(\1)", 0),
_TOKEN_DIVIDERS_RE: (TOKEN_DELIMETER, 1),
_SPACE_EQUIVALENT_RE: (r" ", 0),
_EXTRA_SPACES_RE: (r" ", 0),
_LEFT_PAREN_EQUIVALENT_RE: (r"(", 0),
_RIGHT_PAREN_EQUIVALENT_RE: (r")", 0),
}
)
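# A sketch of the cleaning pass: "__Series__" -> "(Series)", "_" -> " ",
# the first ":" -> TOKEN_DELIMETER ("/"), and "[" / "]" -> "(" / ")".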
### DATES
_YEAR_RE_EXP = r"(?P<year>[12]\d{3})"
_MONTH_ALPHA_RE_EXP = r"(" + "(?P<alpha_month>" + r"|".join(MONTHS) + r")\.?" r")"
_MONTH_NUMERIC_RE_EXP = r"(?P<month>0?\d|1[0-2]?)"
_MONTH_RE_EXP = r"(" + _MONTH_ALPHA_RE_EXP + r"|" + _MONTH_NUMERIC_RE_EXP + r")"
_ALPHA_MONTH_RANGE = (
r"\b" # noqa: ISC003
+ r"("
+ r"|".join(MONTHS)
+ r")"
+ r"("
+ r"\.?-"
+ r"("
+ r"|".join(MONTHS)
+ r")"
+ r")\b"
)
ALPHA_MONTH_RANGE_RE: Pattern = re_compile(_ALPHA_MONTH_RANGE)
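# e.g. the "Jan-Feb" in a hypothetical "Series Jan-Feb 2024" is reduced to
# its first month by _parse_dates() before date matching.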
_DAY_RE_EXP = r"(?P<day>([0-2]?\d|(3)[0-1]))"
_DATE_DELIM = r"[-\s]+"
_MONTH_FIRST_DATE_RE_EXP = (
r"((\b|\(?)"
# Month
+ _MONTH_RE_EXP
# Day
+ r"("
+ _DATE_DELIM
+ _DAY_RE_EXP
+ r")?"
# Year
+ r"[,]?"
+ _DATE_DELIM
+ _YEAR_RE_EXP
+ r"(\)?|\b))"
)
_YEAR_FIRST_DATE_RE_EXP = (
r"(\b\(?"
+ _YEAR_RE_EXP
+ _DATE_DELIM
+ _MONTH_RE_EXP
+ _DATE_DELIM
+ _DAY_RE_EXP
+ r"\b\)?)"
)
MONTH_FIRST_DATE_RE: Pattern = re_compile(_MONTH_FIRST_DATE_RE_EXP)
YEAR_FIRST_DATE_RE: Pattern = re_compile(_YEAR_FIRST_DATE_RE_EXP)
YEAR_TOKEN_RE: Pattern = re_compile(_YEAR_RE_EXP, parenthify=True)
YEAR_END_RE: Pattern = re_compile(_YEAR_RE_EXP + r"\/|$")
# PAREN GROUPS
ISSUE_COUNT_RE = re_compile(r"of\s*(?P<issue_count>\d+)", parenthify=True)
_YEAR_RE_EXP = r"(?P<year>[12]\d{3})"
YEAR_TOKEN_RE = re_compile(_YEAR_RE_EXP, parenthify=True)
YEAR_BEGIN_RE = re_compile(r"^" + _YEAR_RE_EXP + r"\b")
YEAR_END_RE = re_compile(r"\b" + _YEAR_RE_EXP + r"$")
_OF_PATTERNS = r"|".join(ORIGINAL_FORMAT_PATTERNS)
_ORIGINAL_FORMAT_RE_EXP = r"(?P<original_format>" + _OF_PATTERNS + r")"
ORIGINAL_FORMAT_RE = re_compile(_ORIGINAL_FORMAT_RE_EXP, parenthify=True)
_SCAN_INFO_RE_EXP = r"(?P<scan_info>[^()]+?)"
SCAN_INFO_RE = re_compile(_SCAN_INFO_RE_EXP, parenthify=True)
_SCAN_INFO_RE_EXP = r"(?P<scan_info>[^()]*)"
_ORIGINAL_FORMAT_SCAN_INFO_RE_EXP = (
_ORIGINAL_FORMAT_RE_EXP + r"(?:-" + _SCAN_INFO_RE_EXP + r")?"
_ORIGINAL_FORMAT_RE_EXP + r"\s*[\(:-]" + _SCAN_INFO_RE_EXP # + r")?"
)
ORIGINAL_FORMAT_SCAN_INFO_RE = re_compile(
# Keep this even though comicfn2dict doesn't use it directly
ORIGINAL_FORMAT_RE: Pattern = re_compile(_ORIGINAL_FORMAT_RE_EXP, parenthify=True)
ORIGINAL_FORMAT_SCAN_INFO_RE: Pattern = re_compile(
_ORIGINAL_FORMAT_SCAN_INFO_RE_EXP, parenthify=True
)
ORIGINAL_FORMAT_SCAN_INFO_SEPARATE_RE: Pattern = re_compile(
r"\(" + _ORIGINAL_FORMAT_RE_EXP + r"\).*\(" + _SCAN_INFO_RE_EXP + r"\)"
)
# REGULAR TOKENS
VOLUME_RE = re_compile(r"((?:v(?:ol(?:ume)?)?\.?)\s*(?P<volume>\d+))")
_ISSUE_RE_EXP = r"(?P<issue>[\d½]+\.?\d*\w*)"
ISSUE_NUMBER_RE = re_compile(r"(#" + _ISSUE_RE_EXP + r")")
ISSUE_TOKEN_RE = re_compile(r"^(" + _ISSUE_RE_EXP + r")$")
ISSUE_END_RE = re_compile(r"\b(" + _ISSUE_RE_EXP + r")$")
ISSUE_BEGIN_RE = re_compile(r"^(" + _ISSUE_RE_EXP + r")\b")
ISSUE_ANYWHERE_RE = re_compile(r"\b(" + _ISSUE_RE_EXP + r")\b")
SCAN_INFO_SECONDARY_RE: Pattern = re_compile(r"\b(?P<secondary_scan_info>c2c)\b")
# ISSUE
_ISSUE_RE_EXP = r"(?P<issue>\w*(½|\d+)[\.\d+]*\w*)"
_ISSUE_COUNT_RE_EXP = r"\(of\s*(?P<issue_count>\d+)\)"
ISSUE_NUMBER_RE: Pattern = re_compile(
r"(\(?#" + _ISSUE_RE_EXP + r"\)?)" + r"(\W*" + _ISSUE_COUNT_RE_EXP + r")?"
)
ISSUE_WITH_COUNT_RE: Pattern = re_compile(
r"(\(?" + _ISSUE_RE_EXP + r"\)?" + r"\W*" + _ISSUE_COUNT_RE_EXP + r")"
)
ISSUE_END_RE: Pattern = re_compile(r"([\/\s]\(?" + _ISSUE_RE_EXP + r"\)?(\/|$))")
ISSUE_BEGIN_RE: Pattern = re_compile(r"((^|\/)\(?" + _ISSUE_RE_EXP + r"\)?[\/|\s])")
# Volume
_VOLUME_COUNT_RE_EXP = r"\(of\s*(?P<volume_count>\d+)\)"
VOLUME_RE: Pattern = re_compile(
r"(" + r"(?:v(?:ol(?:ume)?)?\.?)\s*(?P<volume>\d+)" # noqa: ISC003
r"(\W*" + _VOLUME_COUNT_RE_EXP + r")?" + r")"
)
VOLUME_WITH_COUNT_RE: Pattern = re_compile(
r"(\(?" + r"(?P<volume>\d+)" + r"\)?" + r"\W*" + _VOLUME_COUNT_RE_EXP + r")"
)
BOOK_VOLUME_RE: Pattern = re_compile(r"(?P<title>" + r"book\s*(?P<volume>\d+)" + r")")
# Publisher
_PUBLISHER_UNAMBIGUOUS_RE_EXP = (
r"(\b(?P<publisher>" + r"|".join(PUBLISHERS_UNAMBIGUOUS) + r")\b)"
)
_PUBLISHER_AMBIGUOUS_RE_EXP = (
r"(\b(?P<publisher>" + r"|".join(PUBLISHERS_AMBIGUOUS) + r")\b)"
)
PUBLISHER_UNAMBIGUOUS_TOKEN_RE: Pattern = re_compile(
r"(^|\/)" + _PUBLISHER_UNAMBIGUOUS_RE_EXP + r"($|\/)"
)
PUBLISHER_AMBIGUOUS_TOKEN_RE: Pattern = re_compile(
r"(^|\/)" + _PUBLISHER_AMBIGUOUS_RE_EXP + r"($|\/)"
)
PUBLISHER_UNAMBIGUOUS_RE: Pattern = re_compile(_PUBLISHER_UNAMBIGUOUS_RE_EXP)
PUBLISHER_AMBIGUOUS_RE = re_compile(_PUBLISHER_AMBIGUOUS_RE_EXP)
# LONG STRINGS
REMAINING_GROUP_RE = re_compile(r"^[\w].*[^\)]")
REMAINING_GROUP_RE: Pattern = re_compile(r"^[^\(].*[^\)]")
NON_NUMBER_DOT_RE: Pattern = re_compile(r"(\D)\.(\D)")

comicfn2dict/unparse.py

@@ -1,5 +1,10 @@
"""Unparse comic filenames."""
from collections.abc import Callable, Mapping
from calendar import month_abbr
from collections.abc import Callable, Mapping, Sequence
from contextlib import suppress
from types import MappingProxyType
from comicfn2dict.log import print_log_header
def issue_formatter(issue: str) -> str:
@@ -18,33 +23,99 @@ _PAREN_FMT: str = "({})"
_FILENAME_FORMAT_TAGS: tuple[tuple[str, str | Callable], ...] = (
("series", "{}"),
("volume", "v{}"),
("volume_count", "(of {:03})"),
("issue", issue_formatter),
("issue_count", "(of {:03})"),
("year", _PAREN_FMT),
("date", _PAREN_FMT),
("title", "{}"),
("publisher", _PAREN_FMT),
("original_format", _PAREN_FMT),
("scan_info", _PAREN_FMT),
)
_EMPTY_VALUES: tuple[None, str] = (None, "")
_DEFAULT_EXT = "cbz"
_DATE_KEYS = ("year", "month", "day")
def dict2comicfn(md: Mapping, ext: bool = True) -> str | None:
class ComicFilenameSerializer:
"""Serialize Comic Filenames from dict."""
def _log(self, label: str, fn: str) -> None:
"""Log progress."""
if not self._debug:
return
print_log_header(label)
print(fn) # noqa: T201
def _add_date(self) -> None:
"""Construct date from Y-m-D if they exist."""
if "date" in self.metadata:
return
parts = []
for key in _DATE_KEYS:
    if part := self.metadata.get(key):
        if key == "month" and not parts:
            with suppress(TypeError):
                part = month_abbr[int(part)]
        parts.append(part)
    elif key == "month" and not parts:
        # noop if only day.
        break
if parts:
parts = (str(part) for part in parts)
date = "-".join(parts)
self._log("After date", date)
self.metadata = MappingProxyType({**self.metadata, "date": date})
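# e.g. (a sketch) {"year": "2024", "month": "11", "day": "01"} -> date "2024-11-01";
# a month with no preceding year is converted via month_abbr ("11" -> "Nov").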
def _tokenize_tag(self, tag: str, fmt: str | Callable) -> str:
"""Add tags to the string."""
val = self.metadata.get(tag)
if val in _EMPTY_VALUES:
return ""
final_fmt = fmt(val) if isinstance(fmt, Callable) else fmt
return final_fmt.format(val).strip()
def _add_remainder(self) -> str:
"""Add the remainders specially."""
if remainders := self.metadata.get("remainders"):
if isinstance(remainders, Sequence):
remainders = (str(remainder) for remainder in remainders)
remainder = " ".join(remainders)
else:
remainder = str(remainders)
return f"[{remainder}]"
return ""
def serialize(self) -> str:
"""Get our preferred basename from a metadata dict."""
if not md:
return None
self._add_date()
tokens = []
for tag, fmt in _FILENAME_FORMAT_TAGS:
val = md.get(tag)
if val in _EMPTY_VALUES:
continue
final_fmt = fmt(val) if isinstance(fmt, Callable) else fmt
token = final_fmt.format(val).strip()
if token:
if token := self._tokenize_tag(tag, fmt):
tokens.append(token)
self._log(f"After {tag}", str(tokens))
fn = " ".join(tokens)
if remainders := md.get("remainders"):
remainder = " ".join(remainders)
fn += f" - {remainder}"
if ext:
fn += "." + md.get("ext", "cbz")
fn += self._add_remainder()
self._log("After remainder", fn)
if self._ext:
ext = self.metadata.get("ext", _DEFAULT_EXT)
fn += f".{ext}"
self._log("After ext", fn)
return fn
def __init__(self, metadata: Mapping, ext: bool = True, verbose: int = 0):
"""Initialize."""
self.metadata: Mapping = metadata
self._ext: bool = ext
self._debug: bool = bool(verbose)
def dict2comicfn(md: Mapping, ext: bool = True, verbose: int = 0) -> str:
"""Simplify API."""
serializer = ComicFilenameSerializer(md, ext=ext, verbose=verbose)
return serializer.serialize()

debian.sources (new file, +11)

@@ -0,0 +1,11 @@
Types: deb
URIs: http://deb.debian.org/debian
Suites: bookworm bookworm-updates
Components: main contrib non-free
Signed-By: /usr/share/keyrings/debian-archive-keyring.gpg

Types: deb
URIs: http://deb.debian.org/debian-security
Suites: bookworm-security
Components: main contrib non-free
Signed-By: /usr/share/keyrings/debian-archive-keyring.gpg

docker-compose.yaml (new file, +21)

@@ -0,0 +1,21 @@
services:
comicfn2dict-builder:
build: .
image: comicfn2dict-builder
container_name: comicfn2dict-builder
comicfn2dict-lint:
image: comicfn2dict-builder
container_name: comicfn2dict-lint
command: make lint
comicfn2dict-test:
image: comicfn2dict-builder
container_name: comicfn2dict-test
command: make test
volumes:
- ./test-results/:/app/test-results/
comicfn2dict-build:
image: comicfn2dict-builder
container_name: comicfn2dict-build
volumes:
- ./dist/:/app/dist/
command: poetry build

package-lock.json (generated, 176 lines changed)

@@ -442,9 +442,9 @@
}
},
"node_modules/@eslint/js": {
"version": "8.56.0",
"resolved": "https://registry.npmjs.org/@eslint/js/-/js-8.56.0.tgz",
"integrity": "sha512-gMsVel9D7f2HLkBma9VbtzZRehRogVRfbr++f06nL2vnCGCNlzOD+/MUov/F4p8myyAHspEhVobgjpX64q5m6A==",
"version": "8.57.0",
"resolved": "https://registry.npmjs.org/@eslint/js/-/js-8.57.0.tgz",
"integrity": "sha512-Ys+3g2TaW7gADOJzPt83SJtCDhMjndcDMFVQ/Tj9iA1BfJzFKD9mAUXT3OenpuPHbI6P/myECxRJrofUsDx/5g==",
"dev": true,
"engines": {
"node": "^12.22.0 || ^14.17.0 || >=16.0.0"
@@ -811,9 +811,9 @@
"dev": true
},
"node_modules/@types/estree-jsx": {
"version": "1.0.4",
"resolved": "https://registry.npmjs.org/@types/estree-jsx/-/estree-jsx-1.0.4.tgz",
"integrity": "sha512-5idy3hvI9lAMqsyilBM+N+boaCf1MgoefbDxN6KEO5aK17TOHwFAYT9sjxzeKAiIWRUBgLxmZ9mPcnzZXtTcRQ==",
"version": "1.0.5",
"resolved": "https://registry.npmjs.org/@types/estree-jsx/-/estree-jsx-1.0.5.tgz",
"integrity": "sha512-52CcUVNFyfb1A2ALocQw/Dd1BQFNmSdkuC3BkZ6iqhdMfQz7JWOFRuJFloOzjk+6WijU56m9oKXFAXc7o3Towg==",
"dev": true,
"dependencies": {
"@types/estree": "*"
@@ -856,9 +856,9 @@
"dev": true
},
"node_modules/@types/node": {
"version": "20.11.19",
"resolved": "https://registry.npmjs.org/@types/node/-/node-20.11.19.tgz",
"integrity": "sha512-7xMnVEcZFu0DikYjWOlRq7NTPETrm7teqUT2WkQjrTIkEgUyyGdWsj/Zg8bEJt5TNklzbPD1X3fqfsHw3SpapQ==",
"version": "20.11.20",
"resolved": "https://registry.npmjs.org/@types/node/-/node-20.11.20.tgz",
"integrity": "sha512-7/rR21OS+fq8IyHTgtLkDK949uzsa6n8BkziAKtPVpugIkO6D+/ooXMvzXxDnZrmtXVfjb1bKQafYpb8s89LOg==",
"dev": true,
"dependencies": {
"undici-types": "~5.26.4"
@@ -1195,10 +1195,13 @@
}
},
"node_modules/available-typed-arrays": {
"version": "1.0.6",
"resolved": "https://registry.npmjs.org/available-typed-arrays/-/available-typed-arrays-1.0.6.tgz",
"integrity": "sha512-j1QzY8iPNPG4o4xmO3ptzpRxTciqD3MgEHtifP/YnJpIo58Xu+ne4BejlbkuaLfXn/nz6HFiw29bLpj2PNMdGg==",
"version": "1.0.7",
"resolved": "https://registry.npmjs.org/available-typed-arrays/-/available-typed-arrays-1.0.7.tgz",
"integrity": "sha512-wvUjBtSGN7+7SjNpq/9M2Tg350UZD3q62IFZLbRAR1bSMlCo1ZaeW+BJ+D090e4hIIZLBcTDWe4Mh4jvUDajzQ==",
"dev": true,
"dependencies": {
"possible-typed-array-names": "^1.0.0"
},
"engines": {
"node": ">= 0.4"
},
@@ -1361,9 +1364,9 @@
}
},
"node_modules/caniuse-lite": {
"version": "1.0.30001587",
"resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001587.tgz",
"integrity": "sha512-HMFNotUmLXn71BQxg8cijvqxnIAofforZOwGsxyXJ0qugTdspUF4sPSJ2vhgprHCB996tIDzEq1ubumPDV8ULA==",
"version": "1.0.30001589",
"resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001589.tgz",
"integrity": "sha512-vNQWS6kI+q6sBlHbh71IIeC+sRwK2N3EDySc/updIGhIee2x5z00J4c1242/5/d6EpEMdOnk/m+6tuk4/tcsqg==",
"dev": true,
"funding": [
{
@@ -1777,9 +1780,9 @@
"dev": true
},
"node_modules/electron-to-chromium": {
"version": "1.4.673",
"resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.4.673.tgz",
"integrity": "sha512-zjqzx4N7xGdl5468G+vcgzDhaHkaYgVcf9MqgexcTqsl2UHSCmOj/Bi3HAprg4BZCpC7HyD8a6nZl6QAZf72gw==",
"version": "1.4.681",
"resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.4.681.tgz",
"integrity": "sha512-1PpuqJUFWoXZ1E54m8bsLPVYwIVCRzvaL+n5cjigGga4z854abDnFRc+cTa2th4S79kyGqya/1xoR7h+Y5G5lg==",
"dev": true
},
"node_modules/emoji-regex": {
@@ -1880,14 +1883,14 @@
}
},
"node_modules/es-set-tostringtag": {
"version": "2.0.2",
"resolved": "https://registry.npmjs.org/es-set-tostringtag/-/es-set-tostringtag-2.0.2.tgz",
"integrity": "sha512-BuDyupZt65P9D2D2vA/zqcI3G5xRsklm5N3xCwuiy+/vKy8i0ifdsQP1sLgO4tZDSCaQUSnmC48khknGMV3D2Q==",
"version": "2.0.3",
"resolved": "https://registry.npmjs.org/es-set-tostringtag/-/es-set-tostringtag-2.0.3.tgz",
"integrity": "sha512-3T8uNMC3OQTHkFUsFq8r/BwAXLHvU/9O9mE0fBc/MY5iq/8H7ncvO947LmYA6ldWw9Uh8Yhf25zu6n7nML5QWQ==",
"dev": true,
"dependencies": {
"get-intrinsic": "^1.2.2",
"has-tostringtag": "^1.0.0",
"hasown": "^2.0.0"
"get-intrinsic": "^1.2.4",
"has-tostringtag": "^1.0.2",
"hasown": "^2.0.1"
},
"engines": {
"node": ">= 0.4"
@@ -1941,16 +1944,16 @@
}
},
"node_modules/eslint": {
"version": "8.56.0",
"resolved": "https://registry.npmjs.org/eslint/-/eslint-8.56.0.tgz",
"integrity": "sha512-Go19xM6T9puCOWntie1/P997aXxFsOi37JIHRWI514Hc6ZnaHGKY9xFhrU65RT6CcBEzZoGG1e6Nq+DT04ZtZQ==",
"version": "8.57.0",
"resolved": "https://registry.npmjs.org/eslint/-/eslint-8.57.0.tgz",
"integrity": "sha512-dZ6+mexnaTIbSBZWgou51U6OmzIhYM2VcNdtiTtI7qPNZm35Akpr0f6vtw3w1Kmn5PYo+tZVfh13WrhpS6oLqQ==",
"dev": true,
"dependencies": {
"@eslint-community/eslint-utils": "^4.2.0",
"@eslint-community/regexpp": "^4.6.1",
"@eslint/eslintrc": "^2.1.4",
"@eslint/js": "8.56.0",
"@humanwhocodes/config-array": "^0.11.13",
"@eslint/js": "8.57.0",
"@humanwhocodes/config-array": "^0.11.14",
"@humanwhocodes/module-importer": "^1.0.1",
"@nodelib/fs.walk": "^1.2.8",
"@ungap/structured-clone": "^1.2.0",
@@ -2736,9 +2739,9 @@
}
},
"node_modules/flatted": {
"version": "3.2.9",
"resolved": "https://registry.npmjs.org/flatted/-/flatted-3.2.9.tgz",
"integrity": "sha512-36yxDn5H7OFZQla0/jFJmbIKTdZAQHngCedGxiMmpNfEZM0sdEeT+WczLQrjK6D7o2aiyLYDnkw0R3JK0Qv1RQ==",
"version": "3.3.1",
"resolved": "https://registry.npmjs.org/flatted/-/flatted-3.3.1.tgz",
"integrity": "sha512-X8cqMLLie7KsNUDSdzeN8FYK9rEt4Dt67OsG/DNGnYTSDBG4uFAJFBnUeiV+zCVAvwFy56IjM9sH51jVaEhNxw==",
"dev": true
},
"node_modules/for-each": {
@@ -3042,9 +3045,9 @@
}
},
"node_modules/has-proto": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/has-proto/-/has-proto-1.0.1.tgz",
"integrity": "sha512-7qE+iP+O+bgF9clE5+UoBFzE65mlBiVj3tKCrlNQ0Ogwm0BjpT/gK4SlLYDMybDh5I3TCTKnPPa0oMG7JDYrhg==",
"version": "1.0.3",
"resolved": "https://registry.npmjs.org/has-proto/-/has-proto-1.0.3.tgz",
"integrity": "sha512-SJ1amZAJUiZS+PhsVLf5tGydlaVB8EdFpaSO4gmiUKUOxk8qzn5AIy4ZeJUmh22znIdk/uMAUT2pl3FxzVUH+Q==",
"dev": true,
"engines": {
"node": ">= 0.4"
@@ -3429,9 +3432,9 @@
}
},
"node_modules/is-negative-zero": {
"version": "2.0.2",
"resolved": "https://registry.npmjs.org/is-negative-zero/-/is-negative-zero-2.0.2.tgz",
"integrity": "sha512-dqJvarLawXsFbNDeJW7zAz8ItJ9cd28YufuuFzh0G8pNHjJMnY08Dv7sYX2uF5UpQOwieAeOExEYAWWfu7ZZUA==",
"version": "2.0.3",
"resolved": "https://registry.npmjs.org/is-negative-zero/-/is-negative-zero-2.0.3.tgz",
"integrity": "sha512-5KoIu2Ngpyek75jXodFvnafB6DJgr3u8uuK0LEZJjrU19DrMD3EVERaR8sjz8CCGgpZvxPl9SuE1GMVPFHx1mw==",
"dev": true,
"engines": {
"node": ">= 0.4"
@@ -3522,12 +3525,15 @@
}
},
"node_modules/is-shared-array-buffer": {
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/is-shared-array-buffer/-/is-shared-array-buffer-1.0.2.tgz",
"integrity": "sha512-sqN2UDu1/0y6uvXyStCOzyhAjCSlHceFoMKJW8W9EU9cvic/QdsZ0kEU93HEy3IUEFZIiH/3w+AH/UQbPHNdhA==",
"version": "1.0.3",
"resolved": "https://registry.npmjs.org/is-shared-array-buffer/-/is-shared-array-buffer-1.0.3.tgz",
"integrity": "sha512-nA2hv5XIhLR3uVzDDfCIknerhx8XUKnstuOERPNNIinXG7v9u+ohXF67vxm4TPTEPU6lm61ZkwP3c9PCB97rhg==",
"dev": true,
"dependencies": {
"call-bind": "^1.0.2"
"call-bind": "^1.0.7"
},
"engines": {
"node": ">= 0.4"
},
"funding": {
"url": "https://github.com/sponsors/ljharb"
@@ -6230,6 +6236,15 @@
"node": ">=4"
}
},
"node_modules/possible-typed-array-names": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/possible-typed-array-names/-/possible-typed-array-names-1.0.0.tgz",
"integrity": "sha512-d7Uw+eZoloe0EHDIYoe+bQ5WXnGMOpmiZFTuMWCwpjzzkL2nTjcKiAk4hh8TjnGye2TwWOk3UXucZ+3rbmBa8Q==",
"dev": true,
"engines": {
"node": ">= 0.4"
}
},
"node_modules/prelude-ls": {
"version": "1.2.1",
"resolved": "https://registry.npmjs.org/prelude-ls/-/prelude-ls-1.2.1.tgz",
@@ -6273,12 +6288,12 @@
"dev": true
},
"node_modules/prettier-plugin-packagejson": {
"version": "2.4.11",
"resolved": "https://registry.npmjs.org/prettier-plugin-packagejson/-/prettier-plugin-packagejson-2.4.11.tgz",
"integrity": "sha512-zmOmM96GkAjT2zUdHSQJnpyVpbisBkewDluo2NLHjI/JN7uOCZlEzWVaMhdqyZ8LVdQDfzamvbvSw4swd3Az1A==",
"version": "2.4.12",
"resolved": "https://registry.npmjs.org/prettier-plugin-packagejson/-/prettier-plugin-packagejson-2.4.12.tgz",
"integrity": "sha512-hifuuOgw5rHHTdouw9VrhT8+Nd7UwxtL1qco8dUfd4XUFQL6ia3xyjSxhPQTsGnSYFraTWy5Omb+MZm/OWDTpQ==",
"dev": true,
"dependencies": {
"sort-package-json": "2.7.0",
"sort-package-json": "2.8.0",
"synckit": "0.9.0"
},
"peerDependencies": {
@@ -11022,14 +11037,15 @@
}
},
"node_modules/set-function-name": {
"version": "2.0.1",
"resolved": "https://registry.npmjs.org/set-function-name/-/set-function-name-2.0.1.tgz",
"integrity": "sha512-tMNCiqYVkXIZgc2Hnoy2IvC/f8ezc5koaRFkCjrpWzGpCd3qbZXPzVy9MAZzK1ch/X0jvSkojys3oqJN0qCmdA==",
"version": "2.0.2",
"resolved": "https://registry.npmjs.org/set-function-name/-/set-function-name-2.0.2.tgz",
"integrity": "sha512-7PGFlmtwsEADb0WYyvCMa1t+yke6daIG4Wirafur5kcf+MhUnPms1UeR0CKQdTZD81yESwMHbtn+TR+dMviakQ==",
"dev": true,
"dependencies": {
"define-data-property": "^1.0.1",
"define-data-property": "^1.1.4",
"es-errors": "^1.3.0",
"functions-have-names": "^1.2.3",
"has-property-descriptors": "^1.0.0"
"has-property-descriptors": "^1.0.2"
},
"engines": {
"node": ">= 0.4"
@@ -11118,9 +11134,9 @@
"dev": true
},
"node_modules/sort-package-json": {
"version": "2.7.0",
"resolved": "https://registry.npmjs.org/sort-package-json/-/sort-package-json-2.7.0.tgz",
"integrity": "sha512-6AayF8bp6L+WROgpbhTMUtB9JSFmpGHjmW7DyaNPS1HwlTw2oSVlUUtlkHSEZmg5o89F3zvLBZNvMeZ1T4fjQg==",
"version": "2.8.0",
"resolved": "https://registry.npmjs.org/sort-package-json/-/sort-package-json-2.8.0.tgz",
"integrity": "sha512-PxeNg93bTJWmDGnu0HADDucoxfFiKkIr73Kv85EBThlI1YQPdc0XovBgg2llD0iABZbu2SlKo8ntGmOP9wOj/g==",
"dev": true,
"dependencies": {
"detect-indent": "^7.0.1",
@@ -11522,12 +11538,12 @@
}
},
"node_modules/typed-array-buffer": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/typed-array-buffer/-/typed-array-buffer-1.0.1.tgz",
"integrity": "sha512-RSqu1UEuSlrBhHTWC8O9FnPjOduNs4M7rJ4pRKoEjtx1zUNOPN2sSXHLDX+Y2WPbHIxbvg4JFo2DNAEfPIKWoQ==",
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/typed-array-buffer/-/typed-array-buffer-1.0.2.tgz",
"integrity": "sha512-gEymJYKZtKXzzBzM4jqa9w6Q1Jjm7x2d+sh19AdsD4wqnMPDYyvwpsIc2Q/835kHuo3BEQ7CjelGhfTsoBb2MQ==",
"dev": true,
"dependencies": {
"call-bind": "^1.0.6",
"call-bind": "^1.0.7",
"es-errors": "^1.3.0",
"is-typed-array": "^1.1.13"
},
@@ -11536,15 +11552,16 @@
}
},
"node_modules/typed-array-byte-length": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/typed-array-byte-length/-/typed-array-byte-length-1.0.0.tgz",
"integrity": "sha512-Or/+kvLxNpeQ9DtSydonMxCx+9ZXOswtwJn17SNLvhptaXYDJvkFFP5zbfU/uLmvnBJlI4yrnXRxpdWH/M5tNA==",
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/typed-array-byte-length/-/typed-array-byte-length-1.0.1.tgz",
"integrity": "sha512-3iMJ9q0ao7WE9tWcaYKIptkNBuOIcZCCT0d4MRvuuH88fEoEH62IuQe0OtraD3ebQEoTRk8XCBoknUNc1Y67pw==",
"dev": true,
"dependencies": {
"call-bind": "^1.0.2",
"call-bind": "^1.0.7",
"for-each": "^0.3.3",
"has-proto": "^1.0.1",
"is-typed-array": "^1.1.10"
"gopd": "^1.0.1",
"has-proto": "^1.0.3",
"is-typed-array": "^1.1.13"
},
"engines": {
"node": ">= 0.4"
@@ -11554,16 +11571,17 @@
}
},
"node_modules/typed-array-byte-offset": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/typed-array-byte-offset/-/typed-array-byte-offset-1.0.0.tgz",
"integrity": "sha512-RD97prjEt9EL8YgAgpOkf3O4IF9lhJFr9g0htQkm0rchFp/Vx7LW5Q8fSXXub7BXAODyUQohRMyOc3faCPd0hg==",
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/typed-array-byte-offset/-/typed-array-byte-offset-1.0.2.tgz",
"integrity": "sha512-Ous0vodHa56FviZucS2E63zkgtgrACj7omjwd/8lTEMEPFFyjfixMZ1ZXenpgCFBBt4EC1J2XsyVS2gkG0eTFA==",
"dev": true,
"dependencies": {
"available-typed-arrays": "^1.0.5",
"call-bind": "^1.0.2",
"available-typed-arrays": "^1.0.7",
"call-bind": "^1.0.7",
"for-each": "^0.3.3",
"has-proto": "^1.0.1",
"is-typed-array": "^1.1.10"
"gopd": "^1.0.1",
"has-proto": "^1.0.3",
"is-typed-array": "^1.1.13"
},
"engines": {
"node": ">= 0.4"
@@ -11573,14 +11591,20 @@
}
},
"node_modules/typed-array-length": {
"version": "1.0.4",
"resolved": "https://registry.npmjs.org/typed-array-length/-/typed-array-length-1.0.4.tgz",
"integrity": "sha512-KjZypGq+I/H7HI5HlOoGHkWUUGq+Q0TPhQurLbyrVrvnKTBgzLhIJ7j6J/XTQOi0d1RjyZ0wdas8bKs2p0x3Ng==",
"version": "1.0.5",
"resolved": "https://registry.npmjs.org/typed-array-length/-/typed-array-length-1.0.5.tgz",
"integrity": "sha512-yMi0PlwuznKHxKmcpoOdeLwxBoVPkqZxd7q2FgMkmD3bNwvF5VW0+UlUQ1k1vmktTu4Yu13Q0RIxEP8+B+wloA==",
"dev": true,
"dependencies": {
"call-bind": "^1.0.2",
"call-bind": "^1.0.7",
"for-each": "^0.3.3",
"is-typed-array": "^1.1.9"
"gopd": "^1.0.1",
"has-proto": "^1.0.3",
"is-typed-array": "^1.1.13",
"possible-typed-array-names": "^1.0.0"
},
"engines": {
"node": ">= 0.4"
},
"funding": {
"url": "https://github.com/sponsors/ljharb"

poetry.lock (generated, 165 lines changed)

@@ -61,63 +61,63 @@ files = [
[[package]]
name = "coverage"
version = "7.4.1"
version = "7.4.3"
description = "Code coverage measurement for Python"
optional = false
python-versions = ">=3.8"
files = [
{ file = "coverage-7.4.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:077d366e724f24fc02dbfe9d946534357fda71af9764ff99d73c3c596001bbd7" },
{ file = "coverage-7.4.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0193657651f5399d433c92f8ae264aff31fc1d066deee4b831549526433f3f61" },
{ file = "coverage-7.4.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d17bbc946f52ca67adf72a5ee783cd7cd3477f8f8796f59b4974a9b59cacc9ee" },
{ file = "coverage-7.4.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a3277f5fa7483c927fe3a7b017b39351610265308f5267ac6d4c2b64cc1d8d25" },
{ file = "coverage-7.4.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6dceb61d40cbfcf45f51e59933c784a50846dc03211054bd76b421a713dcdf19" },
{ file = "coverage-7.4.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:6008adeca04a445ea6ef31b2cbaf1d01d02986047606f7da266629afee982630" },
{ file = "coverage-7.4.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:c61f66d93d712f6e03369b6a7769233bfda880b12f417eefdd4f16d1deb2fc4c" },
{ file = "coverage-7.4.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b9bb62fac84d5f2ff523304e59e5c439955fb3b7f44e3d7b2085184db74d733b" },
{ file = "coverage-7.4.1-cp310-cp310-win32.whl", hash = "sha256:f86f368e1c7ce897bf2457b9eb61169a44e2ef797099fb5728482b8d69f3f016" },
{ file = "coverage-7.4.1-cp310-cp310-win_amd64.whl", hash = "sha256:869b5046d41abfea3e381dd143407b0d29b8282a904a19cb908fa24d090cc018" },
{ file = "coverage-7.4.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b8ffb498a83d7e0305968289441914154fb0ef5d8b3157df02a90c6695978295" },
{ file = "coverage-7.4.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3cacfaefe6089d477264001f90f55b7881ba615953414999c46cc9713ff93c8c" },
{ file = "coverage-7.4.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d6850e6e36e332d5511a48a251790ddc545e16e8beaf046c03985c69ccb2676" },
{ file = "coverage-7.4.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:18e961aa13b6d47f758cc5879383d27b5b3f3dcd9ce8cdbfdc2571fe86feb4dd" },
{ file = "coverage-7.4.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dfd1e1b9f0898817babf840b77ce9fe655ecbe8b1b327983df485b30df8cc011" },
{ file = "coverage-7.4.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:6b00e21f86598b6330f0019b40fb397e705135040dbedc2ca9a93c7441178e74" },
{ file = "coverage-7.4.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:536d609c6963c50055bab766d9951b6c394759190d03311f3e9fcf194ca909e1" },
{ file = "coverage-7.4.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:7ac8f8eb153724f84885a1374999b7e45734bf93a87d8df1e7ce2146860edef6" },
{ file = "coverage-7.4.1-cp311-cp311-win32.whl", hash = "sha256:f3771b23bb3675a06f5d885c3630b1d01ea6cac9e84a01aaf5508706dba546c5" },
{ file = "coverage-7.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:9d2f9d4cc2a53b38cabc2d6d80f7f9b7e3da26b2f53d48f05876fef7956b6968" },
{ file = "coverage-7.4.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f68ef3660677e6624c8cace943e4765545f8191313a07288a53d3da188bd8581" },
{ file = "coverage-7.4.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:23b27b8a698e749b61809fb637eb98ebf0e505710ec46a8aa6f1be7dc0dc43a6" },
{ file = "coverage-7.4.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e3424c554391dc9ef4a92ad28665756566a28fecf47308f91841f6c49288e66" },
{ file = "coverage-7.4.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e0860a348bf7004c812c8368d1fc7f77fe8e4c095d661a579196a9533778e156" },
{ file = "coverage-7.4.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fe558371c1bdf3b8fa03e097c523fb9645b8730399c14fe7721ee9c9e2a545d3" },
{ file = "coverage-7.4.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:3468cc8720402af37b6c6e7e2a9cdb9f6c16c728638a2ebc768ba1ef6f26c3a1" },
{ file = "coverage-7.4.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:02f2edb575d62172aa28fe00efe821ae31f25dc3d589055b3fb64d51e52e4ab1" },
{ file = "coverage-7.4.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:ca6e61dc52f601d1d224526360cdeab0d0712ec104a2ce6cc5ccef6ed9a233bc" },
{ file = "coverage-7.4.1-cp312-cp312-win32.whl", hash = "sha256:ca7b26a5e456a843b9b6683eada193fc1f65c761b3a473941efe5a291f604c74" },
{ file = "coverage-7.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:85ccc5fa54c2ed64bd91ed3b4a627b9cce04646a659512a051fa82a92c04a448" },
{ file = "coverage-7.4.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:8bdb0285a0202888d19ec6b6d23d5990410decb932b709f2b0dfe216d031d218" },
{ file = "coverage-7.4.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:918440dea04521f499721c039863ef95433314b1db00ff826a02580c1f503e45" },
{ file = "coverage-7.4.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:379d4c7abad5afbe9d88cc31ea8ca262296480a86af945b08214eb1a556a3e4d" },
{ file = "coverage-7.4.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b094116f0b6155e36a304ff912f89bbb5067157aff5f94060ff20bbabdc8da06" },
{ file = "coverage-7.4.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f2f5968608b1fe2a1d00d01ad1017ee27efd99b3437e08b83ded9b7af3f6f766" },
{ file = "coverage-7.4.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:10e88e7f41e6197ea0429ae18f21ff521d4f4490aa33048f6c6f94c6045a6a75" },
{ file = "coverage-7.4.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a4a3907011d39dbc3e37bdc5df0a8c93853c369039b59efa33a7b6669de04c60" },
{ file = "coverage-7.4.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6d224f0c4c9c98290a6990259073f496fcec1b5cc613eecbd22786d398ded3ad" },
{ file = "coverage-7.4.1-cp38-cp38-win32.whl", hash = "sha256:23f5881362dcb0e1a92b84b3c2809bdc90db892332daab81ad8f642d8ed55042" },
{ file = "coverage-7.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:a07f61fc452c43cd5328b392e52555f7d1952400a1ad09086c4a8addccbd138d" },
{ file = "coverage-7.4.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8e738a492b6221f8dcf281b67129510835461132b03024830ac0e554311a5c54" },
{ file = "coverage-7.4.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:46342fed0fff72efcda77040b14728049200cbba1279e0bf1188f1f2078c1d70" },
{ file = "coverage-7.4.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9641e21670c68c7e57d2053ddf6c443e4f0a6e18e547e86af3fad0795414a628" },
{ file = "coverage-7.4.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:aeb2c2688ed93b027eb0d26aa188ada34acb22dceea256d76390eea135083950" },
{ file = "coverage-7.4.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d12c923757de24e4e2110cf8832d83a886a4cf215c6e61ed506006872b43a6d1" },
{ file = "coverage-7.4.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0491275c3b9971cdbd28a4595c2cb5838f08036bca31765bad5e17edf900b2c7" },
{ file = "coverage-7.4.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:8dfc5e195bbef80aabd81596ef52a1277ee7143fe419efc3c4d8ba2754671756" },
{ file = "coverage-7.4.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:1a78b656a4d12b0490ca72651fe4d9f5e07e3c6461063a9b6265ee45eb2bdd35" },
{ file = "coverage-7.4.1-cp39-cp39-win32.whl", hash = "sha256:f90515974b39f4dea2f27c0959688621b46d96d5a626cf9c53dbc653a895c05c" },
{ file = "coverage-7.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:64e723ca82a84053dd7bfcc986bdb34af8d9da83c521c19d6b472bc6880e191a" },
{ file = "coverage-7.4.1-pp38.pp39.pp310-none-any.whl", hash = "sha256:32a8d985462e37cfdab611a6f95b09d7c091d07668fdc26e47a725ee575fe166" },
{ file = "coverage-7.4.1.tar.gz", hash = "sha256:1ed4b95480952b1a26d863e546fa5094564aa0065e1e5f0d4d0041f293251d04" },
{ file = "coverage-7.4.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:8580b827d4746d47294c0e0b92854c85a92c2227927433998f0d3320ae8a71b6" },
{ file = "coverage-7.4.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:718187eeb9849fc6cc23e0d9b092bc2348821c5e1a901c9f8975df0bc785bfd4" },
{ file = "coverage-7.4.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:767b35c3a246bcb55b8044fd3a43b8cd553dd1f9f2c1eeb87a302b1f8daa0524" },
{ file = "coverage-7.4.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae7f19afe0cce50039e2c782bff379c7e347cba335429678450b8fe81c4ef96d" },
{ file = "coverage-7.4.3-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba3a8aaed13770e970b3df46980cb068d1c24af1a1968b7818b69af8c4347efb" },
{ file = "coverage-7.4.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:ee866acc0861caebb4f2ab79f0b94dbfbdbfadc19f82e6e9c93930f74e11d7a0" },
{ file = "coverage-7.4.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:506edb1dd49e13a2d4cac6a5173317b82a23c9d6e8df63efb4f0380de0fbccbc" },
{ file = "coverage-7.4.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fd6545d97c98a192c5ac995d21c894b581f1fd14cf389be90724d21808b657e2" },
{ file = "coverage-7.4.3-cp310-cp310-win32.whl", hash = "sha256:f6a09b360d67e589236a44f0c39218a8efba2593b6abdccc300a8862cffc2f94" },
{ file = "coverage-7.4.3-cp310-cp310-win_amd64.whl", hash = "sha256:18d90523ce7553dd0b7e23cbb28865db23cddfd683a38fb224115f7826de78d0" },
{ file = "coverage-7.4.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cbbe5e739d45a52f3200a771c6d2c7acf89eb2524890a4a3aa1a7fa0695d2a47" },
{ file = "coverage-7.4.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:489763b2d037b164846ebac0cbd368b8a4ca56385c4090807ff9fad817de4113" },
{ file = "coverage-7.4.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:451f433ad901b3bb00184d83fd83d135fb682d780b38af7944c9faeecb1e0bfe" },
{ file = "coverage-7.4.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fcc66e222cf4c719fe7722a403888b1f5e1682d1679bd780e2b26c18bb648cdc" },
{ file = "coverage-7.4.3-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b3ec74cfef2d985e145baae90d9b1b32f85e1741b04cd967aaf9cfa84c1334f3" },
{ file = "coverage-7.4.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:abbbd8093c5229c72d4c2926afaee0e6e3140de69d5dcd918b2921f2f0c8baba" },
{ file = "coverage-7.4.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:35eb581efdacf7b7422af677b92170da4ef34500467381e805944a3201df2079" },
{ file = "coverage-7.4.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:8249b1c7334be8f8c3abcaaa996e1e4927b0e5a23b65f5bf6cfe3180d8ca7840" },
{ file = "coverage-7.4.3-cp311-cp311-win32.whl", hash = "sha256:cf30900aa1ba595312ae41978b95e256e419d8a823af79ce670835409fc02ad3" },
{ file = "coverage-7.4.3-cp311-cp311-win_amd64.whl", hash = "sha256:18c7320695c949de11a351742ee001849912fd57e62a706d83dfc1581897fa2e" },
{ file = "coverage-7.4.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b51bfc348925e92a9bd9b2e48dad13431b57011fd1038f08316e6bf1df107d10" },
{ file = "coverage-7.4.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:d6cdecaedea1ea9e033d8adf6a0ab11107b49571bbb9737175444cea6eb72328" },
{ file = "coverage-7.4.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3b2eccb883368f9e972e216c7b4c7c06cabda925b5f06dde0650281cb7666a30" },
{ file = "coverage-7.4.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6c00cdc8fa4e50e1cc1f941a7f2e3e0f26cb2a1233c9696f26963ff58445bac7" },
{ file = "coverage-7.4.3-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b9a4a8dd3dcf4cbd3165737358e4d7dfbd9d59902ad11e3b15eebb6393b0446e" },
{ file = "coverage-7.4.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:062b0a75d9261e2f9c6d071753f7eef0fc9caf3a2c82d36d76667ba7b6470003" },
{ file = "coverage-7.4.3-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:ebe7c9e67a2d15fa97b77ea6571ce5e1e1f6b0db71d1d5e96f8d2bf134303c1d" },
{ file = "coverage-7.4.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:c0a120238dd71c68484f02562f6d446d736adcc6ca0993712289b102705a9a3a" },
{ file = "coverage-7.4.3-cp312-cp312-win32.whl", hash = "sha256:37389611ba54fd6d278fde86eb2c013c8e50232e38f5c68235d09d0a3f8aa352" },
{ file = "coverage-7.4.3-cp312-cp312-win_amd64.whl", hash = "sha256:d25b937a5d9ffa857d41be042b4238dd61db888533b53bc76dc082cb5a15e914" },
{ file = "coverage-7.4.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:28ca2098939eabab044ad68850aac8f8db6bf0b29bc7f2887d05889b17346454" },
{ file = "coverage-7.4.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:280459f0a03cecbe8800786cdc23067a8fc64c0bd51dc614008d9c36e1659d7e" },
{ file = "coverage-7.4.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c0cdedd3500e0511eac1517bf560149764b7d8e65cb800d8bf1c63ebf39edd2" },
{ file = "coverage-7.4.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9a9babb9466fe1da12417a4aed923e90124a534736de6201794a3aea9d98484e" },
{ file = "coverage-7.4.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dec9de46a33cf2dd87a5254af095a409ea3bf952d85ad339751e7de6d962cde6" },
{ file = "coverage-7.4.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:16bae383a9cc5abab9bb05c10a3e5a52e0a788325dc9ba8499e821885928968c" },
{ file = "coverage-7.4.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:2c854ce44e1ee31bda4e318af1dbcfc929026d12c5ed030095ad98197eeeaed0" },
{ file = "coverage-7.4.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:ce8c50520f57ec57aa21a63ea4f325c7b657386b3f02ccaedeccf9ebe27686e1" },
{ file = "coverage-7.4.3-cp38-cp38-win32.whl", hash = "sha256:708a3369dcf055c00ddeeaa2b20f0dd1ce664eeabde6623e516c5228b753654f" },
{ file = "coverage-7.4.3-cp38-cp38-win_amd64.whl", hash = "sha256:1bf25fbca0c8d121a3e92a2a0555c7e5bc981aee5c3fdaf4bb7809f410f696b9" },
{ file = "coverage-7.4.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3b253094dbe1b431d3a4ac2f053b6d7ede2664ac559705a704f621742e034f1f" },
{ file = "coverage-7.4.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:77fbfc5720cceac9c200054b9fab50cb2a7d79660609200ab83f5db96162d20c" },
{ file = "coverage-7.4.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6679060424faa9c11808598504c3ab472de4531c571ab2befa32f4971835788e" },
{ file = "coverage-7.4.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4af154d617c875b52651dd8dd17a31270c495082f3d55f6128e7629658d63765" },
{ file = "coverage-7.4.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8640f1fde5e1b8e3439fe482cdc2b0bb6c329f4bb161927c28d2e8879c6029ee" },
{ file = "coverage-7.4.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:69b9f6f66c0af29642e73a520b6fed25ff9fd69a25975ebe6acb297234eda501" },
{ file = "coverage-7.4.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:0842571634f39016a6c03e9d4aba502be652a6e4455fadb73cd3a3a49173e38f" },
{ file = "coverage-7.4.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a78ed23b08e8ab524551f52953a8a05d61c3a760781762aac49f8de6eede8c45" },
{ file = "coverage-7.4.3-cp39-cp39-win32.whl", hash = "sha256:c0524de3ff096e15fcbfe8f056fdb4ea0bf497d584454f344d59fce069d3e6e9" },
{ file = "coverage-7.4.3-cp39-cp39-win_amd64.whl", hash = "sha256:0209a6369ccce576b43bb227dc8322d8ef9e323d089c6f3f26a597b09cb4d2aa" },
{ file = "coverage-7.4.3-pp38.pp39.pp310-none-any.whl", hash = "sha256:7cbde573904625509a3f37b6fecea974e363460b556a627c60dc2f47e2fffa51" },
{ file = "coverage-7.4.3.tar.gz", hash = "sha256:276f6077a5c61447a48d133ed13e759c09e62aff0dc84274a68dc18660104d52" },
]
[package.dependencies]
@ -328,13 +328,13 @@ six = ">=1.13.0"
[[package]]
name = "json5"
version = "0.9.14"
version = "0.9.17"
description = "A Python implementation of the JSON5 data format."
optional = false
python-versions = "*"
python-versions = ">=3.8"
files = [
{ file = "json5-0.9.14-py2.py3-none-any.whl", hash = "sha256:740c7f1b9e584a468dbb2939d8d458db3427f2c93ae2139d05f47e453eae964f" },
{ file = "json5-0.9.14.tar.gz", hash = "sha256:9ed66c3a6ca3510a976a9ef9b8c0787de24802724ab1860bc0153c7fdd589b02" },
{ file = "json5-0.9.17-py2.py3-none-any.whl", hash = "sha256:f8ec1ecf985951d70f780f6f877c4baca6a47b6e61e02c4cd190138d10a7805a" },
{ file = "json5-0.9.17.tar.gz", hash = "sha256:717d99d657fa71b7094877b1d921b1cce40ab444389f6d770302563bb7dfd9ae" },
]
[package.extras]
@ -521,13 +521,13 @@ test = ["pytest"]
[[package]]
name = "pyright"
version = "1.1.350"
version = "1.1.351"
description = "Command line wrapper for pyright"
optional = false
python-versions = ">=3.7"
files = [
{ file = "pyright-1.1.350-py3-none-any.whl", hash = "sha256:f1dde6bcefd3c90aedbe9dd1c573e4c1ddbca8c74bf4fa664dd3b1a599ac9a66" },
{ file = "pyright-1.1.350.tar.gz", hash = "sha256:a8ba676de3a3737ea4d8590604da548d4498cc5ee9ee00b1a403c6db987916c6" },
{ file = "pyright-1.1.351-py3-none-any.whl", hash = "sha256:83b44b25396ae20661fc5f133c3fce30928ff1296d4f2e5ff0bca5fcf03eb89d" },
{ file = "pyright-1.1.351.tar.gz", hash = "sha256:01124099714eebd7f6525d8cbfa350626b56dfaf771cfcd55c03e69f0f1efbbd" },
]
[package.dependencies]
@ -791,39 +791,39 @@ files = [
[[package]]
name = "ruff"
version = "0.2.1"
version = "0.2.2"
description = "An extremely fast Python linter and code formatter, written in Rust."
optional = false
python-versions = ">=3.7"
files = [
{ file = "ruff-0.2.1-py3-none-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:dd81b911d28925e7e8b323e8d06951554655021df8dd4ac3045d7212ac4ba080" },
{ file = "ruff-0.2.1-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:dc586724a95b7d980aa17f671e173df00f0a2eef23f8babbeee663229a938fec" },
{ file = "ruff-0.2.1-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c92db7101ef5bfc18e96777ed7bc7c822d545fa5977e90a585accac43d22f18a" },
{ file = "ruff-0.2.1-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:13471684694d41ae0f1e8e3a7497e14cd57ccb7dd72ae08d56a159d6c9c3e30e" },
{ file = "ruff-0.2.1-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a11567e20ea39d1f51aebd778685582d4c56ccb082c1161ffc10f79bebe6df35" },
{ file = "ruff-0.2.1-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:00a818e2db63659570403e44383ab03c529c2b9678ba4ba6c105af7854008105" },
{ file = "ruff-0.2.1-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:be60592f9d218b52f03384d1325efa9d3b41e4c4d55ea022cd548547cc42cd2b" },
{ file = "ruff-0.2.1-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fbd2288890b88e8aab4499e55148805b58ec711053588cc2f0196a44f6e3d855" },
{ file = "ruff-0.2.1-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f3ef052283da7dec1987bba8d8733051c2325654641dfe5877a4022108098683" },
{ file = "ruff-0.2.1-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:7022d66366d6fded4ba3889f73cd791c2d5621b2ccf34befc752cb0df70f5fad" },
{ file = "ruff-0.2.1-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:0a725823cb2a3f08ee743a534cb6935727d9e47409e4ad72c10a3faf042ad5ba" },
{ file = "ruff-0.2.1-py3-none-musllinux_1_2_i686.whl", hash = "sha256:0034d5b6323e6e8fe91b2a1e55b02d92d0b582d2953a2b37a67a2d7dedbb7acc" },
{ file = "ruff-0.2.1-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:e5cb5526d69bb9143c2e4d2a115d08ffca3d8e0fddc84925a7b54931c96f5c02" },
{ file = "ruff-0.2.1-py3-none-win32.whl", hash = "sha256:6b95ac9ce49b4fb390634d46d6ece32ace3acdd52814671ccaf20b7f60adb232" },
{ file = "ruff-0.2.1-py3-none-win_amd64.whl", hash = "sha256:e3affdcbc2afb6f5bd0eb3130139ceedc5e3f28d206fe49f63073cb9e65988e0" },
{ file = "ruff-0.2.1-py3-none-win_arm64.whl", hash = "sha256:efababa8e12330aa94a53e90a81eb6e2d55f348bc2e71adbf17d9cad23c03ee6" },
{ file = "ruff-0.2.1.tar.gz", hash = "sha256:3b42b5d8677cd0c72b99fcaf068ffc62abb5a19e71b4a3b9cfa50658a0af02f1" },
{ file = "ruff-0.2.2-py3-none-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:0a9efb032855ffb3c21f6405751d5e147b0c6b631e3ca3f6b20f917572b97eb6" },
{ file = "ruff-0.2.2-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:d450b7fbff85913f866a5384d8912710936e2b96da74541c82c1b458472ddb39" },
{ file = "ruff-0.2.2-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ecd46e3106850a5c26aee114e562c329f9a1fbe9e4821b008c4404f64ff9ce73" },
{ file = "ruff-0.2.2-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5e22676a5b875bd72acd3d11d5fa9075d3a5f53b877fe7b4793e4673499318ba" },
{ file = "ruff-0.2.2-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1695700d1e25a99d28f7a1636d85bafcc5030bba9d0578c0781ba1790dbcf51c" },
{ file = "ruff-0.2.2-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:b0c232af3d0bd8f521806223723456ffebf8e323bd1e4e82b0befb20ba18388e" },
{ file = "ruff-0.2.2-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f63d96494eeec2fc70d909393bcd76c69f35334cdbd9e20d089fb3f0640216ca" },
{ file = "ruff-0.2.2-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6a61ea0ff048e06de273b2e45bd72629f470f5da8f71daf09fe481278b175001" },
{ file = "ruff-0.2.2-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5e1439c8f407e4f356470e54cdecdca1bd5439a0673792dbe34a2b0a551a2fe3" },
{ file = "ruff-0.2.2-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:940de32dc8853eba0f67f7198b3e79bc6ba95c2edbfdfac2144c8235114d6726" },
{ file = "ruff-0.2.2-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:0c126da55c38dd917621552ab430213bdb3273bb10ddb67bc4b761989210eb6e" },
{ file = "ruff-0.2.2-py3-none-musllinux_1_2_i686.whl", hash = "sha256:3b65494f7e4bed2e74110dac1f0d17dc8e1f42faaa784e7c58a98e335ec83d7e" },
{ file = "ruff-0.2.2-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:1ec49be4fe6ddac0503833f3ed8930528e26d1e60ad35c2446da372d16651ce9" },
{ file = "ruff-0.2.2-py3-none-win32.whl", hash = "sha256:d920499b576f6c68295bc04e7b17b6544d9d05f196bb3aac4358792ef6f34325" },
{ file = "ruff-0.2.2-py3-none-win_amd64.whl", hash = "sha256:cc9a91ae137d687f43a44c900e5d95e9617cb37d4c989e462980ba27039d239d" },
{ file = "ruff-0.2.2-py3-none-win_arm64.whl", hash = "sha256:c9d15fc41e6054bfc7200478720570078f0b41c9ae4f010bcc16bd6f4d1aacdd" },
{ file = "ruff-0.2.2.tar.gz", hash = "sha256:e62ed7f36b3068a30ba39193a14274cd706bc486fad521276458022f7bccb31d" },
]
[[package]]
name = "setuptools"
version = "69.1.0"
version = "69.1.1"
description = "Easily download, build, install, upgrade, and uninstall Python packages"
optional = false
python-versions = ">=3.8"
files = [
{ file = "setuptools-69.1.0-py3-none-any.whl", hash = "sha256:c054629b81b946d63a9c6e732bc8b2513a7c3ea645f11d0139a2191d735c60c6" },
{ file = "setuptools-69.1.0.tar.gz", hash = "sha256:850894c4195f09c4ed30dba56213bf7c3f21d86ed6bdaafb5df5972593bfc401" },
{ file = "setuptools-69.1.1-py3-none-any.whl", hash = "sha256:02fa291a0471b3a18b2b2481ed902af520c69e8ae0919c13da936542754b4c56" },
{ file = "setuptools-69.1.1.tar.gz", hash = "sha256:5c0806c7d9af348e6dd3777b4f4dbb42c7ad85b190104837488eab9a7c945cf8" },
]
[package.extras]
@ -850,6 +850,7 @@ testing = [
"jaraco.develop (>=7.21)",
"jaraco.envs (>=2.2)",
"jaraco.path (>=3.2.0)",
"packaging (>=23.2)",
"pip (>=19.1)",
"pytest (>=6)",
"pytest-checkdocs (>=2.4)",
@ -870,7 +871,7 @@ testing-integration = [
"filelock (>=3.4.0)",
"jaraco.envs (>=2.2)",
"jaraco.path (>=3.2.0)",
"packaging (>=23.1)",
"packaging (>=23.2)",
"pytest",
"pytest-enabler",
"pytest-xdist",

View File

@ -4,7 +4,7 @@ build-backend = "poetry.core.masonry.api"
[tool.poetry]
name = "comicfn2dict"
version = "0.1.4"
version = "0.2.0a4"
description = "Parse common comic filenames and return a dict of metadata attributes. Includes a cli."
license = "GPL-3.0-only"
authors = ["AJ Slater <aj@slater.net>"]
@ -125,7 +125,7 @@ exclude = "*~,.git/*,.mypy_cache/*,.pytest_cache/*,.venv*,__pycache__/*,cache/*,
extend-exclude = ["typings"]
target-version = "py310"
[tool.lint.ruff]
[tool.ruff.lint]
extend-ignore = [
"S101",
"D203",

View File

@ -1,5 +1,7 @@
"""Test filenames with human parsed correct results."""
from types import MappingProxyType
TEST_COMIC_FIELDS = {
"series": "Long Series Name",
"issue": "001",
@ -22,6 +24,7 @@ TEST_COMIC_FIELDS_VOL = {
TEST_COMIC_VOL_ONLY = {
"series": "Long Series Name",
"volume": "1",
"issue": "1",
"title": "Title",
"original_format": "TPB",
"year": "2000",
@ -29,6 +32,7 @@ TEST_COMIC_VOL_ONLY = {
"ext": "cbr",
}
# Tests for 0.1.0
FNS = {
"Night of 1000 Wolves 001 (2013).cbz": {
"series": "Night of 1000 Wolves",
@ -51,11 +55,6 @@ FNS = {
"Long Series Name #001 (2000) Title (TPB) (Releaser).cbz": TEST_COMIC_FIELDS,
"Long Series Name (2000) 001 Title (TPB) (Releaser).cbz": TEST_COMIC_FIELDS,
"Long Series Name (2000) #001 Title (TPB) (Releaser).cbz": TEST_COMIC_FIELDS,
"Long Series Name v1 (2000) #001 "
"Title (TPB) (Releaser).cbz": TEST_COMIC_FIELDS_VOL,
"Long Series Name 001 (2000) (TPB-Releaser) Title.cbz": TEST_COMIC_FIELDS,
"Long Series Name Vol 1 "
"(2000) (TPB) (Releaser & Releaser-Releaser) Title.cbr": TEST_COMIC_VOL_ONLY,
"Ultimate Craziness (2019) (Digital) (Friends-of-Bill).cbr": {
"series": "Ultimate Craziness",
"year": "2019",
@ -73,26 +72,17 @@ FNS = {
"Arkenstone Vol. 01 - The Smell of Burnt Toast (2020) (digital) (My-brother).cbr": {
"series": "Arkenstone",
"volume": "01",
"issue": "01",
"year": "2020",
"ext": "cbr",
"scan_info": "My-brother",
"title": "The Smell of Burnt Toast",
"original_format": "digital",
},
"Bardude - The Last Thing I Remember.cbz": {
"series": "Bardude",
"title": "The Last Thing I Remember",
"ext": "cbz",
},
"Drunkguy - The Man Without Fear - 01.cbz": {
"series": "Drunkguy",
"title": "The Man Without Fear",
"issue": "01",
"ext": "cbz",
},
"The_Arkenstone_v03_(2002)_(Digital)_(DR_&amp;_Quenya-Elves).cbr": {
"series": "The Arkenstone",
"volume": "03",
"issue": "03",
"year": "2002",
"ext": "cbr",
"scan_info": "DR &amp; Quenya-Elves",
@ -111,6 +101,7 @@ FNS = {
"Kartalk Library Edition v01 (1992) (digital) (Son of Ultron-Empire).cbr": {
"series": "Kartalk Library Edition",
"volume": "01",
"issue": "01",
"year": "1992",
"ext": "cbr",
"original_format": "digital",
@ -119,15 +110,15 @@ FNS = {
"Kind of Deadly v02 - Last Bullet (2006) (Digital) (Zone-Empire).cbr": {
"series": "Kind of Deadly",
"volume": "02",
"issue": "02",
"year": "2006",
"ext": "cbr",
"original_format": "Digital",
"scan_info": "Zone-Empire",
"title": "Last Bullet",
},
"Jeremy John - A Big Long Title (2017) (digital-Minutement).cbz": {
"series": "Jeremy John",
"title": "A Big Long Title",
"Jeremy John - Not A Title (2017) (digital-Minutement).cbz": {
"series": "Jeremy John - Not A Title",
"year": "2017",
"ext": "cbz",
"original_format": "digital",
@ -139,8 +130,7 @@ FNS = {
"year": "2006",
"ext": "cbz",
"scan_info": "Minutemen-Faessla",
# "original_format": "digital",
"remainders": ("(digital",),
"original_format": "digital",
},
"Jeremy John 003 (2007) (4 covers) (digital) (Minutemen-Faessla).cbz": {
"series": "Jeremy John",
@ -154,6 +144,7 @@ FNS = {
"Jeremy John v01 - Uninterested! (2007) (Digital) (Asgard-Empire).cbr": {
"series": "Jeremy John",
"volume": "01",
"issue": "01",
"year": "2007",
"ext": "cbr",
"original_format": "Digital",
@ -180,6 +171,7 @@ FNS = {
"Darkwad by Carlos Zemo v01 - Knuckle Fight (2009) (Digital) (Zone-Empire).cbr": {
"series": "Darkwad by Carlos Zemo",
"volume": "01",
"issue": "01",
"year": "2009",
"ext": "cbr",
"title": "Knuckle Fight",
@ -243,3 +235,273 @@ FNS = {
"ext": "cbz",
},
}
# Tests for 0.2.0
FNS.update(
{
# Philosophy change regarding dashes.
"Bardude - The Last Thing I Remember.cbz": {
"series": "Bardude - The Last Thing I Remember",
"ext": "cbz",
},
"Drunkguy - The Man Without Fear - 01.cbz": {
"series": "Drunkguy - The Man Without Fear",
"issue": "01",
"ext": "cbz",
},
# BIG change: the title must come after another token, and more characters are stripped.
"'Batman - Superman - World's Finest 022 (2024) (Webrip) (The Last Kryptonian-DCP).cbz": {
"ext": "cbz",
"issue": "022",
"original_format": "Webrip",
"series": "Batman - Superman - World's Finest",
"scan_info": "The Last Kryptonian-DCP",
"year": "2024",
},
# Issue numbers starting with a letter were requested in https://github.com/comictagger/comictagger/issues/543
# Word characters are now allowed to lead issue numbers only if preceded by a '#' marker
"batman #B01 title.cbz": {
"ext": "cbz",
"issue": "B01",
"series": "batman",
"title": "title",
},
"Monster_Island_v1_#2__repaired__c2c.cbz": {
"ext": "cbz",
"issue": "2",
"series": "Monster Island",
"volume": "1",
"scan_info": "c2c",
"remainders": ("(repaired)",),
},
# Extra '-' in the series name
" X-Men-V1-#067.cbr": {
"ext": "cbr",
"issue": "067",
"series": "X-Men",
"volume": "1",
"remainders": ("-",),
},
"Aquaman - Green Arrow - Deep Target #01 (of 07) (2021).cbr": {
"ext": "cbr",
"issue": "01",
"series": "Aquaman - Green Arrow - Deep Target",
"year": "2021",
"issue_count": "07",
},
# CT only separates this into a title if the '-' is attached to the previous word, e.g. 'aquaman- Green Arrow'. @bpepple has already opened https://github.com/ajslater/comicfn2dict/issues/1 for this.
"Batman_-_Superman_#020_(2021).cbr": {
"ext": "cbr",
"issue": "020",
"series": "Batman - Superman",
"year": "2021",
},
# Publishers like to reprint some of their annuals using this format for the year
"Batman '89 (2021) .cbr": {
"ext": "cbr",
"series": "Batman '89",
"year": "2021",
},
# This made the parser in CT much more complicated. It's understandable that this isn't handled in the first few iterations of this project.
"Star Wars - War of the Bounty Hunters - IG-88 (2021).cbz": {
"ext": "cbz",
"series": "Star Wars - War of the Bounty Hunters - IG-88",
"year": "2021",
},
# The addition of the '#1' turns this into the same as 'Aquaman - Green Arrow - Deep Target' above
"Star Wars - War of the Bounty Hunters - IG-88 #1 (2021).cbz": {
"ext": "cbz",
"issue": "1",
"series": "Star Wars - War of the Bounty Hunters - IG-88",
"year": "2021",
},
"Free Comic Book Day - Avengers.Hulk (2021).cbz": {
"ext": "cbz",
"series": "Free Comic Book Day - Avengers Hulk",
"year": "2021",
},
# CT assumes the volume is also the issue number if it can't find an issue number
"Avengers By Brian Michael Bendis volume 03 (2013).cbz": {
"ext": "cbz",
"issue": "03",
"series": "Avengers By Brian Michael Bendis",
"volume": "03",
"year": "2013",
},
# CT catches the year
"Marvel Previews #002 (January 2022).cbr": {
"ext": "cbr",
"issue": "002",
"series": "Marvel Previews",
"publisher": "Marvel",
"month": "01",
"year": "2022",
},
"Test Numeric Year #2 2001-02-24.cbz": {
"ext": "cbz",
"issue": "2",
"series": "Test Numeric Year",
"year": "2001",
"month": "02",
"day": "24",
},
"Test Month First Date 02-24-2001.cbz": {
"ext": "cbz",
"series": "Test Month First Date",
"year": "2001",
"month": "02",
"day": "24",
},
# CT notices that this is a full date; it doesn't actually return the month or day, though, it just removes them
"X-Men, 2021-08-04 (#02).cbz": {
"ext": "cbz",
"issue": "02",
"series": "X-Men",
"year": "2021",
"month": "08",
"day": "04",
},
# 4 digit issue number
# Should this be an issue number if it could also be a year? (DONE)
"action comics 1024.cbz": {
"ext": "cbz",
"issue": "1024",
"series": "action comics",
},
# This is a contrived test case. I've never seen this; I just wanted to handle it with my parser.
"Cory Doctorow's Futuristic Tales of the Here and Now #0.0.1 (2007).cbz": {
"ext": "cbz",
"issue": "0.0.1",
"series": "Cory Doctorow's Futuristic Tales of the Here and Now",
"year": "2007",
},
# CT treats ':' the same as '-', but here the ':' is attached to 'Now', which CT sees as a title separator
"Cory Doctorow's Futuristic Tales of the Here and Now: Anda's Game #001 (2007).cbz": {
"ext": "cbz",
"issue": "001",
"series": "Cory Doctorow's Futuristic Tales of the Here and Now",
"title": "Anda's Game",
"year": "2007",
},
# If a title ends in a year, it's not an issue (and it becomes the year if no other year is present)
"Blade Runner Free Comic Book Day 2021 (2021).cbr": {
"ext": "cbr",
"series": "Blade Runner Free Comic Book Day 2021",
"year": "2021",
},
# If a year occurs after another year and no volume is present, treat them as volume / year
"Super Strange Yarns (1957) #92 (1969).cbz": {
"ext": "cbz",
"issue": "92",
"series": "Super Strange Yarns",
"volume": "1957",
"year": "1969",
},
# CT sees the '(of 06)' following the '03' and marks the '03' as the volume
"Elephantmen 2259 #008 - Simple Truth 03 (of 06) (2021).cbr": {
"ext": "cbr",
"issue": "008",
"series": "Elephantmen 2259",
"title": "Simple Truth",
"volume": "03",
"year": "2021",
"volume_count": "06",
},
# CT treats 'Book' like 'v' but also adds it to the title (matching ComicVine for this particular series)
"Bloodshot Book 03 (2020).cbr": {
"ext": "cbr",
"issue": "03",
"series": "Bloodshot",
"title": "Book 03",
"volume": "03",
"year": "2020",
},
# c2c, aka 'cover to cover', is fairly common; CT moves it to scan_info/remainder
"Marvel Two In One V1 #090 c2c.cbr": {
"ext": "cbr",
"issue": "090",
"series": "Marvel Two In One",
"publisher": "Marvel",
"volume": "1",
"scan_info": "c2c",
},
# CT treats '[]' as equivalent to '()'; it catches 'DC' as a publisher and 'Sep-Oct 1951' as dates and removes them. CT doesn't catch 'digital', though, so that could be better, but I blame whoever made this atrocious filename.
"Wonder Woman #49 DC Sep-Oct 1951 digital [downsized, lightened, 4 missing story pages restored] (Shadowcat-Empire).cbz": {
"ext": "cbz",
"issue": "49",
"series": "Wonder Woman",
"publisher": "DC",
"year": "1951",
"month": "09",
"remainders": (
"digital (downsized, lightened, 4 missing story pages "
"restored) (Shadowcat-Empire)",
),
},
"Captain Science #001 (1950) The Beginning - nothing.cbz": {
"ext": "cbz",
"issue": "001",
"title": "The Beginning - nothing",
"series": "Captain Science",
"year": "1950",
},
"Captain Science #001-cix-cbi.cbr": {
"ext": "cbr",
"issue": "001",
"series": "Captain Science",
"title": "cix-cbi",
},
"Long Series Name v1 (2000) #001 "
"Title (TPB) (Releaser).cbz": TEST_COMIC_FIELDS_VOL,
"Long Series Name 001 (2000) (TPB-Releaser) Title.cbz": {
"series": "Long Series Name",
"issue": "001",
"year": "2000",
"original_format": "TPB",
"scan_info": "Releaser",
"remainders": ("Title",),
"ext": "cbz",
},
"Long Series Name Vol 1 "
"(2000) (TPB) (Releaser & Releaser-Releaser) Title.cbr": {
"series": "Long Series Name",
"volume": "1",
"issue": "1",
"remainders": ("Title",),
"original_format": "TPB",
"year": "2000",
"scan_info": "Releaser & Releaser-Releaser",
"ext": "cbr",
},
}
)
# first_key, first_val = NEW.popitem()
# FNS[first_key] = first_val
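# MappingProxyType exposes the fixtures as read-only views so tests can't mutate them.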
PARSE_FNS = MappingProxyType(FNS)
SERIALIZE_FNS = MappingProxyType(
{
"Long Series Name #001 (2000) Title (TPB) (Releaser).cbz": TEST_COMIC_FIELDS,
"Long Series Name v1 #001 "
"(2000) Title (TPB) (Releaser & Releaser-Releaser).cbr": TEST_COMIC_VOL_ONLY,
"Series Name (2000-12-31).cbz": {
"series": "Series Name",
"year": "2000",
"month": "12",
"day": "31",
"ext": "cbz",
},
"Series Name (2000-12).cbz": {
"series": "Series Name",
"year": "2000",
"month": "12",
"ext": "cbz",
},
"Series Name (Dec-31).cbz": {
"series": "Series Name",
"month": "12",
"day": "31",
"ext": "cbz",
},
}
)

View File

@ -1,22 +1,18 @@
"""Tests for filename parsing."""
from pprint import pprint
from types import MappingProxyType
import pytest
from deepdiff.diff import DeepDiff
from comicfn2dict import comicfn2dict
from tests.comic_filenames import FNS
ALL_FIELDS = frozenset({"series", "volume", "issue", "issue_count", "year", "ext"})
FIELD_SCHEMA = MappingProxyType({key: None for key in ALL_FIELDS})
from comicfn2dict import ComicFilenameParser
from tests.comic_filenames import PARSE_FNS
@pytest.mark.parametrize("item", FNS.items())
@pytest.mark.parametrize("item", PARSE_FNS.items())
def test_parse_filename(item):
"""Test filename parsing."""
fn, defined_fields = item
md = comicfn2dict(fn)
md = ComicFilenameParser(fn, verbose=1).parse()
diff = DeepDiff(defined_fields, md, ignore_order=True)
print(fn)
pprint(defined_fields)
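
For reference, a minimal sketch of exercising the parser outside pytest, using only the `ComicFilenameParser` API the test above imports; the filename and its expected result are the `TEST_COMIC_FIELDS` fixture from `PARSE_FNS`.

```python
from comicfn2dict import ComicFilenameParser

# Parse one of the PARSE_FNS fixture filenames; verbose=1 prints parser tracing.
metadata = ComicFilenameParser(
    "Long Series Name #001 (2000) Title (TPB) (Releaser).cbz", verbose=1
).parse()
print(metadata)  # expected to match the TEST_COMIC_FIELDS fixture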

View File

@ -0,0 +1,13 @@
"""Tests for filename parsing."""
import pytest
from comicfn2dict import ComicFilenameSerializer
from tests.comic_filenames import SERIALIZE_FNS
@pytest.mark.parametrize("item", SERIALIZE_FNS.items())
def test_serialize_dict(item):
"""Test metadata serialization."""
test_fn, md = item
fn = ComicFilenameSerializer(md).serialize()
assert test_fn == fn
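
A matching sketch for the serializer, again not part of the committed tests: it feeds one of the `SERIALIZE_FNS` metadata dicts through `ComicFilenameSerializer`, the same call the test above parametrizes.

```python
from comicfn2dict import ComicFilenameSerializer

# One of the SERIALIZE_FNS fixtures: a series with a full date and no issue.
metadata = {
    "series": "Series Name",
    "year": "2000",
    "month": "12",
    "day": "31",
    "ext": "cbz",
}
filename = ComicFilenameSerializer(metadata).serialize()
assert filename == "Series Name (2000-12-31).cbz"
```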