Compare commits: 1.5.2 ... 1.5.4

6 commits · 4 changed files with 12 additions and 2 deletions

[File 1 of 4: a GitHub Actions workflow; the filename is not shown in this capture]

@@ -74,6 +74,7 @@ jobs:
       - name: Install build dependencies
         run: |
+          pip install --force-reinstall git+https://github.com/pyinstaller/pyinstaller
           python -m pip install --upgrade --upgrade-strategy eager -r requirements_dev.txt
       - name: Install Windows build dependencies
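Both workflow files gain the same one-line addition: PyInstaller is installed straight from its upstream Git repository, presumably to pick up an upstream fix ahead of a tagged release, with --force-reinstall ensuring that any previously installed release version is replaced.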

[File 2 of 4: a GitHub Actions workflow; the filename is not shown in this capture]

@@ -35,6 +35,7 @@ jobs:
       - name: Install build dependencies
         run: |
+          pip install --force-reinstall git+https://github.com/pyinstaller/pyinstaller
           python -m pip install --upgrade --upgrade-strategy eager -r requirements_dev.txt
       - name: Install Windows build dependencies
@@ -78,6 +79,7 @@ jobs:
         if: startsWith(github.ref, 'refs/tags/')
         shell: bash
         run: |
+          git fetch --depth=1 origin +refs/tags/*:refs/tags/* # github is dumb
           echo "release_name=$(git tag -l --format "%(refname:strip=2): %(contents:lines=1)" ${{ github.ref_name }})" >> $GITHUB_ENV
       - name: Release
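The second hunk (judging by the "# github is dumb" comment) appears to work around actions/checkout mangling annotated tags and losing their messages: re-fetching +refs/tags/*:refs/tags/* restores the real tags, so that %(contents:lines=1) can read the first line of the tag message and the step can write a release name of the form "1.5.4: <first line of tag message>" into $GITHUB_ENV.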

[File 3 of 4: the module containing ComicCacher; the filename is not shown in this capture]

@@ -450,6 +450,8 @@ class ComicCacher:
         set_slots = ""

         for key in data:
+            if data[key] is None:
+                continue
             if keys != "":
                 keys += ", "
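For context, here is a minimal sketch of the pattern this hunk patches (not ComicTagger's actual code; the table and column names are made up): the loop builds column and placeholder lists for a SQLite upsert, and skipping None values means a partially populated record no longer blanks out columns that an earlier, fuller record already cached.

import sqlite3

def upsert(con: sqlite3.Connection, table: str, data: dict) -> None:
    # Skip None values so a partial record can't overwrite columns that an
    # earlier, more complete record already filled in.
    filled = {k: v for k, v in data.items() if v is not None}
    keys = ", ".join(filled)
    slots = ", ".join("?" for _ in filled)
    set_slots = ", ".join(f"{k} = ?" for k in filled)
    con.execute(
        f"INSERT INTO {table} ({keys}) VALUES ({slots}) "
        f"ON CONFLICT(id) DO UPDATE SET {set_slots}",
        list(filled.values()) * 2,
    )

con = sqlite3.connect(":memory:")
con.execute("CREATE TABLE issues (id INTEGER PRIMARY KEY, name TEXT, image TEXT)")
upsert(con, "issues", {"id": 1, "name": "Issue #1", "image": "cover.jpg"})
upsert(con, "issues", {"id": 1, "name": "Issue #1", "image": None})  # image survives
print(con.execute("SELECT * FROM issues").fetchone())  # (1, 'Issue #1', 'cover.jpg')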

[File 4 of 4: the module containing ComicVineTalker; the filename is not shown in this capture]

@@ -327,10 +327,11 @@ class ComicVineTalker:
     def fetch_issues_by_volume(self, series_id: int) -> list[resulttypes.CVIssuesResults]:
         # before we search online, look in our cache, since we might already have this info
+        volume_data = self.fetch_volume_data(series_id)
         cvc = ComicCacher()
         cached_volume_issues_result = cvc.get_volume_issues_info(series_id, self.source_name)

-        if cached_volume_issues_result:
+        if len(cached_volume_issues_result) >= volume_data["count_of_issues"]:
             return cached_volume_issues_result

         params = {
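A minimal sketch of the behavior change in this hunk, with hypothetical names and a plain dict standing in for the SQLite cache: the cached result is now only trusted when it holds at least as many issues as the volume record says exist, so a partially populated cache entry is treated as a miss instead of being returned as-is.

def fetch_issues(series_id: int, cache: dict[int, list[str]], expected: int) -> list[str]:
    cached = cache.get(series_id, [])
    if len(cached) >= expected:  # an incomplete cache entry counts as a miss
        return cached
    fetched = [f"issue {n}" for n in range(1, expected + 1)]  # stand-in for the API call
    cache[series_id] = fetched  # write back so the next lookup sees a complete entry
    return fetched

cache: dict[int, list[str]] = {42: ["issue 1"]}  # stale, partial entry
print(fetch_issues(42, cache, expected=3))       # refetches and caches all three issues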
@@ -407,6 +408,10 @@ class ComicVineTalker:
         self.repair_urls(filtered_issues_result)

+        cvc = ComicCacher()
+        for c in filtered_issues_result:
+            cvc.add_volume_issues_info(self.source_name, c["volume"]["id"], [c])
+
         return filtered_issues_result

     def fetch_issue_data(self, series_id: int, issue_number: str, settings: ComicTaggerSettings) -> GenericMetadata:
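This looks like the companion to the previous hunk: after the fetched issues are repaired, each one is written back to the cache under its volume id, which is what lets the completeness check in fetch_issues_by_volume succeed on later runs.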
@@ -671,7 +676,7 @@ class ComicVineTalker:
     def fetch_alternate_cover_urls(self, issue_id: int, issue_page_url: str) -> list[str]:
         url_list = self.fetch_cached_alternate_cover_urls(issue_id)
-        if url_list is not None:
+        if url_list:
             return url_list

         # scrape the CV issue page URL to get the alternate cover URLs
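The last hunk tightens the cache check from an identity test to a truthiness test, so an empty cached list no longer short-circuits the scrape. A two-line illustration:

url_list: list[str] = []      # a cache miss represented as an empty list
print(url_list is not None)   # True  -> the old check returned the empty list
print(bool(url_list))         # False -> the new check falls through to scraping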