Fix a cache miss when retrieving multiple issues

This commit is contained in:
Timmy Welch 2025-01-30 01:39:26 -08:00
parent 7998944a71
commit 5ee467465a
2 changed files with 36 additions and 23 deletions

View File

@@ -21,7 +21,7 @@ import logging
import os
import pathlib
import sqlite3
from typing import Any
from typing import Any, Generic, TypeVar
from typing_extensions import NamedTuple
@@ -39,6 +39,14 @@ class Issue(NamedTuple):
data: bytes
T = TypeVar("T", Issue, Series)
class CacheResult(NamedTuple, Generic[T]):
data: T
complete: bool
class ComicCacher:
def __init__(self, cache_folder: pathlib.Path, version: str) -> None:
self.cache_folder = cache_folder
@@ -174,7 +182,7 @@ class ComicCacher:
}
self.upsert(cur, "issues", data)
def get_search_results(self, source: str, search_term: str, expire_stale: bool = True) -> list[tuple[Series, bool]]:
def get_search_results(self, source: str, search_term: str, expire_stale: bool = True) -> list[CacheResult[Series]]:
results = []
with sqlite3.connect(self.db_file) as con:
con.row_factory = sqlite3.Row
@@ -197,11 +205,11 @@ class ComicCacher:
for record in rows:
result = Series(id=record["id"], data=record["data"])
results.append((result, record["complete"]))
results.append(CacheResult(result, record["complete"]))
return results
def get_series_info(self, series_id: str, source: str, expire_stale: bool = True) -> tuple[Series, bool] | None:
def get_series_info(self, series_id: str, source: str, expire_stale: bool = True) -> CacheResult[Series] | None:
with sqlite3.connect(self.db_file) as con:
con.row_factory = sqlite3.Row
cur = con.cursor()
@@ -220,11 +228,11 @@ class ComicCacher:
result = Series(id=row["id"], data=row["data"])
return (result, row["complete"])
return CacheResult(result, row["complete"])
def get_series_issues_info(
self, series_id: str, source: str, expire_stale: bool = True
) -> list[tuple[Issue, bool]]:
) -> list[CacheResult[Issue]]:
with sqlite3.connect(self.db_file) as con:
con.row_factory = sqlite3.Row
cur = con.cursor()
@@ -234,20 +242,20 @@ class ComicCacher:
self.expire_stale_records(cur, "Issues")
# fetch
results: list[tuple[Issue, bool]] = []
results: list[CacheResult[Issue]] = []
cur.execute("SELECT * FROM Issues WHERE series_id=? AND source=?", [series_id, source])
rows = cur.fetchall()
# now process the results
for row in rows:
record = (Issue(id=row["id"], series_id=row["series_id"], data=row["data"]), row["complete"])
record = CacheResult(Issue(id=row["id"], series_id=row["series_id"], data=row["data"]), row["complete"])
results.append(record)
return results
def get_issue_info(self, issue_id: str, source: str, expire_stale: bool = True) -> tuple[Issue, bool] | None:
def get_issue_info(self, issue_id: str, source: str, expire_stale: bool = True) -> CacheResult[Issue] | None:
with sqlite3.connect(self.db_file) as con:
con.row_factory = sqlite3.Row
cur = con.cursor()
@@ -262,7 +270,7 @@ class ComicCacher:
record = None
if row:
record = (Issue(id=row["id"], series_id=row["series_id"], data=row["data"]), row["complete"])
record = CacheResult(Issue(id=row["id"], series_id=row["series_id"], data=row["data"]), row["complete"])
return record
@@ -297,7 +305,7 @@ class ComicCacher:
sql_ins = f"INSERT OR REPLACE INTO {tablename} ({keys}) VALUES ({ins_slots})"
if not data.get("complete", True):
sql_ins += f" ON CONFLICT DO UPDATE SET {set_slots} WHERE complete != ?"
vals.extend(vals)
vals.extend(vals.copy())
vals.append(True) # If the cache is complete and this isn't complete we don't update it
cur.execute(sql_ins, vals)

View File

@@ -485,7 +485,7 @@ class ComicVineTalker(ComicTalker):
for issue_id in issue_ids:
cached_issue = cvc.get_issue_info(issue_id, self.id)
if cached_issue and cached_issue[1]:
if cached_issue is not None:
cached_results.append(
self._map_comic_issue_to_metadata(
json.loads(cached_issue[0].data),
@@ -531,21 +531,26 @@ class ComicVineTalker(ComicTalker):
series_info = {s[0].id: s[0] for s in self._fetch_series([int(i["volume"]["id"]) for i in issue_results])}
cache_issue: list[Issue] = []
for issue in issue_results:
cvc.add_issues_info(
self.id,
[
Issue(
id=str(issue["id"]),
series_id=str(issue["volume"]["id"]),
data=json.dumps(issue).encode("utf-8"),
),
],
False, # The /issues/ endpoint never provides credits
cache_issue.append(
Issue(
id=str(issue["id"]),
series_id=str(issue["volume"]["id"]),
data=json.dumps(issue).encode("utf-8"),
)
)
cached_results.append(
self._map_comic_issue_to_metadata(issue, series_info[str(issue["volume"]["id"])]),
)
from pprint import pp
pp(cache_issue, indent=2)
cvc.add_issues_info(
self.id,
cache_issue,
False, # The /issues/ endpoint never provides credits
)
return cached_results
@@ -820,7 +825,7 @@ class ComicVineTalker(ComicTalker):
cached_issue = cvc.get_issue_info(issue_id, self.id)
logger.debug("Issue cached: %s", bool(cached_issue and cached_issue[1]))
if cached_issue and cached_issue[1]:
if cached_issue and cached_issue.complete:
return self._map_comic_issue_to_metadata(
json.loads(cached_issue[0].data), self._fetch_series_data(int(cached_issue[0].series_id))[0]
)