Compare commits


5 Commits

SHA1 Message Date
aa7b613f4b Add an option to return simple results 2024-08-05 13:54:00 -07:00
c078c60f29 static check fixes 2024-08-04 18:26:43 -07:00
f6631a01a2 more stuff 2024-08-04 18:12:00 -07:00
921019b0d4 stuff 2024-07-31 11:35:17 -07:00
04f3ae1e64 add output from inital image hash comparison 2024-05-10 14:12:39 -07:00
32 changed files with 814 additions and 4627 deletions

View File

@@ -1,6 +1,6 @@
repos:
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v5.0.0
rev: v4.4.0
hooks:
- id: trailing-whitespace
args: [--markdown-linebreak-ext=.gitignore]
@@ -13,30 +13,6 @@ repos:
- id: go-imports
args: [-w]
- repo: https://github.com/golangci/golangci-lint
rev: v1.64.5
rev: v1.53.3
hooks:
- id: golangci-lint
- repo: https://github.com/asottile/setup-cfg-fmt
rev: v2.7.0
hooks:
- id: setup-cfg-fmt
- repo: https://github.com/asottile/reorder-python-imports
rev: v3.14.0
hooks:
- id: reorder-python-imports
args: [--py38-plus, --add-import, 'from __future__ import annotations']
- repo: https://github.com/asottile/add-trailing-comma
rev: v3.1.0
hooks:
- id: add-trailing-comma
- repo: https://github.com/asottile/pyupgrade
rev: v3.19.1
hooks:
- id: pyupgrade
args: [--py38-plus]
exclude: tests
- repo: https://github.com/hhatto/autopep8
rev: v2.3.2
hooks:
- id: autopep8

11
CHDB.go
View File

@@ -1,11 +0,0 @@
package ch
type CHDB interface {
// OpenCHDB(path string, comicvinePath string, deleteExisting bool) (CHDB, error)
PathHashed(path string) bool
PathDownloaded(path string) bool
AddPath(path string)
CheckURL(url string) bool
AddURL(url string)
Close() error
}
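
Since the rest of this change revolves around it, here is a minimal sketch of how a caller might drive the CHDB interface; the paths are placeholders invented for illustration, and the snippet assumes the Bolt implementation shown below:

package main

import (
	ch "gitea.narnian.us/lordwelch/comic-hasher"
)

func main() {
	// Placeholder paths, not values from this diff.
	db, err := ch.OpenCHDBBolt("/tmp/ch.bolt", "/tmp/comicvine", false)
	if err != nil {
		panic(err)
	}
	defer db.Close()

	cover := "/tmp/comicvine/_image/123/456/thumb_url.jpg"
	// Record a downloaded cover so later passes can skip it.
	if !db.PathDownloaded(cover) {
		db.AddPath(cover)
	}
}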

View File

@@ -1,177 +0,0 @@
package ch
import (
"fmt"
"log"
"os"
"path/filepath"
"slices"
bolt "go.etcd.io/bbolt"
)
type CHDBBolt struct {
comicvinePath string
db *bolt.DB
deleteExisting bool
}
func OpenCHDBBolt(path string, comicvinePath string, deleteExisting bool) (CHDBBolt, error) {
path, _ = filepath.Abs(path)
err := os.MkdirAll(filepath.Dir(path), 0o755)
if err != nil {
panic("Unable to create directory " + filepath.Dir(path))
}
db, err := bolt.Open(path, 0o644, nil)
if err != nil {
return CHDBBolt{comicvinePath, db, deleteExisting}, fmt.Errorf("failed to open database: %w", err)
}
err = db.Update(func(tx *bolt.Tx) error {
_, err = tx.CreateBucketIfNotExists([]byte("paths"))
if err != nil {
return fmt.Errorf("failed to create bucket %v: %w", "paths", err)
}
_, err = tx.CreateBucketIfNotExists([]byte("bad_urls"))
if err != nil {
return fmt.Errorf("failed to create bucket %v: %w", "paths", err)
}
return nil
})
if err != nil {
db.Close()
return CHDBBolt{comicvinePath, db, deleteExisting}, fmt.Errorf("failed to init database: %w", err)
}
return CHDBBolt{comicvinePath, db, deleteExisting}, nil
}
func (c CHDBBolt) Import(paths []string, bad_urls []string) {
slices.Sort(paths)
slices.Sort(bad_urls)
c.db.Update(func(tx *bolt.Tx) error {
p := tx.Bucket([]byte("paths"))
b := tx.Bucket([]byte("bad_urls"))
for _, path := range paths {
p.Put([]byte(path), []byte{})
}
for _, url := range bad_urls {
b.Put([]byte(url), []byte{})
}
return nil
})
}
func (c CHDBBolt) Dump() (paths []string, bad_urls []string) {
c.db.View(func(tx *bolt.Tx) error {
p := tx.Bucket([]byte("paths"))
b := tx.Bucket([]byte("bad_urls"))
paths = make([]string, 0, p.Inspect().KeyN)
bad_urls = make([]string, 0, b.Inspect().KeyN)
b.ForEach(func(k, v []byte) error {
bad_urls = append(bad_urls, string(k))
return nil
})
p.ForEach(func(k, v []byte) error {
paths = append(paths, string(k))
return nil
})
return nil
})
return paths, bad_urls
}
func (c CHDBBolt) PathHashed(path string) bool {
path, _ = filepath.Rel(c.comicvinePath, path)
tx, err := c.db.Begin(false)
if err != nil {
return false
}
defer tx.Rollback()
b := tx.Bucket([]byte("paths"))
dbRes := b.Get([]byte(path))
if dbRes != nil {
if c.deleteExisting {
os.Remove(filepath.Join(c.comicvinePath, path))
}
return true
}
return false
}
func (c CHDBBolt) PathDownloaded(path string) bool {
relPath, _ := filepath.Rel(c.comicvinePath, path)
tx, err := c.db.Begin(false)
if err != nil {
return false
}
defer tx.Rollback()
b := tx.Bucket([]byte("paths"))
dbRes := b.Get([]byte(relPath))
if dbRes == nil {
f, err := os.Open(path)
if err == nil {
defer f.Close()
}
return !os.IsNotExist(err)
}
return true
}
func (c CHDBBolt) AddPath(path string) {
relPath, _ := filepath.Rel(c.comicvinePath, path)
tx, err := c.db.Begin(true)
if err != nil {
c.db.Logger().Errorf("Failed to open transaction: %v", err)
return // tx is nil; nothing to roll back
}
defer tx.Rollback()
b := tx.Bucket([]byte("paths"))
err = b.Put([]byte(relPath), []byte{})
if err != nil {
log.Println(fmt.Errorf("Failed to insert %v (%v) into paths: %w", path, relPath, err))
}
tx.Commit()
if c.deleteExisting {
_ = os.Remove(path)
_ = RmdirP(filepath.Dir(path))
}
}
func (c CHDBBolt) CheckURL(url string) bool {
tx, err := c.db.Begin(false) // read-only lookup; a write transaction is not needed
if err != nil {
return false
}
defer tx.Rollback()
b := tx.Bucket([]byte("bad_urls"))
return b.Get([]byte(url)) != nil
}
func (c CHDBBolt) AddURL(url string) {
tx, err := c.db.Begin(true)
if err != nil {
c.db.Logger().Errorf("Failed to open transaction: %v", err)
return // tx is nil; nothing to roll back
}
defer tx.Rollback()
b := tx.Bucket([]byte("bad_urls"))
err = b.Put([]byte(url), []byte{})
if err != nil {
log.Println(fmt.Errorf("Failed to insert %v into bad_urls: %w", url, err))
}
tx.Commit()
}
func (c CHDBBolt) Close() error {
return c.db.Close()
}

View File

@@ -1,142 +0,0 @@
package ch
import (
"database/sql"
"fmt"
"log"
"os"
"path/filepath"
_ "modernc.org/sqlite"
)
type CHDBSqlite struct {
comicvinePath string
sql *sql.DB
deleteExisting bool
}
func OpenCHDBSqlite(path string, comicvinePath string, deleteExisting bool) (CHDBSqlite, error) {
path, _ = filepath.Abs(path)
err := os.MkdirAll(filepath.Dir(path), 0o755)
if err != nil {
panic("Unable to create directory " + filepath.Dir(path))
}
println(fmt.Sprintf("file://%s?&_pragma=busy_timeout(500)&_pragma=journal_mode(wal)", path))
sql, err := sql.Open("sqlite", fmt.Sprintf("file://%s?&_pragma=busy_timeout(500)&_pragma=journal_mode(wal)", path))
if err != nil {
return CHDBSqlite{comicvinePath, sql, deleteExisting}, fmt.Errorf("Failed to open database: %w", err)
}
err = sql.Ping()
if err != nil {
return CHDBSqlite{comicvinePath, sql, deleteExisting}, fmt.Errorf("Failed to open database: %w", err)
}
_, err = sql.Exec(`
CREATE TABLE IF NOT EXISTS paths(
path STRING PRIMARY KEY
);
CREATE TABLE IF NOT EXISTS bad_urls(
url STRING PRIMARY KEY
);
`)
if err != nil {
err = fmt.Errorf("Failed to create table: %w", err)
}
return CHDBSqlite{comicvinePath, sql, deleteExisting}, err
}
func (s CHDBSqlite) Dump() (paths []string, bad_urls []string) {
rows, err := s.sql.Query("SELECT path from paths")
if err != nil {
panic(err)
}
for rows.Next() {
var value string
err = rows.Scan(&value)
if err != nil {
panic(err)
}
paths = append(paths, value)
}
rows.Close()
rows, err = s.sql.Query("SELECT url from bad_urls")
if err != nil {
panic(err)
}
for rows.Next() {
var value string
err = rows.Scan(&value)
if err != nil {
panic(err)
}
bad_urls = append(bad_urls, value)
}
rows.Close()
return paths, bad_urls
}
func (s CHDBSqlite) PathHashed(path string) bool {
path, _ = filepath.Rel(s.comicvinePath, path)
dbPath := ""
if s.deleteExisting {
_ = s.sql.QueryRow("SELECT path FROM paths where path=?", path).Scan(&dbPath)
if dbPath == path {
os.Remove(filepath.Join(s.comicvinePath, path))
}
return dbPath == path
}
count := 0
_ = s.sql.QueryRow("SELECT count(path) FROM paths where path=?", path).Scan(&count)
return count > 0
}
func (s CHDBSqlite) PathDownloaded(path string) bool {
relPath, _ := filepath.Rel(s.comicvinePath, path)
count := 0
_ = s.sql.QueryRow("SELECT count(path) FROM paths where path=?", relPath).Scan(&count)
if count != 1 {
f, err := os.Open(path)
if err == nil {
defer f.Close()
}
return !os.IsNotExist(err)
}
return true
}
func (s CHDBSqlite) AddPath(path string) {
relPath, _ := filepath.Rel(s.comicvinePath, path)
_, err := s.sql.Exec("INSERT INTO paths VALUES(?) ON CONFLICT DO NOTHING", relPath)
if err != nil {
log.Println(fmt.Errorf("Failed to insert %v into paths: %w", relPath, err))
}
if s.deleteExisting {
_ = os.Remove(path)
_ = RmdirP(filepath.Dir(path))
}
}
func (s CHDBSqlite) CheckURL(url string) bool {
count := 0
_ = s.sql.QueryRow("SELECT count(url) FROM bad_urls where url=?", url).Scan(&count)
return count > 0
}
func (s CHDBSqlite) AddURL(url string) {
_, err := s.sql.Exec("INSERT INTO bad_urls VALUES(?) ON CONFLICT DO NOTHING", url)
if err != nil {
log.Println(fmt.Errorf("Failed to insert %v into bad_urls: %w", url, err))
}
}
func (s CHDBSqlite) Close() error {
return s.sql.Close()
}
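
The SQLite variant behaves the same through the interface; a small hedged sketch of a round trip through AddURL/CheckURL (the paths and URL are invented for illustration):

package main

import (
	"fmt"

	ch "gitea.narnian.us/lordwelch/comic-hasher"
)

func main() {
	// Placeholder paths; OpenCHDBSqlite creates the parent directory itself.
	db, err := ch.OpenCHDBSqlite("/tmp/ch.sqlite", "/tmp/comicvine", false)
	if err != nil {
		panic(err)
	}
	defer db.Close()

	bad := "https://comicvine.gamespot.com/a/uploads/6373148-blank.png"
	db.AddURL(bad)
	fmt.Println(db.CheckURL(bad)) // true: the URL is now recorded as bad
}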

View File

@@ -1,31 +0,0 @@
package main
import (
"fmt"
"os"
ch "gitea.narnian.us/lordwelch/comic-hasher"
)
func main() {
fmt.Printf("cv path: %s Sqlite path: %s Bolt path: %s\n", os.Args[1], os.Args[2], os.Args[3])
sql, err := ch.OpenCHDBSqlite(os.Args[2], os.Args[1], false)
if err != nil {
panic(err)
}
db, err := ch.OpenCHDBBolt(os.Args[3], os.Args[1], false)
if err != nil {
panic(err)
}
paths, bad_urls := sql.Dump()
fmt.Printf("Dumped %d %d", len(paths), len(bad_urls))
db.Import(paths, bad_urls)
// for _, path := range paths {
// db.AddPath(filepath.Join(os.Args[1], path))
// }
// for _, url := range bad_urls {
// db.AddURL(url)
// }
sql.Close()
db.Close()
}

File diff suppressed because it is too large

View File

@@ -1,12 +0,0 @@
//go:build !unix
package main
import (
"os"
"os/signal"
)
func Notify(sig chan os.Signal) {
signal.Notify(sig, os.Interrupt, os.Kill)
}

View File

@@ -1,13 +0,0 @@
//go:build unix
package main
import (
"os"
"os/signal"
"syscall"
)
func Notify(sig chan os.Signal) {
signal.Notify(sig, os.Interrupt, syscall.SIGQUIT, syscall.SIGTERM)
}
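
The two Notify variants above pick a platform-appropriate signal set via build tags; a sketch of the calling pattern, placed in the same main package as those files (the shutdown body is an assumption):

package main

import (
	"log"
	"os"
)

func main() {
	sig := make(chan os.Signal, 1)
	Notify(sig) // resolves to the unix or !unix variant at build time
	s := <-sig
	log.Println("received", s, "- shutting down") // placeholder shutdown logic
}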

View File

@@ -1,366 +0,0 @@
package main
import (
"bufio"
"cmp"
"context"
"encoding/json"
"fmt"
"image"
"io/fs"
"log"
"net/http"
"net/url"
"os"
"path/filepath"
"strconv"
"strings"
"time"
ch "gitea.narnian.us/lordwelch/comic-hasher"
"gitea.narnian.us/lordwelch/goimagehash"
"golang.org/x/exp/slices"
)
type Server struct {
httpServer *http.Server
mux *CHMux
BaseURL *url.URL
hashes ch.HashStorage
Context context.Context
cancel func()
signalQueue chan os.Signal
readerQueue chan string
hashingQueue chan ch.Im
mappingQueue chan ch.ImageHash
onlyHashNewIDs bool
version string
}
type CHMux struct {
version string
*http.ServeMux
}
func (CHM *CHMux) ServeHTTP(w http.ResponseWriter, r *http.Request) {
w.Header().Set("Server", "Comic-Hasher "+CHM.version)
CHM.ServeMux.ServeHTTP(w, r)
}
func (s *Server) authenticated(w http.ResponseWriter, r *http.Request) (string, bool) {
return strings.TrimSpace("lordwelch"), true
}
func (s *Server) setupAppHandlers() {
s.mux.HandleFunc("/add_cover", s.addCover)
s.mux.HandleFunc("/match_cover_hash", s.matchCoverHash)
s.mux.HandleFunc("/associate_ids", s.associateIDs)
}
func (s *Server) associateIDs(w http.ResponseWriter, r *http.Request) {
user, authed := s.authenticated(w, r)
if !authed || user == "" {
http.Error(w, "Invalid Auth", http.StatusForbidden)
return
}
var (
values = r.URL.Query()
domain = ch.Source(strings.ToLower(strings.TrimSpace(values.Get("domain"))))
ID = strings.ToLower(strings.TrimSpace(values.Get("id")))
newDomain = ch.Source(strings.ToLower(strings.TrimSpace(values.Get("newDomain"))))
newID = strings.ToLower(strings.TrimSpace(values.Get("newID")))
)
if ID == "" {
msg := "No ID Provided"
log.Println(msg)
writeJson(w, http.StatusBadRequest, result{Msg: msg})
return
}
if domain == "" {
msg := "No domain Provided"
log.Println(msg)
writeJson(w, http.StatusBadRequest, result{Msg: msg})
return
}
if newID == "" {
msg := "No newID Provided"
log.Println(msg)
writeJson(w, http.StatusBadRequest, result{Msg: msg})
return
}
if newDomain == "" {
msg := "No newDomain Provided"
log.Println(msg)
writeJson(w, http.StatusBadRequest, result{Msg: msg})
return
}
if newDomain == domain {
msg := "newDomain cannot be the same as the existing domain"
log.Println(msg)
writeJson(w, http.StatusBadRequest, result{Msg: msg})
return
}
log.Printf("Attempting to associate %s:%s to %s:%s", domain, ID, newDomain, newID)
err := s.hashes.AssociateIDs([]ch.NewIDs{{
OldID: ch.ID{
Domain: &domain,
ID: ID,
},
NewID: ch.ID{
Domain: &newDomain,
ID: newID,
},
}})
if err == nil {
writeJson(w, http.StatusOK, result{Msg: "New ID added"})
} else {
writeJson(w, http.StatusOK, result{Msg: err.Error()})
}
}
type result struct {
Results []ch.Result `json:"results,omitempty"`
Msg string `json:"msg,omitempty"`
}
func writeJson(w http.ResponseWriter, status int, res result) {
w.Header().Set("Content-Type", "application/json; charset=utf-8")
w.Header().Set("X-Content-Type-Options", "nosniff")
var (
bytes []byte
err error
)
if bytes, err = json.Marshal(res); err != nil {
bytes, _ = json.Marshal(result{Msg: fmt.Sprintf("Failed to create json: %s", err)})
}
w.WriteHeader(status)
_, _ = w.Write(bytes)
_, _ = w.Write([]byte("\n"))
}
func (s *Server) matchCoverHash(w http.ResponseWriter, r *http.Request) {
user, authed := s.authenticated(w, r)
if !authed || user == "" {
http.Error(w, "Invalid Auth", http.StatusForbidden)
return
}
var (
values = r.URL.Query()
ahashStr = strings.TrimSpace(values.Get("ahash"))
dhashStr = strings.TrimSpace(values.Get("dhash"))
phashStr = strings.TrimSpace(values.Get("phash"))
maxStr = strings.TrimSpace(values.Get("max"))
exactOnly = strings.ToLower(strings.TrimSpace(values.Get("exactOnly"))) != "false"
simple = strings.ToLower(strings.TrimSpace(values.Get("simple"))) == "true"
ahash uint64
dhash uint64
phash uint64
max int = 8
max_tmp int
err error
hashes []ch.Hash
)
if simple {
writeJson(w, http.StatusBadRequest, result{Msg: "Simple results are no longer Supported"})
return
}
if ahash, err = strconv.ParseUint(ahashStr, 16, 64); err != nil && ahashStr != "" {
log.Printf("could not parse ahash: %s", ahashStr)
writeJson(w, http.StatusBadRequest, result{Msg: "hash parse failed"})
return
}
if ahash > 0 {
hashes = append(hashes, ch.Hash{Hash: ahash, Kind: goimagehash.AHash})
}
if dhash, err = strconv.ParseUint(dhashStr, 16, 64); err != nil && dhashStr != "" {
log.Printf("could not parse dhash: %s", dhashStr)
writeJson(w, http.StatusBadRequest, result{Msg: "hash parse failed"})
return
}
if dhash > 0 {
hashes = append(hashes, ch.Hash{Hash: dhash, Kind: goimagehash.DHash})
}
if phash, err = strconv.ParseUint(phashStr, 16, 64); err != nil && phashStr != "" {
log.Printf("could not parse phash: %s", phashStr)
writeJson(w, http.StatusBadRequest, result{Msg: "hash parse failed"})
return
}
if phash > 0 {
hashes = append(hashes, ch.Hash{Hash: phash, Kind: goimagehash.PHash})
}
if max_tmp, err = strconv.Atoi(maxStr); err != nil && maxStr != "" {
log.Printf("Invalid Max: %s", maxStr)
writeJson(w, http.StatusBadRequest, result{Msg: fmt.Sprintf("Invalid Max: %s", maxStr)})
return
}
if maxStr != "" {
max = max_tmp
}
if max > 8 {
log.Printf("Max must be less than 9: %d", max)
writeJson(w, http.StatusBadRequest, result{Msg: fmt.Sprintf("Max must be less than 9: %d", max)})
return
}
matches, err := s.hashes.GetMatches(hashes, max, exactOnly)
slices.SortFunc(matches, func(a ch.Result, b ch.Result) int {
return cmp.Compare(a.Distance, b.Distance)
})
if err != nil {
log.Println(err)
}
if len(matches) > 0 {
var msg string = ""
if err != nil {
msg = err.Error()
}
writeJson(w, http.StatusOK, result{
Results: matches,
Msg: msg,
})
return
}
writeJson(w, http.StatusNotFound, result{Msg: "No hashes found"})
}
func (s *Server) addCover(w http.ResponseWriter, r *http.Request) {
user, authed := s.authenticated(w, r)
if !authed || user == "" {
http.Error(w, "Invalid Auth", http.StatusForbidden)
return
}
if true {
w.WriteHeader(http.StatusNotImplemented)
return
}
var (
values = r.URL.Query()
domain = strings.TrimSpace(values.Get("domain"))
ID = strings.TrimSpace(values.Get("id"))
)
if ID == "" {
log.Println("No ID Provided")
writeJson(w, http.StatusBadRequest, result{Msg: "No ID Provided"})
return
}
if domain == "" {
log.Println("No domain Provided")
writeJson(w, http.StatusBadRequest, result{Msg: "No Domain Provided"})
return
}
i, format, err := image.Decode(r.Body)
if err != nil {
msg := fmt.Sprintf("Failed to decode Image: %s", err)
log.Println(msg)
writeJson(w, http.StatusBadRequest, result{Msg: msg})
return
}
log.Printf("Decoded %s image from %s", format, user)
select {
case <-s.Context.Done():
log.Println("Recieved quit")
return
default:
}
s.hashingQueue <- ch.Im{Im: i, Format: format, ID: ch.ID{Domain: ch.NewSource(domain), ID: ID}}
writeJson(w, http.StatusOK, result{Msg: "Success"})
}
func (s *Server) mapper(done func()) {
defer done()
for hash := range s.mappingQueue {
s.hashes.MapHashes(hash)
}
}
func (s *Server) hasher(workerID int, done func(int)) {
defer done(workerID)
for image := range s.hashingQueue {
start := time.Now()
if image.NewOnly && len(s.hashes.GetIDs(image.ID)) > 0 {
log.Printf("Skipping existing hash with ID: %s found", image.ID)
continue
}
hash := ch.HashImage(image)
if *hash.ID.Domain == "" || hash.ID.ID == "" {
continue
}
select {
// TODO: Check channel pipelines
case s.mappingQueue <- hash:
default:
}
elapsed := time.Since(start)
log.Printf("Hashing took %v: worker: %v. %s: %064b id: %s\n", elapsed, workerID, hash.Hashes[0].Kind, hash.Hashes[0].Hash, hash.ID)
}
}
func (s *Server) reader(workerID int, done func(i int)) {
defer done(workerID)
for path := range s.readerQueue {
id := ch.ID{
Domain: ch.NewSource(filepath.Base(filepath.Dir(filepath.Dir(path)))),
ID: filepath.Base(filepath.Dir(path)),
}
if len(s.hashes.GetIDs(id)) > 0 {
continue
}
file, err := os.Open(path)
if err != nil {
panic(err)
}
i, format, err := image.Decode(bufio.NewReader(file))
file.Close()
if err != nil {
continue // skip this image
}
im := ch.Im{
Im: i,
Format: format,
ID: id,
NewOnly: s.onlyHashNewIDs,
}
select {
case s.hashingQueue <- im:
default:
}
}
}
func (s *Server) HashLocalImages(opts Opts) {
if opts.coverPath == "" {
return
}
go func() {
log.Println("Hashing covers at ", opts.coverPath)
start := time.Now()
err := filepath.WalkDir(opts.coverPath, func(path string, d fs.DirEntry, err error) error {
if err != nil {
return err
}
select {
case <-s.Context.Done():
log.Println("Recieved quit")
err = s.httpServer.Shutdown(context.TODO())
return fmt.Errorf("Recieved quit: %w", err)
default:
}
if d.IsDir() {
return nil
}
s.readerQueue <- path
return nil
})
elapsed := time.Since(start)
log.Println("Err:", err, "local hashing took", elapsed)
}()
}
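
To make the matchCoverHash contract above concrete, a hedged client sketch against the public instance mentioned later in this change (the phash value is invented):

package main

import (
	"fmt"
	"io"
	"net/http"
	"net/url"
)

func main() {
	// Query parameters mirror matchCoverHash above; the hash value is invented.
	v := url.Values{}
	v.Set("phash", "8f3b1c0a52e49d17")
	v.Set("max", "8")
	v.Set("exactOnly", "false")
	resp, err := http.Get("https://comic-hasher.narnian.us/match_cover_hash?" + v.Encode())
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()
	body, _ := io.ReadAll(resp.Body)
	fmt.Println(resp.Status, string(body)) // JSON: {"results": [...]} or {"msg": "..."}
}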

View File

@@ -1,17 +0,0 @@
//go:build main
package main
import (
"fmt"
"time"
)
func main() {
tmp := make([]string, 0, 932456)
for range 932460 {
tmp = append(tmp, "comicvine.gamespot.com:123456")
}
fmt.Println(len(tmp))
time.Sleep(time.Minute)
}

View File

@@ -1,19 +1,11 @@
from __future__ import annotations
import argparse
import pathlib
import sys
from typing import Collection
from typing import Sequence
import imagehash
import numpy
from typing import Collection, Sequence
from PIL import Image
import argparse,pathlib,numpy,imagehash,sys
ap = argparse.ArgumentParser()
ap.add_argument('--file', type=pathlib.Path)
ap.add_argument('--debug', action='store_true')
ap.add_argument("--file", type=pathlib.Path)
ap.add_argument("--debug", action='store_true')
opts = ap.parse_args()
opts.file = pathlib.Path(opts.file)
@@ -32,7 +24,7 @@ def print_image(image: Image.Image) -> None:
if isinstance(i, Collection):
print('{ ', end='', file=sys.stderr)
for idx, x in enumerate(i):
if idx == len(i) - 1:
if idx == len(i)-1:
print(f'{int(x):03d} ', end='', file=sys.stderr)
else:
print(f'{int(x):03d}, ', end='', file=sys.stderr)
@@ -41,29 +33,28 @@ def print_image(image: Image.Image) -> None:
print(f'{int(i):03d}, ', end='', file=sys.stderr)
print(']', file=sys.stderr)
def bin_str(hash):
return ''.join(str(b) for b in 1 * hash.hash.flatten())
if opts.debug:
image.save('py.rgb.png')
print('rgb', file=sys.stderr)
image.save("py.rgb.png")
print("rgb", file=sys.stderr)
print_image(image)
print(file=sys.stderr)
if opts.debug:
gray.save('py.gray.png')
print('gray', file=sys.stderr)
gray.save("py.gray.png")
print("gray", file=sys.stderr)
print_image(gray)
print(file=sys.stderr)
if opts.debug:
resized.save('py.resized.png')
print('resized', file=sys.stderr)
resized.save("py.resized.png")
print("resized", file=sys.stderr)
print_image(resized)
print(file=sys.stderr)
print('ahash: ', str(imagehash.average_hash(image)))
print('dhash: ', str(imagehash.dhash(image)))
print('phash: ', str(imagehash.phash(image)))
print('ahash: ', bin_str(imagehash.average_hash(image)))
print('dhash: ', bin_str(imagehash.dhash(image)))
print('phash: ', bin_str(imagehash.phash(image)))

View File

@@ -106,9 +106,9 @@ func main() {
debugImage(debugim, 8, 8)
}
hash := ch.HashImage(ch.Im{Im: im, Format: format, ID: ch.ID{Domain: ch.NewSource(ch.ComicVine), ID: "nothing"}})
hash := ch.HashImage(ch.Im{Im: im, Format: format, Domain: ch.Source(ch.ComicVine), ID: "nothing"})
fmt.Println("ahash: ", goimagehash.NewImageHash(hash.Hashes[0].Hash, hash.Hashes[0].Kind).BinString())
fmt.Println("dhash: ", goimagehash.NewImageHash(hash.Hashes[1].Hash, hash.Hashes[1].Kind).BinString())
fmt.Println("phash: ", goimagehash.NewImageHash(hash.Hashes[2].Hash, hash.Hashes[2].Kind).BinString())
fmt.Println("ahash: ", hash.Ahash.BinString())
fmt.Println("dhash: ", hash.Dhash.BinString())
fmt.Println("phash: ", hash.Phash.BinString())
}

View File

@@ -1,467 +1,91 @@
from __future__ import annotations
import argparse
import itertools
import json
import logging
import pathlib
from datetime import datetime
from enum import auto
from io import BytesIO
from typing import Any
from typing import cast
from typing import TypedDict
from urllib.parse import urljoin
from PIL import Image
import appdirs
import comictaggerlib.cli
import imagehash
import requests
import settngs
from comicapi import comicarchive
from comicapi import merge
from comicapi.genericmetadata import GenericMetadata
import pathlib, imagehash, requests
import settngs, comictaggerlib.cli
from io import BytesIO
from comicapi import comicarchive, merge
from datetime import datetime
from comicapi import utils
from comicapi.genericmetadata import GenericMetadata
from comicapi.issuestring import IssueString
from comictalker.comiccacher import ComicCacher
from comictalker.comiccacher import Issue
from comictalker.comiccacher import Series
from comictalker.comictalker import ComicSeries
from comictaggerlib import ctversion
from comictaggerlib.cbltransformer import CBLTransformer
from comictaggerlib.ctsettings.settngs_namespace import SettngsNS
from comictalker.talker_utils import cleanup_html
from comictalker.talkers.comicvine import ComicVineTalker
from comictalker.talkers.comicvine import CVIssue
from comictalker.talkers.comicvine import CVResult
from comictalker.talkers.comicvine import CVSeries
from PIL import Image
logger = logging.getLogger('quick_tag')
logger = logging.getLogger("quick_tag")
__version__ = '0.1'
class CV(ComicVineTalker):
def fetch_comics(self, *, issue_ids: list[str]) -> list[GenericMetadata]:
# before we search online, look in our cache, since we might already have this info
cvc = ComicCacher(self.cache_folder, self.version)
cached_results: list[GenericMetadata] = []
needed_issues: list[int] = []
for issue_id in issue_ids:
cached_issue = cvc.get_issue_info(issue_id, self.id)
if cached_issue and cached_issue[1]:
cached_results.append(
self._map_comic_issue_to_metadata(
json.loads(cached_issue[0].data), self._fetch_series([int(cached_issue[0].series_id)])[0][0],
),
)
else:
needed_issues.append(int(issue_id)) # CV uses integers for its IDs
if not needed_issues:
return cached_results
issue_filter = ""
for iid in needed_issues:
issue_filter += str(iid) + "|"
flt = "id:" + issue_filter.rstrip('|')
issue_url = urljoin(self.api_url, "issues/")
params: dict[str, Any] = {
"api_key": self.api_key,
"format": "json",
"filter": flt,
}
cv_response: CVResult[list[CVIssue]] = self._get_cv_content(issue_url, params)
issue_results = cv_response["results"]
page = 1
offset = 0
current_result_count = cv_response["number_of_page_results"]
total_result_count = cv_response["number_of_total_results"]
# see if we need to keep asking for more pages...
while current_result_count < total_result_count:
page += 1
offset += cv_response["number_of_page_results"]
params["offset"] = offset
cv_response = self._get_cv_content(issue_url, params)
issue_results.extend(cv_response["results"])
current_result_count += cv_response["number_of_page_results"]
series_info = {s[0].id: s[0] for s in self._fetch_series([int(i["volume"]["id"]) for i in issue_results])}
for issue in issue_results:
cvc.add_issues_info(
self.id,
[
Issue(
id=str(issue["id"]),
series_id=str(issue["volume"]["id"]),
data=json.dumps(issue).encode("utf-8"),
),
],
True,
)
cached_results.append(
self._map_comic_issue_to_metadata(issue, series_info[str(issue["volume"]["id"])]),
)
return cached_results
def _fetch_series(self, series_ids: list[int]) -> list[tuple[ComicSeries, bool]]:
# before we search online, look in our cache, since we might already have this info
cvc = ComicCacher(self.cache_folder, self.version)
cached_results: list[tuple[ComicSeries, bool]] = []
needed_series: list[int] = []
for series_id in series_ids:
cached_series = cvc.get_series_info(str(series_id), self.id)
if cached_series is not None:
cached_results.append((self._format_series(json.loads(cached_series[0].data)), cached_series[1]))
else:
needed_series.append(series_id)
if needed_series == []:
return cached_results
series_filter = ""
for vid in needed_series:
series_filter += str(vid) + "|"
flt = "id:" + series_filter.rstrip('|') # CV uses volume to mean series
series_url = urljoin(self.api_url, "volumes/") # CV uses volume to mean series
params: dict[str, Any] = {
"api_key": self.api_key,
"format": "json",
"filter": flt,
}
cv_response: CVResult[list[CVSeries]] = self._get_cv_content(series_url, params)
series_results = cv_response["results"]
page = 1
offset = 0
current_result_count = cv_response["number_of_page_results"]
total_result_count = cv_response["number_of_total_results"]
# see if we need to keep asking for more pages...
while current_result_count < total_result_count:
page += 1
offset += cv_response["number_of_page_results"]
params["offset"] = offset
cv_response = self._get_cv_content(series_url, params)
series_results.extend(cv_response["results"])
current_result_count += cv_response["number_of_page_results"]
if series_results:
for series in series_results:
cvc.add_series_info(
self.id, Series(id=str(series["id"]), data=json.dumps(series).encode("utf-8")), True,
)
cached_results.append((self._format_series(series), True))
return cached_results
class HashType(utils.StrEnum):
AHASH = auto()
DHASH = auto()
PHASH = auto()
class SimpleResult(TypedDict):
Distance: int
# Mapping of domains (eg comicvine.gamespot.com) to IDs
IDList: dict[str, list[str]]
class Hash(TypedDict):
Hash: int
Kind: str
class Result(TypedDict):
# Mapping of domains (eg comicvine.gamespot.com) to IDs
IDList: dict[str, list[str]]
Distance: int
Hash: Hash
def ihash(types: str) -> list[str]:
result = []
types = types.casefold()
choices = ", ".join(HashType)
for typ in utils.split(types, ","):
if typ not in list(HashType):
raise argparse.ArgumentTypeError(f"invalid choice: {typ} (choose from {choices.upper()})")
result.append(HashType[typ.upper()])
if not result:
raise argparse.ArgumentTypeError(f"invalid choice: {types} (choose from {choices.upper()})")
return result
Distance: int
IDList: dict[str, list[str]] # Mapping of domains (eg comicvine.gamespot.com) to IDs
def settings(manager: settngs.Manager):
manager.add_setting(
'--url', '-u', default='https://comic-hasher.narnian.us',
type=utils.parse_url, help='Website to use for searching cover hashes',
)
manager.add_setting(
'--max', '-m', default=8, type=int,
help='Maximum score to allow. Lower score means more accurate',
)
manager.add_setting(
'--simple', '-s', default=False, action=argparse.BooleanOptionalAction,
help='Whether to retrieve simple results or full results',
)
manager.add_setting(
'--force-interactive', '-f', default=True, action=argparse.BooleanOptionalAction,
help='When not set will automatically tag comics that have a single match with a score of 4 or lower',
)
manager.add_setting(
'--aggressive-filtering', '-a', default=False, action=argparse.BooleanOptionalAction,
help='Will filter out worse matches if better matches are found',
)
manager.add_setting(
'--hash', default=['ahash', 'dhash', 'phash'], type=ihash,
help='Pick what hashes you want to use to search',
)
manager.add_setting(
'--skip-non-exact', default=True, action=argparse.BooleanOptionalAction,
help='Skip non-exact matches if we have exact matches',
)
manager.add_setting('--cv-api-key', '-c')
manager.add_setting('comic_archive', type=pathlib.Path)
manager.add_setting("--url", '-u', default='https://comic-hasher.narnian.us', type=utils.parse_url, help='Website to use for searching cover hashes')
manager.add_setting("--max","-m", default=8, type=int, help='Maximum score to allow. Lower score means more accurate')
manager.add_setting("--simple", "-s", default=True, action=argparse.BooleanOptionalAction, help='Whether to retrieve simple results or full results')
manager.add_setting("--force-interactive", "-f", default=True, action=argparse.BooleanOptionalAction, help='When not set will automatically tag comics that have a single match with a score of 4 or lower')
manager.add_setting("--cv-api-key", "-c")
manager.add_setting("comic_archive", type=pathlib.Path)
def SearchHashes(url: str, simple: bool, max: int, ahash: str, dhash: str, phash: str, skip_non_exact: bool) -> list[SimpleResult] | list[Result]:
resp = requests.get(
urljoin(url, '/match_cover_hash'),
{
'simple': simple,
'max': max,
'ahash': ahash,
'dhash': dhash,
'phash': phash,
'skipNonExact': skip_non_exact,
},
)
if resp.status_code != 200:
try:
text = resp.json()['msg']
except Exception:
text = resp.text
logger.error('message from server: %s', text)
raise SystemExit(3)
return resp.json()['results']
def SearchHashes(url: str, simple: bool, max: int, ahash: str, dhash: str, phash: str) -> list[SimpleResult]:
resp = requests.get(urljoin(url, '/match_cover_hash'), {"simple": simple, "max": max, "ahash":ahash, "dhash": dhash, "phash": phash})
if resp.status_code != 200:
logger.error("bad response from server: %s", resp.text)
raise SystemExit(3)
return resp.json()
def get_simple_results(results: list[SimpleResult], cv_api_key: str | None = None) -> list[tuple[int, GenericMetadata]]:
cache_dir = pathlib.Path(appdirs.user_cache_dir('quick_tag'))
cache_dir.mkdir(parents=True, exist_ok=True)
cv = CV(f"quick_tag/{__version__}", cache_dir)
cv.parse_settings({
'comicvine_key': cv_api_key,
'cv_use_series_start_as_volume': True,
})
md_results: list[tuple[int, GenericMetadata]] = []
results.sort(key=lambda r: r['Distance'])
all_cv_ids = set()
for res in results:
all_cv_ids.update(res['IDList']['comicvine.gamespot.com'])
# Do a bulk fetch of basic issue data
mds = cv.fetch_comics(issue_ids=list(all_cv_ids))
# Re-associate the md to the distance
for res in results:
for md in mds:
if md.issue_id in res['IDList']['comicvine.gamespot.com']:
md_results.append((res['Distance'], md))
return md_results
from comictalker.talkers.comicvine import ComicVineTalker
cache_dir = pathlib.Path(appdirs.user_cache_dir('quick_tag'))
cache_dir.mkdir(parents=True, exist_ok=True)
cv = ComicVineTalker(f"quick_tag/{__version__}",cache_dir)
cv.parse_settings({"comicvine_key": cv_api_key})
md_results: list[tuple[int, GenericMetadata]] = []
results.sort(key=lambda r: r['Distance'])
for result in results:
for cv_id in result['IDList']['comicvine.gamespot.com']:
md_results.append((result['Distance'], cv.fetch_comic_data(issue_id=cv_id)))
return md_results
def get_results(results: list[Result], cv_api_key: str | None = None) -> list[tuple[int, Hash, GenericMetadata]]:
cache_dir = pathlib.Path(appdirs.user_cache_dir('quick_tag'))
cache_dir.mkdir(parents=True, exist_ok=True)
cv = CV(f"quick_tag/{__version__}", cache_dir)
cv.parse_settings({
'comicvine_key': cv_api_key,
'cv_use_series_start_as_volume': True,
})
md_results: list[tuple[int, Hash, GenericMetadata]] = []
results.sort(key=lambda r: r['Distance'])
all_cv_ids = set()
for res in results:
all_cv_ids.update(res['IDList']['comicvine.gamespot.com'])
# Do a bulk fetch of basic issue data
mds = cv.fetch_comics(issue_ids=list(all_cv_ids))
def display_simple_results(md_results: list[tuple[int, GenericMetadata]], force_interactive=True) -> GenericMetadata:
if len(md_results) == 1 and md_results[0][0] <= 4 and not force_interactive:
return md_results[0][1]
for counter, r in enumerate(md_results, 1):
print(
" {}. {} #{} [{}] ({}/{}) - {} score: {}".format(
counter,
r[1].series,
r[1].issue,
r[1].publisher,
r[1].month,
r[1].year,
r[1].title,
r[0]
),
)
while True:
i = input(f'Please select a result to tag the comic with or "q" to quit: [1-{len(md_results)}] ').casefold()
if (i.isdigit() and int(i) in range(1, len(md_results) + 1)):
break
if i == 'q':
logger.warning("User quit without saving metadata")
raise SystemExit(4)
# Re-associate the md to the distance
for res in results:
for md in mds:
if md.issue_id in res['IDList']['comicvine.gamespot.com']:
md_results.append((res['Distance'], res['Hash'], md))
return md_results
return md_results[int(i)-1][1]
def filter_simple_results(results: list[SimpleResult], force_interactive=True, aggressive_filtering=False) -> list[SimpleResult]:
if not force_interactive:
# If there is a single exact match return it
exact = [r for r in results if r['Distance'] == 0]
if len(exact) == 1:
return exact
# If there are more than 4 results and any are better than 6, return the first group of results
if len(results) > 4:
dist: list[tuple[int, list[SimpleResult]]] = []
filtered_results: list[SimpleResult] = []
for distance, group in itertools.groupby(results, key=lambda r: r['Distance']):
dist.append((distance, list(group)))
if aggressive_filtering and dist[0][0] < 6:
for _, res in dist[:1]:
filtered_results.extend(res)
return filtered_results
return results
def filter_results(results: list[Result], force_interactive=True, aggressive_filtering=False) -> list[Result]:
ahash_results = sorted([r for r in results if r['Hash']['Kind'] == 'ahash'], key=lambda r: r['Distance'])
dhash_results = sorted([r for r in results if r['Hash']['Kind'] == 'dhash'], key=lambda r: r['Distance'])
phash_results = sorted([r for r in results if r['Hash']['Kind'] == 'phash'], key=lambda r: r['Distance'])
hash_results = [phash_results, dhash_results, ahash_results]
if not force_interactive:
# If any of the hash types have a single exact match return it. Prefer phash for no particular reason
for hashed_results in (phash_results, dhash_results, ahash_results):
exact = [r for r in hashed_results if r['Distance'] == 0]
if len(exact) == 1:
return exact
# If any of the hash types have more than 4 results with results better than 6, return the first group of results for each hash type
for i, hashed_results in enumerate(hash_results):
filtered_results: list[Result] = []
if len(hashed_results) > 4:
dist: list[tuple[int, list[Result]]] = []
for distance, group in itertools.groupby(hashed_results, key=lambda r: r['Distance']):
dist.append((distance, list(group)))
if aggressive_filtering and dist[0][0] < 6:
for _, res in dist[:1]:
filtered_results.extend(res)
if filtered_results:
hash_results[i] = filtered_results
return list(itertools.chain(*hash_results))
def display_simple_results(md_results: list[tuple[int, GenericMetadata]], ca: comictaggerlib.cli.ComicArchive, force_interactive=True) -> GenericMetadata:
filename_md = ca.metadata_from_filename(utils.Parser.COMICFN2DICT)
if len(md_results) < 1:
logger.warning('No results found for comic')
raise SystemExit(4)
if not force_interactive:
if len(md_results) == 1 and md_results[0][0] <= 4:
return md_results[0][1]
series_match = []
for score, md in md_results:
if (
score < 10
and filename_md.series
and md.series
and utils.titles_match(filename_md.series, md.series)
and IssueString(filename_md.issue).as_string() == IssueString(md.issue).as_string()
):
series_match.append(md)
if len(series_match) == 1:
return series_match[0]
md_results.sort(key=lambda r: (r[0], len(r[1].publisher or '')))
for counter, r in enumerate(md_results, 1):
print(
' {:2}. score: {} [{:15}] ({:02}/{:04}) - {} #{} - {}'.format(
counter,
r[0],
r[1].publisher,
r[1].month or 0,
r[1].year or 0,
r[1].series,
r[1].issue,
r[1].title,
),
)
while True:
i = input(
f'Please select a result to tag the comic with or "q" to quit: [1-{len(md_results)}] ',
).casefold()
if (i.isdigit() and int(i) in range(1, len(md_results) + 1)):
break
if i == 'q':
logger.warning('User quit without saving metadata')
raise SystemExit(4)
return md_results[int(i) - 1][1]
def display_results(md_results: list[tuple[int, Hash, GenericMetadata]], ca: comictaggerlib.cli.ComicArchive, force_interactive=True) -> GenericMetadata:
filename_md = ca.metadata_from_filename(utils.Parser.COMICFN2DICT)
if len(md_results) < 1:
logger.warning('No results found for comic')
raise SystemExit(4)
if not force_interactive:
if len(md_results) == 1 and md_results[0][0] <= 4:
return md_results[0][2]
series_match = []
for score, hash, md in md_results:
if (
score < 10
and filename_md.series
and md.series
and utils.titles_match(filename_md.series, md.series)
and IssueString(filename_md.issue).as_string() == IssueString(md.issue).as_string()
):
series_match.append(md)
if len(series_match) == 1:
return series_match[0]
md_results.sort(key=lambda r: (r[0], len(r[2].publisher or ''), r[1]["Kind"]))
for counter, r in enumerate(md_results, 1):
print(
' {:2}. score: {} {}: {:064b} [{:15}] ({:02}/{:04}) - {} #{} - {}'.format(
counter,
r[0],
r[1]["Kind"],
r[1]["Hash"],
r[2].publisher,
r[2].month or 0,
r[2].year or 0,
r[2].series,
r[2].issue,
r[2].title,
),
)
while True:
i = input(
f'Please select a result to tag the comic with or "q" to quit: [1-{len(md_results)}] ',
).casefold()
if (i.isdigit() and int(i) in range(1, len(md_results) + 1)):
break
if i == 'q':
logger.warning('User quit without saving metadata')
raise SystemExit(4)
return md_results[int(i) - 1][2]
def fetch_full_issue_data(md: GenericMetadata, cv_api_key: str | None = None) -> GenericMetadata:
cache_dir = pathlib.Path(appdirs.user_cache_dir('quick_tag'))
cache_dir.mkdir(parents=True, exist_ok=True)
cv = CV(f"quick_tag/{__version__}", cache_dir)
cv.parse_settings({
'comicvine_key': cv_api_key,
'cv_use_series_start_as_volume': True,
})
return cv.fetch_comic_data(issue_id=md.issue_id)
def prepare_metadata(md: GenericMetadata, new_md: GenericMetadata, clear_tags: bool, auto_imprint: bool, remove_html_tables: bool) -> GenericMetadata:
@@ -472,11 +96,11 @@ def prepare_metadata(md: GenericMetadata, new_md: GenericMetadata, clear_tags: b
final_md.overlay(new_md, merge.Mode.OVERLAY, True)
issue_id = ''
issue_id = ""
if final_md.issue_id:
issue_id = f" [Issue ID {final_md.issue_id}]"
origin = ''
origin = ""
if final_md.data_origin is not None:
origin = f" using info from {final_md.data_origin.name}"
notes = f"Tagged with quick_tag {__version__}{origin} on {datetime.now():%Y-%m-%d %H:%M:%S}.{issue_id}"
@@ -486,63 +110,54 @@ def prepare_metadata(md: GenericMetadata, new_md: GenericMetadata, clear_tags: b
return final_md.replace(
is_empty=False,
notes=utils.combine_notes(final_md.notes, notes, 'Tagged with quick_tag'),
description=cleanup_html(final_md.description, remove_html_tables),
notes=utils.combine_notes(final_md.notes, notes, "Tagged with quick_tag"),
description=cleanup_html(final_md.description, remove_html_tables) or None,
)
def main():
manager = settngs.Manager('Simple comictagging script using ImageHash: https://pypi.org/project/ImageHash/')
manager.add_group('runtime', settings)
opts, _ = manager.parse_cmdline()
url: utils.Url = opts['runtime']['url']
max_hamming_distance: int = opts['runtime']['max']
simple: bool = opts['runtime']['simple']
manager = settngs.Manager('Simple comictagging script using ImageHash: https://pypi.org/project/ImageHash/')
manager.add_group("runtime", settings)
opts,_ = manager.parse_cmdline()
url: utils.Url = opts['runtime']['url']
print(url)
max_hamming_distance: int = opts['runtime']['max']
simple: bool = opts['runtime']['simple']
if not simple:
logger.error("Full results not implemented yet")
raise SystemExit(1)
ca = comicarchive.ComicArchive(opts['runtime']['comic_archive'])
if not ca.seems_to_be_a_comic_archive():
logger.error("Could not open %s as an archive", ca.path)
raise SystemExit(1)
ca = comicarchive.ComicArchive(opts['runtime']['comic_archive'])
if not ca.seems_to_be_a_comic_archive():
logger.error('Could not open %s as an archive', ca.path)
raise SystemExit(1)
try:
tags = ca.read_tags('cr')
cover_index = tags.get_cover_page_index_list()[0]
cover_image = Image.open(BytesIO(ca.get_page(cover_index)))
except Exception:
logger.exception("Unable to read cover image from archive")
raise SystemExit(2)
print('Tagging: ', ca.path)
try:
tags = ca.read_tags('cr')
cover_index = tags.get_cover_page_index_list()[0]
cover_image = Image.open(BytesIO(ca.get_page(cover_index)))
except Exception:
logger.exception('Unable to read cover image from archive')
raise SystemExit(2)
print('Tagging: ', ca.path)
ahash = imagehash.average_hash(cover_image)
dhash = imagehash.dhash(cover_image)
phash = imagehash.phash(cover_image)
print("hashing cover")
phash = dhash = ahash = ''
if HashType.AHASH in opts['runtime']['hash']:
ahash = imagehash.average_hash(cover_image)
if HashType.DHASH in opts['runtime']['hash']:
dhash = imagehash.dhash(cover_image)
if HashType.PHASH in opts['runtime']['hash']:
phash = imagehash.phash(cover_image)
results = SearchHashes(url.url, simple,max_hamming_distance,str(ahash),str(dhash),str(phash))
print(results)
if simple:
metadata_results = get_simple_results(results, opts['runtime']['cv_api_key'])
chosen_result = display_simple_results(metadata_results, opts['runtime']['force_interactive'])
else:
metadata_results = get_full_results(results)
chosen_result = display_full_results(metadata_results)
print("Searching hashes")
results = SearchHashes(url.url, simple, max_hamming_distance, str(ahash), str(dhash), str(phash), opts['runtime']['skip_non_exact'])
print("Retrieving basic ComicVine data")
if simple:
filtered_results = filter_simple_results(cast(list[SimpleResult], results), opts['runtime']['force_interactive'], opts['runtime']['aggressive_filtering'])
metadata_results = get_simple_results(filtered_results, opts['runtime']['cv_api_key'])
chosen_result = display_simple_results(metadata_results, ca, opts['runtime']['force_interactive'])
else:
filtered_results = filter_results(cast(list[Result], results), opts['runtime']['force_interactive'], opts['runtime']['aggressive_filtering'])
metadata_results = get_results(filtered_results, opts['runtime']['cv_api_key'])
chosen_result = display_results(metadata_results, ca, opts['runtime']['force_interactive'])
full_cv_md = fetch_full_issue_data(chosen_result, opts['runtime']['cv_api_key'])
if ca.write_tags(prepare_metadata(tags, full_cv_md, clear_tags=False, auto_imprint=True, remove_html_tables=True), 'cr'):
print(f'successfully saved metadata to {ca.path}')
raise SystemExit(0)
logger.error('Failed to save metadata to %s', ca.path)
raise SystemExit(2)
if ca.write_tags(prepare_metadata(GenericMetadata(), chosen_result, clear_tags=False, auto_imprint=True, remove_html_tables=True), 'cr'):
print(f'successfully saved metadata to {ca.path}')
raise SystemExit(0)
logger.error("Failed to save metadata to %s", ca.path)
raise SystemExit(2)
if __name__ == '__main__':
main()
if __name__ == "__main__":
main()
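
For reference, the SimpleResult/Result shapes used above correspond to the server's JSON payload; a Go mirror with field names inferred from the TypedDicts (treat the struct tags and the sample payload as assumptions):

package main

import (
	"encoding/json"
	"fmt"
)

// Hash mirrors the Hash TypedDict above.
type Hash struct {
	Hash uint64 `json:"Hash"`
	Kind string `json:"Kind"`
}

// Result mirrors the Result TypedDict: domain -> IDs, plus distance and hash.
type Result struct {
	IDList   map[string][]string `json:"IDList"`
	Distance int                 `json:"Distance"`
	Hash     Hash                `json:"Hash"`
}

func main() {
	// Invented sample payload for illustration.
	raw := `{"IDList":{"comicvine.gamespot.com":["140529"]},"Distance":0,"Hash":{"Hash":4004585148995313725,"Kind":"phash"}}`
	var r Result
	if err := json.Unmarshal([]byte(raw), &r); err != nil {
		panic(err)
	}
	fmt.Printf("%+v\n", r)
}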

BIN
cv-goimagehash.html Normal file

Binary file not shown.

701
cv/cv.go
View File

@@ -1,701 +0,0 @@
package cv
import (
"bufio"
"bytes"
"cmp"
"context"
"encoding/json"
"errors"
"fmt"
"io"
"io/fs"
"log"
"net/http"
"net/url"
"os"
"path"
"path/filepath"
"strconv"
"strings"
"sync"
"time"
"slices"
ch "gitea.narnian.us/lordwelch/comic-hasher"
)
type Download struct {
URL string
Dest string
IssueID string
Image *bytes.Buffer
}
type Issue struct {
ID int `json:"id"`
IssueNumber string `json:"issue_number"`
Image struct {
IconURL string `json:"icon_url,omitempty"`
MediumURL string `json:"medium_url,omitempty"`
ScreenURL string `json:"screen_url,omitempty"`
ScreenLargeURL string `json:"screen_large_url,omitempty"`
SmallURL string `json:"small_url,omitempty"`
SuperURL string `json:"super_url,omitempty"`
ThumbURL string `json:"thumb_url"`
TinyURL string `json:"tiny_url,omitempty"`
OriginalURL string `json:"original_url"`
ImageTags string `json:"image_tags"`
} `json:"image"`
Volume struct {
ID int `json:"id"`
} `json:"volume"`
}
type CVResult struct {
// Error string `json:"error"`
// Limit int `json:"limit"`
Offset int `json:"offset"`
NumberOfPageResults int `json:"number_of_page_results"`
NumberOfTotalResults int `json:"number_of_total_results"`
StatusCode int `json:"status_code"`
Results []Issue `json:"results"`
// Version string `json:"version"`
}
type CVDownloader struct {
APIKey string
JSONPath string
ImagePath string
ImageTypes []string
SendExistingImages bool
KeepDownloadedImages bool
Context context.Context
FinishedDownloadQueue chan Download
fileList []string
totalResults int
imageWG sync.WaitGroup
downloadQueue chan *CVResult
imageDownloads chan download
notFound chan download
chdb ch.CHDB
bufPool *sync.Pool
get_id func(id ch.ID) ch.IDList
only_hash_new_ids bool
}
var (
ErrQuit = errors.New("quit")
ErrInvalidPage = errors.New("invalid ComicVine page")
ErrInvalidIndex = errors.New("invalid page index")
ErrDownloadFail = errors.New("download failed")
ErrMissingPage = errors.New("page missing")
ErrUpdateNeeded = errors.New("update needed")
)
func (c *CVDownloader) readJson() ([]*CVResult, error) {
var issues []*CVResult
for _, filename := range c.fileList {
if c.hasQuit() {
return nil, ErrQuit
}
result, err := c.loadIssues(filename)
if err != nil {
if err == ErrInvalidPage {
continue
}
return issues, err
}
c.totalResults = max(result.NumberOfTotalResults, c.totalResults)
issues = append(issues, result)
}
return issues, nil
}
func (c *CVDownloader) loadIssues(filename string) (*CVResult, error) {
tmp := &CVResult{Results: make([]Issue, 0, 100)}
file, err := os.Open(filepath.Join(c.JSONPath, filename))
if err != nil {
return nil, err
}
bytes, err := io.ReadAll(file)
if err != nil {
return nil, err
}
err = json.Unmarshal(bytes, tmp)
if err != nil {
return nil, err
}
if getOffset(filename) != tmp.Offset {
return nil, ErrInvalidPage
}
return tmp, nil
}
func Get(url string) (*http.Response, error, func()) {
ctx, cancel := context.WithTimeout(context.Background(), time.Second*20)
req, err := http.NewRequestWithContext(ctx, "GET", url, nil)
if err != nil {
return nil, err, cancel
}
resp, err := http.DefaultClient.Do(req)
return resp, err, cancel
}
func getOffset(name string) int {
i, _ := strconv.Atoi(name[3 : len(name)-1-4])
return i
}
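
getOffset assumes page files are named cv-<offset>.json; a quick sanity check of the slicing arithmetic (len("cv-") == 3 and len(".json") == 5 == 1+4):

package main

import "fmt"

func main() {
	name := "cv-1700.json"
	// name[3 : len(name)-1-4] strips the "cv-" prefix and the ".json" suffix.
	fmt.Println(name[3 : len(name)-1-4]) // prints 1700
}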
func (c *CVDownloader) findDownloadedPage(offset int) int {
index := offset / 100
if index < len(c.fileList) && getOffset(c.fileList[index]) == offset { // If it's in order and it's not missing it should be here
return index
}
index, found := slices.BinarySearchFunc(c.fileList, offset, func(a string, b int) int {
return cmp.Compare(getOffset(a), b)
})
if found {
return index
}
return -1
}
func (c *CVDownloader) getDownloadedIssues(offset int, update bool) (*CVResult, error) {
index := c.findDownloadedPage(offset)
if index < 0 {
return nil, ErrMissingPage
}
issue, err := c.loadIssues(c.fileList[index])
if err != nil || issue == nil {
err = fmt.Errorf("Failed to read page at offset %d: %w", offset, err)
os.Remove(filepath.Join(c.JSONPath, c.fileList[index]))
c.fileList = slices.Delete(c.fileList, index, index+1)
return nil, err
}
c.totalResults = max(c.totalResults, issue.NumberOfTotalResults)
if update && (len(issue.Results) == 0 || issue.Results[0].IssueNumber == "") {
err = fmt.Errorf("Deleting page %d to update records from cv", offset)
os.Remove(filepath.Join(c.JSONPath, c.fileList[index]))
c.fileList = slices.Delete(c.fileList, index, index+1)
return nil, err
}
if c.totalResults == issue.Offset+issue.NumberOfPageResults {
if index != len(c.fileList)-1 {
err = fmt.Errorf("Wrong index: expected %d got %d", len(c.fileList), index)
return nil, err
}
log.Println("Deleting the last page to detect new comics")
os.Remove(filepath.Join(c.JSONPath, c.fileList[index]))
c.fileList = slices.Delete(c.fileList, index, index+1)
}
return issue, nil
}
// updateIssues: c.downloadQueue must not be closed before this function has returned
func (c *CVDownloader) updateIssues() (int, error) {
base_url, err := url.Parse("https://comicvine.gamespot.com/api/issues/?sort=date_added,id:asc&format=json&field_list=id,issue_number,image,volume")
if err != nil {
log.Fatal(err)
}
query := base_url.Query()
query.Add("api_key", c.APIKey)
base_url.RawQuery = query.Encode()
c.totalResults = max(c.totalResults, 1)
failCount := 0
prev := -1
offset := 0
retry := func(url string, err error) bool {
if errors.Is(err, context.Canceled) {
log.Println("Server closed")
return false
}
log.Printf("Failed to download %#v at offset %v: %v Attempt #%d", url, offset, err, failCount+1)
if prev == offset {
sleepTime := time.Second * 36
if failCount > 2 {
sleepTime = time.Minute * 10
}
log.Println("This page failed to download, lets wait for", sleepTime, "and hope it works")
select {
case <-c.Context.Done(): // allows us to return immediately even during a timeout
return false
case <-time.After(sleepTime):
}
}
prev = offset
failCount += 1
offset -= 100
return failCount < 15
}
updated := 0
for offset = 0; offset <= c.totalResults; offset += 100 {
if c.hasQuit() {
return offset - 100, ErrQuit
}
issue, err := c.getDownloadedIssues(offset, updated < 9)
if err == nil && issue != nil {
prev = -1
failCount = 0
select {
case <-c.Context.Done(): // allows us to return immediately even during a timeout
return offset - 100, ErrQuit
case c.downloadQueue <- issue:
}
continue
}
if errors.Is(err, ErrInvalidIndex) {
return offset - 100, err
}
if err != nil && !errors.Is(err, ErrMissingPage) {
log.Println(err)
}
log.Println("Starting download at offset", offset)
issue = &CVResult{}
URI := (*base_url)
query = base_url.Query()
query.Add("offset", strconv.Itoa(offset))
URI.RawQuery = query.Encode()
select {
case <-c.Context.Done(): // Allows us to return immediately even during a timeout
return offset - 100, ErrQuit
case <-time.After(10 * time.Second): // Enforces a minimum 10s wait between API hits
}
resp, err, cancelDownloadCTX := Get(URI.String())
if err != nil {
cancelDownloadCTX()
if retry(URI.String(), err) {
continue
}
// Fail and let comic-hasher try the whole thing again later
return offset - 100, fmt.Errorf("%w: %w", ErrDownloadFail, err)
}
if resp.StatusCode != 200 {
cancelDownloadCTX()
if retry(URI.String(), nil) {
_ = resp.Body.Close()
continue
}
msg, _ := io.ReadAll(resp.Body)
_ = resp.Body.Close()
return offset - 100, fmt.Errorf("%w: response code: %d Message: %s", ErrDownloadFail, resp.StatusCode, string(msg))
}
file, err := os.Create(filepath.Join(c.JSONPath, "cv-"+strconv.Itoa(offset)+".json"))
if err != nil {
log.Fatal(err)
}
body := io.TeeReader(resp.Body, file)
err = json.NewDecoder(bufio.NewReader(body)).Decode(issue)
_ = resp.Body.Close()
_ = file.Close()
if err != nil || issue.Offset != offset {
os.Remove(filepath.Join(c.JSONPath, "cv-"+strconv.Itoa(offset)+".json"))
cancelDownloadCTX()
if retry(URI.String(), err) {
continue
}
return offset - 100, fmt.Errorf("%w: %w", ErrDownloadFail, err)
}
cancelDownloadCTX()
if issue.NumberOfTotalResults > c.totalResults {
c.totalResults = issue.NumberOfTotalResults
}
prev = -1
failCount = 0
updated += 1
c.downloadQueue <- issue
c.insertIssuePage(offset)
log.Printf("Downloaded %s/cv-%v.json", c.JSONPath, offset)
}
return offset, nil
}
type download struct {
url string
dest string
offset int
volumeID int
issueID int
finished bool
}
func (c *CVDownloader) start_downloader() {
for i := range 5 {
go func() {
log.Println("starting downloader", i)
for dl := range c.imageDownloads {
if dl.finished {
c.FinishedDownloadQueue <- Download{
URL: dl.url,
Dest: dl.dest,
IssueID: strconv.Itoa(dl.issueID),
}
c.imageWG.Done()
continue
}
dir := filepath.Dir(dl.dest)
resp, err, cancelDownload := Get(dl.url)
if err != nil {
cancelDownload()
log.Println("Failed to download", dl.volumeID, "/", dl.issueID, dl.url, err)
c.imageWG.Done()
continue
}
cleanup := func() {
resp.Body.Close()
cancelDownload()
c.imageWG.Done()
}
if resp.StatusCode == 404 {
c.notFound <- dl
cleanup()
continue
}
if resp.StatusCode != 200 {
log.Println("Failed to download", dl.url, resp.StatusCode)
cleanup()
continue
}
if c.KeepDownloadedImages {
_ = os.MkdirAll(dir, 0o755)
image, err := os.Create(dl.dest)
if err != nil {
log.Println("Unable to create image file", dl.dest, err)
os.Remove(dl.dest)
image.Close()
cleanup()
continue
}
log.Println("downloading", dl.dest)
_, err = io.Copy(image, resp.Body)
image.Close()
if err != nil {
log.Println("Failed when downloading image", err)
os.Remove(dl.dest)
cleanup()
continue
}
c.FinishedDownloadQueue <- Download{
URL: dl.url,
Dest: dl.dest,
IssueID: strconv.Itoa(dl.issueID),
}
} else {
image := c.bufPool.Get().(*bytes.Buffer)
image.Reset()
log.Println("downloading", dl.dest)
_, err = io.Copy(image, resp.Body)
if err != nil {
log.Println("Failed when downloading image", err)
cleanup()
os.Remove(dl.dest)
// Something failed let this buffer GC instead of saving it
continue
}
c.FinishedDownloadQueue <- Download{
URL: dl.url,
Dest: dl.dest,
IssueID: strconv.Itoa(dl.issueID),
Image: image,
}
}
cleanup()
}
}()
}
}
func (c *CVDownloader) handleNotFound() {
for failedDownload := range c.notFound {
c.chdb.AddURL(failedDownload.url)
log.Printf("Not found: volumeID: %d issueID: %d Offset: %d URL: %s\n", failedDownload.volumeID, failedDownload.issueID, failedDownload.offset, failedDownload.url)
}
}
func (c *CVDownloader) downloadImages() {
defer func() {
log.Println("Waiting for final images to complete download")
c.imageWG.Wait()
}()
go c.start_downloader()
go c.handleNotFound()
added := 0
for list := range c.downloadQueue {
log.Printf("Checking downloads at offset %v\r", list.Offset)
for _, issue := range list.Results {
type image struct {
url string
name string
}
imageURLs := []image{{issue.Image.IconURL, "icon_url"}, {issue.Image.MediumURL, "medium_url"}, {issue.Image.ScreenURL, "screen_url"}, {issue.Image.ScreenLargeURL, "screen_large_url"}, {issue.Image.SmallURL, "small_url"}, {issue.Image.SuperURL, "super_url"}, {issue.Image.ThumbURL, "thumb_url"}, {issue.Image.TinyURL, "tiny_url"}, {issue.Image.OriginalURL, "original_url"}}
for _, image := range imageURLs {
if len(c.ImageTypes) > 0 && !slices.Contains(c.ImageTypes, image.name) {
continue
}
if c.chdb.CheckURL(image.url) {
log.Printf("Skipping known bad url %s", image.url)
continue
}
if strings.HasSuffix(image.url, "6373148-blank.png") {
c.notFound <- download{
url: image.url,
offset: list.Offset,
volumeID: issue.Volume.ID,
issueID: issue.ID,
}
continue
}
uri, err := url.ParseRequestURI(image.url)
if err != nil {
c.notFound <- download{
url: image.url,
offset: list.Offset,
volumeID: issue.Volume.ID,
issueID: issue.ID,
finished: true,
}
continue
}
ext := strings.TrimSuffix(strings.ToLower(path.Ext(uri.Path)), "~original")
if ext == "" || (len(ext) > 4 && !slices.Contains([]string{".avif", ".webp", ".tiff", ".heif"}, ext)) {
ext = ".jpg"
}
dir := filepath.Join(c.ImagePath, strconv.Itoa(issue.Volume.ID), strconv.Itoa(issue.ID))
path := filepath.Join(dir, image.name+ext)
ids := c.get_id(ch.ID{
Domain: ch.NewSource(ch.ComicVine),
ID: strconv.Itoa(issue.ID),
})
if c.chdb.PathDownloaded(path) || c.only_hash_new_ids && len(ids) > 0 {
if _, err = os.Stat(path); c.SendExistingImages && err == nil {
// We don't add to the count of added as these should be processed immediately
log.Printf("Sending Existing image %v/%v %v", issue.Volume.ID, issue.ID, path)
c.imageWG.Add(1)
c.imageDownloads <- download{
url: image.url,
dest: path,
offset: list.Offset,
volumeID: issue.Volume.ID,
issueID: issue.ID,
finished: true,
}
}
continue // If it exists, assume it is fine; adding some basic verification might be a good idea later
}
added++
c.imageWG.Add(1)
c.imageDownloads <- download{
url: image.url,
dest: path,
offset: list.Offset,
volumeID: issue.Volume.ID,
issueID: issue.ID,
}
}
if added > 200 {
// On a clean run with a single image type, each page would queue 100 downloads of one cover type, but stuff happens, so we only wait once we have sent 200 to the queue
log.Println("waiting for", added, "downloads at offset", list.Offset)
beforeWait := time.Now()
c.imageWG.Wait()
waited := time.Since(beforeWait)
added = 0
// If we had to wait for the arbitrarily picked time of 7.4 seconds, it means we had a backed-up queue (slow hashing can also cause it to wait longer); let's wait to give the CV servers a break
if waited > time.Duration(7.4*float64(time.Second)) {
t := 10 * time.Second
log.Println("Waiting for", t, "at offset", list.Offset, "had to wait for", waited)
time.Sleep(t)
} else {
// Things are too fast; we can't rely on CV being slow to throttle our download speed
// Sleep for 3 seconds so we don't overload CV
time.Sleep(3 * time.Second)
}
}
}
}
}
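// A minimal, self-contained sketch of the batching heuristic above: launch
// jobs, wait after every batchSize launches, and back off further when the
// wait itself was slow. drainWithBackoff and the thresholds are illustrative,
// not part of this repository.
package main

import (
	"log"
	"sync"
	"time"
)

func drainWithBackoff(jobs <-chan func(), batchSize int, slow, backoff time.Duration) {
	var wg sync.WaitGroup
	queued := 0
	for job := range jobs {
		wg.Add(1)
		go func(j func()) { defer wg.Done(); j() }(job)
		queued++
		if queued < batchSize {
			continue
		}
		queued = 0
		start := time.Now()
		wg.Wait()
		// A long wait means the workers are the bottleneck; pause to ease off the server.
		if waited := time.Since(start); waited > slow {
			log.Println("queue backed up; pausing", backoff, "after waiting", waited)
			time.Sleep(backoff)
		}
	}
	wg.Wait()
}

func main() {
	jobs := make(chan func(), 8)
	go func() {
		for i := 0; i < 8; i++ {
			jobs <- func() { time.Sleep(10 * time.Millisecond) }
		}
		close(jobs)
	}()
	drainWithBackoff(jobs, 4, 5*time.Millisecond, 50*time.Millisecond)
}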
func (c *CVDownloader) cleanBadURLs() error {
var indexesToRemove []int
list:
for i, jsonFile := range c.fileList {
list, err := c.loadIssues(jsonFile)
if err != nil {
indexesToRemove = append(indexesToRemove, i)
os.Remove(filepath.Join(c.JSONPath, jsonFile))
continue
}
for _, issue := range list.Results {
for _, url := range []string{issue.Image.IconURL, issue.Image.MediumURL, issue.Image.ScreenURL, issue.Image.ScreenLargeURL, issue.Image.SmallURL, issue.Image.SuperURL, issue.Image.ThumbURL, issue.Image.TinyURL, issue.Image.OriginalURL} {
if c.chdb.CheckURL(url) {
indexesToRemove = append(indexesToRemove, i)
if err := os.Remove(filepath.Join(c.JSONPath, jsonFile)); err != nil {
return err
}
// We've removed the entire page; a later re-download will show whether a new URL works
continue list
}
}
}
}
slices.Reverse(indexesToRemove)
for _, i := range indexesToRemove {
c.fileList = slices.Delete(c.fileList, i, i+1)
}
return nil
}
func (c *CVDownloader) hasQuit() bool {
select {
case <-c.Context.Done():
return true
default:
return false
}
}
func (c *CVDownloader) cleanDirs() {
_ = filepath.WalkDir(c.ImagePath, func(path string, d fs.DirEntry, err error) error {
if err != nil {
return err
}
if d.IsDir() {
path, _ = filepath.Abs(path)
err := ch.RmdirP(path)
// The error is only for the first path value, e.g. ch.RmdirP("/test/t") only returns the error from os.Remove("/test/t"), not os.Remove("/test")
if err == nil {
return filepath.SkipDir
}
}
return nil
})
}
func (c *CVDownloader) insertIssuePage(offset int) {
index, found := slices.BinarySearchFunc(c.fileList, offset, func(a string, b int) int {
return cmp.Compare(getOffset(a), b)
})
if found {
return
}
c.fileList = slices.Insert(c.fileList, index, fmt.Sprintf("cv-%v.json", offset))
}
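// A standalone sketch of the sorted-insert pattern insertIssuePage uses; this
// getOffset stand-in just parses "cv-<offset>.json" (the repository's real
// helper is not shown in this diff).
package main

import (
	"cmp"
	"fmt"
	"slices"
	"strconv"
	"strings"
)

func getOffset(name string) int {
	v, _ := strconv.Atoi(strings.TrimSuffix(strings.TrimPrefix(name, "cv-"), ".json"))
	return v
}

func main() {
	files := []string{"cv-0.json", "cv-100.json", "cv-300.json"}
	offset := 200
	index, found := slices.BinarySearchFunc(files, offset, func(a string, b int) int {
		return cmp.Compare(getOffset(a), b)
	})
	if !found {
		files = slices.Insert(files, index, fmt.Sprintf("cv-%v.json", offset))
	}
	fmt.Println(files) // [cv-0.json cv-100.json cv-200.json cv-300.json]
}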
func NewCVDownloader(ctx context.Context, bufPool *sync.Pool, only_hash_new_ids bool, get_id func(id ch.ID) ch.IDList, chdb ch.CHDB, workPath, APIKey string, imageTypes []string, keepDownloadedImages, sendExistingImages bool, finishedDownloadQueue chan Download) *CVDownloader {
return &CVDownloader{
Context: ctx,
JSONPath: filepath.Join(workPath, "_json"),
ImagePath: filepath.Join(workPath, "_image"),
APIKey: APIKey,
bufPool: bufPool, // Only used if keepDownloadedImages is false to save memory on byte buffers. The buffers get sent back via finishedDownloadQueue
FinishedDownloadQueue: finishedDownloadQueue,
SendExistingImages: sendExistingImages,
KeepDownloadedImages: keepDownloadedImages,
ImageTypes: imageTypes,
chdb: chdb,
get_id: get_id,
only_hash_new_ids: only_hash_new_ids,
}
}
func DownloadCovers(c *CVDownloader) {
var (
err error
)
c.downloadQueue = make(chan *CVResult) // This is just JSON; it shouldn't take much more than 122 MB
c.imageDownloads = make(chan download, 1) // These are just URLs; they should only take a few MB
c.notFound = make(chan download, 1) // Same here
os.MkdirAll(c.JSONPath, 0o777)
os.MkdirAll(c.ImagePath, 0o777) // ensure the image dir exists before creating .keep
f, _ := os.Create(filepath.Join(c.ImagePath, ".keep"))
f.Close()
if !c.KeepDownloadedImages {
log.Println("Cleaning directories")
c.cleanDirs()
}
log.Println("Reading json")
var d *os.File
d, err = os.Open(c.JSONPath)
if err != nil {
panic(fmt.Errorf("unable to open path for json files: %w", err))
}
c.fileList, err = d.Readdirnames(-1)
d.Close()
if err != nil {
panic(fmt.Errorf("unable to list json files: %w", err))
}
for i := len(c.fileList) - 1; i >= 0; i-- {
if !strings.Contains(c.fileList[i], "json") {
c.fileList = slices.Delete(c.fileList, i, i+1)
}
}
slices.SortFunc(c.fileList, func(x, y string) int {
return cmp.Compare(getOffset(x), getOffset(y))
})
if len(c.fileList) > 0 {
c.totalResults = getOffset(c.fileList[len(c.fileList)-1])
}
log.Println("Number of pages", len(c.fileList), "Expected Pages:", c.totalResults/100)
log.Println("Updating issues now")
dwg := sync.WaitGroup{}
dwg.Add(1)
go func() {
c.downloadImages()
dwg.Done()
}()
offset, err := c.updateIssues()
if err != nil {
log.Printf("Failed to download CV Covers: %s", err)
}
issueCount := len(c.fileList) * 100
log.Println("Number of issues", issueCount, " expected:", c.totalResults)
close(c.downloadQueue) // sends only happen in c.updateIssues, which has already returned
// We don't drain here as we want to process them
log.Println("Waiting for downloaders")
dwg.Wait()
close(c.imageDownloads)
for dw := range c.imageDownloads {
fmt.Println("Skipping cv download", dw.issueID)
}
close(c.notFound)
for dw := range c.notFound {
fmt.Println("Skipping not found", dw.issueID)
}
// We drain this at the end because we need to wait for the images to download
for dw := range c.downloadQueue {
fmt.Println("Skipping page download", dw.Offset)
}
log.Println("Completed downloading images")
log.Println("Last offset", offset)
}
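// The shutdown sequence above leans on a channel property worth spelling out:
// a closed channel still yields its buffered values, so close-then-range
// drains anything in flight without blocking. In miniature:
package main

import "fmt"

func main() {
	ch := make(chan int, 3)
	ch <- 1
	ch <- 2
	close(ch) // no further sends are allowed, but receives still drain the buffer
	for v := range ch {
		fmt.Println("skipping", v)
	}
}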

83
go.mod

@ -1,71 +1,40 @@
module gitea.narnian.us/lordwelch/comic-hasher
go 1.23.0
go 1.22.1
toolchain go1.24.0
toolchain go1.22.2
// Main comic-hasher
require (
gitea.narnian.us/lordwelch/goimagehash v0.0.0-20250130004139-e91c39c79e0d
gitea.narnian.us/lordwelch/goimagehash v0.0.0-20240502010648-cb5a8237c420
github.com/disintegration/imaging v1.6.3-0.20201218193011-d40f48ce0f09
github.com/json-iterator/go v1.1.12
github.com/kr/pretty v0.2.1
github.com/vmihailenco/msgpack v4.0.4+incompatible
go.etcd.io/bbolt v1.4.0
golang.org/x/exp v0.0.0-20250218142911-aa4b98e5adaa
golang.org/x/image v0.25.0
)
// Storage types
require (
github.com/mattn/go-sqlite3 v1.14.24
github.com/ncruces/go-sqlite3 v0.26.0
gonum.org/v1/gonum v0.16.0
modernc.org/sqlite v1.35.0
)
// other commands
require (
github.com/fmartingr/go-comicinfo/v2 v2.0.2
github.com/mholt/archiver/v4 v4.0.0-alpha.9
golang.org/x/text v0.25.0
github.com/mholt/archiver/v4 v4.0.0-alpha.8
golang.org/x/image v0.7.0
)
require (
github.com/STARRY-S/zip v0.1.0 // indirect
github.com/andybalholm/brotli v1.1.1 // indirect
github.com/bodgit/plumbing v1.3.0 // indirect
github.com/bodgit/sevenzip v1.5.2 // indirect
github.com/bodgit/windows v1.0.1 // indirect
github.com/dsnet/compress v0.0.2-0.20230904184137-39efe44ab707 // indirect
github.com/dustin/go-humanize v1.0.1 // indirect
github.com/golang/protobuf v1.5.2 // indirect
github.com/andybalholm/brotli v1.0.4 // indirect
github.com/bodgit/plumbing v1.2.0 // indirect
github.com/bodgit/sevenzip v1.3.0 // indirect
github.com/bodgit/windows v1.0.0 // indirect
github.com/connesc/cipherio v0.2.1 // indirect
github.com/dsnet/compress v0.0.1 // indirect
github.com/golang/mock v1.6.0 // indirect
github.com/golang/snappy v0.0.4 // indirect
github.com/google/uuid v1.6.0 // indirect
github.com/hashicorp/errwrap v1.1.0 // indirect
github.com/hashicorp/errwrap v1.0.0 // indirect
github.com/hashicorp/go-multierror v1.1.1 // indirect
github.com/hashicorp/golang-lru/v2 v2.0.7 // indirect
github.com/klauspost/compress v1.17.11 // indirect
github.com/klauspost/pgzip v1.2.6 // indirect
github.com/kr/text v0.1.0 // indirect
github.com/mattn/go-isatty v0.0.20 // indirect
github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421 // indirect
github.com/modern-go/reflect2 v1.0.2 // indirect
github.com/ncruces/go-strftime v0.1.9 // indirect
github.com/ncruces/julianday v1.0.0 // indirect
github.com/nwaples/rardecode/v2 v2.0.0-beta.4 // indirect
github.com/pierrec/lz4/v4 v4.1.21 // indirect
github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec // indirect
github.com/sorairolake/lzip-go v0.3.5 // indirect
github.com/tetratelabs/wazero v1.9.0 // indirect
github.com/klauspost/compress v1.15.9 // indirect
github.com/klauspost/pgzip v1.2.5 // indirect
github.com/nwaples/rardecode/v2 v2.0.0-beta.2 // indirect
github.com/pierrec/lz4/v4 v4.1.15 // indirect
github.com/therootcompany/xz v1.0.1 // indirect
github.com/ulikunitz/xz v0.5.12 // indirect
go4.org v0.0.0-20230225012048-214862532bf5 // indirect
golang.org/x/sys v0.33.0 // indirect
google.golang.org/appengine v1.6.8 // indirect
google.golang.org/protobuf v1.26.0 // indirect
gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c // indirect
modernc.org/libc v1.61.13 // indirect
modernc.org/mathutil v1.7.1 // indirect
modernc.org/memory v1.8.2 // indirect
github.com/ulikunitz/xz v0.5.10 // indirect
go4.org v0.0.0-20200411211856-f5505b9728dd // indirect
golang.org/x/exp v0.0.0-20240416160154-fe59bbe5cc7f // indirect
)
require golang.org/x/text v0.14.0
replace golang.org/x/text v0.14.0 => /home/timmy/build/source/text/
replace gitea.narnian.us/lordwelch/goimagehash v0.0.0-20240502010648-cb5a8237c420 => ../goimagehash

186
go.sum

@ -15,35 +15,31 @@ cloud.google.com/go/pubsub v1.1.0/go.mod h1:EwwdRX2sKPjnvnqCa270oGRyludottCI76h+
cloud.google.com/go/storage v1.0.0/go.mod h1:IhtSnM/ZTZV8YYJWCY8RULGVqBDmpoyjwiyrjsg+URw=
cloud.google.com/go/storage v1.5.0/go.mod h1:tpKbwo567HUNpVclU5sGELwQWBDZ8gh0ZeosJ0Rtdos=
dmitri.shuralyov.com/gpu/mtl v0.0.0-20190408044501-666a987793e9/go.mod h1:H6x//7gZCb22OMCxBHrMx7a5I7Hp++hsVxbQ4BYO7hU=
gitea.narnian.us/lordwelch/goimagehash v0.0.0-20250130004139-e91c39c79e0d h1:mFnVC/tEHk6woq6FBulwzGcuNdYn+zNhXNBILuetQJs=
gitea.narnian.us/lordwelch/goimagehash v0.0.0-20250130004139-e91c39c79e0d/go.mod h1:UDwa7njhbB5nzxIjHbT9Mjlve9GYn3wzxAcQax1XRvE=
github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU=
github.com/BurntSushi/xgb v0.0.0-20160522181843-27f122750802/go.mod h1:IVnqGOEym/WlBOVXweHU+Q+/VP0lqqI8lqeDx9IjBqo=
github.com/STARRY-S/zip v0.1.0 h1:eUER3jKmHKXjv+iy3BekLa+QnNSo1Lqz4eTzYBcGDqo=
github.com/STARRY-S/zip v0.1.0/go.mod h1:qj/mTZkvb3AvfGQ2e775/3AODRvB4peSw8KNMvrM8/I=
github.com/andybalholm/brotli v1.1.1 h1:PR2pgnyFznKEugtsUo0xLdDop5SKXd5Qf5ysW+7XdTA=
github.com/andybalholm/brotli v1.1.1/go.mod h1:05ib4cKhjx3OQYUY22hTVd34Bc8upXjOLL2rKwwZBoA=
github.com/bodgit/plumbing v1.3.0 h1:pf9Itz1JOQgn7vEOE7v7nlEfBykYqvUYioC61TwWCFU=
github.com/bodgit/plumbing v1.3.0/go.mod h1:JOTb4XiRu5xfnmdnDJo6GmSbSbtSyufrsyZFByMtKEs=
github.com/bodgit/sevenzip v1.5.2 h1:acMIYRaqoHAdeu9LhEGGjL9UzBD4RNf9z7+kWDNignI=
github.com/bodgit/sevenzip v1.5.2/go.mod h1:gTGzXA67Yko6/HLSD0iK4kWaWzPlPmLfDO73jTjSRqc=
github.com/bodgit/windows v1.0.1 h1:tF7K6KOluPYygXa3Z2594zxlkbKPAOvqr97etrGNIz4=
github.com/bodgit/windows v1.0.1/go.mod h1:a6JLwrB4KrTR5hBpp8FI9/9W9jJfeQ2h4XDXU74ZCdM=
github.com/andybalholm/brotli v1.0.4 h1:V7DdXeJtZscaqfNuAdSRuRFzuiKlHSC/Zh3zl9qY3JY=
github.com/andybalholm/brotli v1.0.4/go.mod h1:fO7iG3H7G2nSZ7m0zPUDn85XEX2GTukHGRSepvi9Eig=
github.com/bodgit/plumbing v1.2.0 h1:gg4haxoKphLjml+tgnecR4yLBV5zo4HAZGCtAh3xCzM=
github.com/bodgit/plumbing v1.2.0/go.mod h1:b9TeRi7Hvc6Y05rjm8VML3+47n4XTZPtQ/5ghqic2n8=
github.com/bodgit/sevenzip v1.3.0 h1:1ljgELgtHqvgIp8W8kgeEGHIWP4ch3xGI8uOBZgLVKY=
github.com/bodgit/sevenzip v1.3.0/go.mod h1:omwNcgZTEooWM8gA/IJ2Nk/+ZQ94+GsytRzOJJ8FBlM=
github.com/bodgit/windows v1.0.0 h1:rLQ/XjsleZvx4fR1tB/UxQrK+SJ2OFHzfPjLWWOhDIA=
github.com/bodgit/windows v1.0.0/go.mod h1:a6JLwrB4KrTR5hBpp8FI9/9W9jJfeQ2h4XDXU74ZCdM=
github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU=
github.com/chzyer/logex v1.1.10/go.mod h1:+Ywpsq7O8HXn0nuIou7OrIPyXbp3wmkHB+jjWRnGsAI=
github.com/chzyer/readline v0.0.0-20180603132655-2972be24d48e/go.mod h1:nSuG5e5PlCu98SY8svDHJxuZscDgtXS6KTTbou5AhLI=
github.com/chzyer/test v0.0.0-20180213035817-a1ea475d72b1/go.mod h1:Q3SI9o4m/ZMnBNeIyt5eFwwo7qiLfzFZmjNmxjkiQlU=
github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw=
github.com/connesc/cipherio v0.2.1 h1:FGtpTPMbKNNWByNrr9aEBtaJtXjqOzkIXNYJp6OEycw=
github.com/connesc/cipherio v0.2.1/go.mod h1:ukY0MWJDFnJEbXMQtOcn2VmTpRfzcTz4OoVrWGGJZcA=
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/disintegration/imaging v1.6.3-0.20201218193011-d40f48ce0f09 h1:MJFqtdxTq94XqUgg7DcGCaOIXrDTJE/tPHK66Jshguc=
github.com/disintegration/imaging v1.6.3-0.20201218193011-d40f48ce0f09/go.mod h1:44/5580QXChDfwIclfc/PCwrr44amcmDAg8hxG0Ewe4=
github.com/dsnet/compress v0.0.2-0.20230904184137-39efe44ab707 h1:2tV76y6Q9BB+NEBasnqvs7e49aEBFI8ejC89PSnWH+4=
github.com/dsnet/compress v0.0.2-0.20230904184137-39efe44ab707/go.mod h1:qssHWj60/X5sZFNxpG4HBPDHVqxNm4DfnCKgrbZOT+s=
github.com/dsnet/compress v0.0.1 h1:PlZu0n3Tuv04TzpfPbrnI0HW/YwodEXDS+oPKahKF0Q=
github.com/dsnet/compress v0.0.1/go.mod h1:Aw8dCMJ7RioblQeTqt88akK31OvO8Dhf5JflhBbQEHo=
github.com/dsnet/golib v0.0.0-20171103203638-1ea166775780/go.mod h1:Lj+Z9rebOhdfkVLjJ8T6VcRQv3SXugXy999NBtR9aFY=
github.com/dustin/go-humanize v1.0.1 h1:GzkhY7T5VNhEkwH0PVJgjz+fX1rhBrR7pRT3mDkpeCY=
github.com/dustin/go-humanize v1.0.1/go.mod h1:Mu1zIs6XwVuF/gI1OepvI0qD18qycQx+mFykh5fBlto=
github.com/envoyproxy/go-control-plane v0.9.1-0.20191026205805-5f8ba28d4473/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4=
github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c=
github.com/fmartingr/go-comicinfo/v2 v2.0.2 h1:VppvrHr8C4+iktBTOd7vzTMNbVecZ7F/Ji1kPTOIGg4=
@ -58,13 +54,13 @@ github.com/golang/mock v1.1.1/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfb
github.com/golang/mock v1.2.0/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A=
github.com/golang/mock v1.3.1/go.mod h1:sBzyDLLjw3U8JLTeZvSv8jJB+tU5PVekmnlKIyFUx0Y=
github.com/golang/mock v1.4.0/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw=
github.com/golang/mock v1.4.3/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw=
github.com/golang/mock v1.6.0 h1:ErTB+efbowRARo13NNdxyJji2egdxLGQhRaY+DUumQc=
github.com/golang/mock v1.6.0/go.mod h1:p6yTPP+5HYm5mzsMV8JkE6ZKdX+/wYM6Hr+LicevLPs=
github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U=
github.com/golang/protobuf v1.3.1/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U=
github.com/golang/protobuf v1.3.2/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U=
github.com/golang/protobuf v1.3.3/go.mod h1:vzj43D7+SQXF/4pzW/hwtAqwc6iTitCiVSaWz5lYuqw=
github.com/golang/protobuf v1.5.0/go.mod h1:FsONVRAS9T7sI+LIUmWTfcYkHO4aIWwzhcaSAoJOfIk=
github.com/golang/protobuf v1.5.2 h1:ROPKBNFfQgOUMifHyP+KYbvpjbdoFNs+aK7DXlji0Tw=
github.com/golang/protobuf v1.5.2/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiuN0vRsmY=
github.com/golang/snappy v0.0.4 h1:yAGX7huGHXlcLOEtBnF4w7FQwA26wojNCwOYAEhLjQM=
github.com/golang/snappy v0.0.4/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q=
github.com/google/btree v0.0.0-20180813153112-4030bb1f1f0c/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ=
@ -73,107 +69,61 @@ github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5a
github.com/google/go-cmp v0.3.0/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU=
github.com/google/go-cmp v0.3.1/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU=
github.com/google/go-cmp v0.4.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI=
github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY=
github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg=
github.com/google/martian v2.1.0+incompatible/go.mod h1:9I4somxYTbIHy5NJKHRl3wXiIaQGbYVAs8BPL6v8lEs=
github.com/google/pprof v0.0.0-20181206194817-3ea8567a2e57/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc=
github.com/google/pprof v0.0.0-20190515194954-54271f7e092f/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc=
github.com/google/pprof v0.0.0-20200212024743-f11f1df84d12/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM=
github.com/google/pprof v0.0.0-20240409012703-83162a5b38cd h1:gbpYu9NMq8jhDVbvlGkMFWCjLFlqqEZjEmObmhUy6Vo=
github.com/google/pprof v0.0.0-20240409012703-83162a5b38cd/go.mod h1:kf6iHlnVGwgKolg33glAes7Yg/8iWP8ukqeldJSO7jw=
github.com/google/renameio v0.1.0/go.mod h1:KWCgfxg9yswjAJkECMjeO8J8rahYeXnNhOm40UhjYkI=
github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0=
github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
github.com/googleapis/gax-go/v2 v2.0.4/go.mod h1:0Wqv26UfaUD9n4G6kQubkQ+KchISgw+vpHVxEJEs9eg=
github.com/googleapis/gax-go/v2 v2.0.5/go.mod h1:DWXyrwAJ9X0FpwwEdw+IPEYBICEFu5mhpdKc/us6bOk=
github.com/hashicorp/errwrap v1.0.0 h1:hLrqtEDnRye3+sgx6z4qVLNuviH3MR5aQ0ykNJa/UYA=
github.com/hashicorp/errwrap v1.0.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4=
github.com/hashicorp/errwrap v1.1.0 h1:OxrOeh75EUXMY8TBjag2fzXGZ40LB6IKw45YeGUDY2I=
github.com/hashicorp/errwrap v1.1.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4=
github.com/hashicorp/go-multierror v1.1.1 h1:H5DkEtf6CXdFp0N0Em5UCwQpXMWke8IA0+lD48awMYo=
github.com/hashicorp/go-multierror v1.1.1/go.mod h1:iw975J/qwKPdAO1clOe2L8331t/9/fmwbPZ6JB6eMoM=
github.com/hashicorp/golang-lru v0.5.0/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8=
github.com/hashicorp/golang-lru v0.5.1/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8=
github.com/hashicorp/golang-lru/v2 v2.0.7 h1:a+bsQ5rvGLjzHuww6tVxozPZFVghXaHOwFs4luLUK2k=
github.com/hashicorp/golang-lru/v2 v2.0.7/go.mod h1:QeFd9opnmA6QUJc5vARoKUSoFhyfM2/ZepoAG6RGpeM=
github.com/ianlancetaylor/demangle v0.0.0-20181102032728-5e5cf60278f6/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc=
github.com/json-iterator/go v1.1.12 h1:PV8peI4a0ysnczrg+LtxykD8LfKY9ML6u2jnxaEnrnM=
github.com/json-iterator/go v1.1.12/go.mod h1:e30LSqwooZae/UwlEbR2852Gd8hjQvJoHmT4TnhNGBo=
github.com/jstemmer/go-junit-report v0.0.0-20190106144839-af01ea7f8024/go.mod h1:6v2b51hI/fHJwM22ozAgKL4VKDeJcHhJFhtBdhmNjmU=
github.com/jstemmer/go-junit-report v0.9.1/go.mod h1:Brl9GWCQeLvo8nXZwPNNblvFj/XSXhF0NWZEnDohbsk=
github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck=
github.com/klauspost/compress v1.4.1/go.mod h1:RyIbtBH6LamlWaDj8nUwkbUhJ87Yi3uG0guNDohfE1A=
github.com/klauspost/compress v1.17.11 h1:In6xLpyWOi1+C7tXUUWv2ot1QvBjxevKAaI6IXrJmUc=
github.com/klauspost/compress v1.17.11/go.mod h1:pMDklpSncoRMuLFrf1W9Ss9KT+0rH90U12bZKk7uwG0=
github.com/klauspost/compress v1.15.9 h1:wKRjX6JRtDdrE9qwa4b/Cip7ACOshUI4smpCQanqjSY=
github.com/klauspost/compress v1.15.9/go.mod h1:PhcZ0MbTNciWF3rruxRgKxI5NkcHHrHUDtV4Yw2GlzU=
github.com/klauspost/cpuid v1.2.0/go.mod h1:Pj4uuM528wm8OyEC2QMXAi2YiTZ96dNQPGgoMS4s3ek=
github.com/klauspost/pgzip v1.2.6 h1:8RXeL5crjEUFnR2/Sn6GJNWtSQ3Dk8pq4CL3jvdDyjU=
github.com/klauspost/pgzip v1.2.6/go.mod h1:Ch1tH69qFZu15pkjo5kYi6mth2Zzwzt50oCQKQE9RUs=
github.com/klauspost/pgzip v1.2.5 h1:qnWYvvKqedOF2ulHpMG72XQol4ILEJ8k2wwRl/Km8oE=
github.com/klauspost/pgzip v1.2.5/go.mod h1:Ch1tH69qFZu15pkjo5kYi6mth2Zzwzt50oCQKQE9RUs=
github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo=
github.com/kr/pretty v0.2.1 h1:Fmg33tUaq4/8ym9TJN1x7sLJnHVwhP33CNkpYV/7rwI=
github.com/kr/pretty v0.2.1/go.mod h1:ipq/a2n7PKx3OHsz4KJII5eveXtPO4qwEXGdVfWzfnI=
github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ=
github.com/kr/text v0.1.0 h1:45sCR5RtlFHMR4UwH9sdQ5TC8v0qDQCHnXt+kaKSTVE=
github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI=
github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY=
github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y=
github.com/mattn/go-sqlite3 v1.14.24 h1:tpSp2G2KyMnnQu99ngJ47EIkWVmliIizyZBfPrBWDRM=
github.com/mattn/go-sqlite3 v1.14.24/go.mod h1:Uh1q+B4BYcTPb+yiD3kU8Ct7aC0hY9fxUwlHK0RXw+Y=
github.com/mholt/archiver/v4 v4.0.0-alpha.9 h1:EZgAsW6DsuawxDgTtIdjCUBa2TQ6AOe9pnCidofSRtE=
github.com/mholt/archiver/v4 v4.0.0-alpha.9/go.mod h1:5D3uct315OMkMRXKwEuMB+wQi/2m5NQngKDmApqwVlo=
github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421 h1:ZqeYNhU3OHLH3mGKHDcjJRFFRrJa6eAM5H+CtDdOsPc=
github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q=
github.com/modern-go/reflect2 v1.0.2 h1:xBagoLtFs94CBntxluKeaWgTMpvLxC4ur3nMaC9Gz0M=
github.com/modern-go/reflect2 v1.0.2/go.mod h1:yWuevngMOJpCy52FWWMvUC8ws7m/LJsjYzDa0/r8luk=
github.com/ncruces/go-sqlite3 v0.26.0 h1:dY6ASfuhSEbtSge6kJwjyJVC7bXCpgEVOycmdboKJek=
github.com/ncruces/go-sqlite3 v0.26.0/go.mod h1:46HIzeCQQ+aNleAxCli+vpA2tfh7ttSnw24kQahBc1o=
github.com/ncruces/go-strftime v0.1.9 h1:bY0MQC28UADQmHmaF5dgpLmImcShSi2kHU9XLdhx/f4=
github.com/ncruces/go-strftime v0.1.9/go.mod h1:Fwc5htZGVVkseilnfgOVb9mKy6w1naJmn9CehxcKcls=
github.com/ncruces/julianday v1.0.0 h1:fH0OKwa7NWvniGQtxdJRxAgkBMolni2BjDHaWTxqt7M=
github.com/ncruces/julianday v1.0.0/go.mod h1:Dusn2KvZrrovOMJuOt0TNXL6tB7U2E8kvza5fFc9G7g=
github.com/nwaples/rardecode/v2 v2.0.0-beta.4 h1:sdiJxQdPjECn2lh9nLFFhgLCf+0ulDU5rODbtERTlUY=
github.com/nwaples/rardecode/v2 v2.0.0-beta.4/go.mod h1:yntwv/HfMc/Hbvtq9I19D1n58te3h6KsqCf3GxyfBGY=
github.com/pierrec/lz4/v4 v4.1.21 h1:yOVMLb6qSIDP67pl/5F7RepeKYu/VmTyEXvuMI5d9mQ=
github.com/pierrec/lz4/v4 v4.1.21/go.mod h1:gZWDp/Ze/IJXGXf23ltt2EXimqmTUXEy0GFuRQyBid4=
github.com/mholt/archiver/v4 v4.0.0-alpha.8 h1:tRGQuDVPh66WCOelqe6LIGh0gwmfwxUrSSDunscGsRM=
github.com/mholt/archiver/v4 v4.0.0-alpha.8/go.mod h1:5f7FUYGXdJWUjESffJaYR4R60VhnHxb2X3T1teMyv5A=
github.com/nwaples/rardecode/v2 v2.0.0-beta.2 h1:e3mzJFJs4k83GXBEiTaQ5HgSc/kOK8q0rDaRO0MPaOk=
github.com/nwaples/rardecode/v2 v2.0.0-beta.2/go.mod h1:yntwv/HfMc/Hbvtq9I19D1n58te3h6KsqCf3GxyfBGY=
github.com/pierrec/lz4/v4 v4.1.15 h1:MO0/ucJhngq7299dKLwIMtgTfbkoSPF6AoMYDd8Q4q0=
github.com/pierrec/lz4/v4 v4.1.15/go.mod h1:gZWDp/Ze/IJXGXf23ltt2EXimqmTUXEy0GFuRQyBid4=
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
github.com/prometheus/client_model v0.0.0-20190812154241-14fe0d1b01d4/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA=
github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec h1:W09IVJc94icq4NjY3clb7Lk8O1qJ8BdBEF8z0ibU0rE=
github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec/go.mod h1:qqbHyh8v60DhA7CoWK5oRCqLrMHRGoxYCSS9EjAz6Eo=
github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4=
github.com/rwcarlsen/goexif v0.0.0-20190401172101-9e8deecbddbd/go.mod h1:hPqNNc0+uJM6H+SuU8sEs5K5IQeKccPqeSjfgcKGgPk=
github.com/sorairolake/lzip-go v0.3.5 h1:ms5Xri9o1JBIWvOFAorYtUNik6HI3HgBTkISiqu0Cwg=
github.com/sorairolake/lzip-go v0.3.5/go.mod h1:N0KYq5iWrMXI0ZEXKXaS9hCyOjZUQdBDEIbXfoUwbdk=
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw=
github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo=
github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4=
github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU=
github.com/stretchr/testify v1.8.1/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4=
github.com/stretchr/testify v1.10.0 h1:Xv5erBjTwe/5IxqUQTdXv5kgmIvbHo3QQyRwhJsOfJA=
github.com/stretchr/testify v1.10.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY=
github.com/tetratelabs/wazero v1.9.0 h1:IcZ56OuxrtaEz8UYNRHBrUa9bYeX9oVY93KspZZBf/I=
github.com/tetratelabs/wazero v1.9.0/go.mod h1:TSbcXCfFP0L2FGkRPxHphadXPjo1T6W+CseNNY7EkjM=
github.com/stretchr/testify v1.8.4 h1:CcVxjf3Q8PM0mHUKJCdn+eZZtm5yQwehR5yeSVQQcUk=
github.com/stretchr/testify v1.8.4/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo=
github.com/therootcompany/xz v1.0.1 h1:CmOtsn1CbtmyYiusbfmhmkpAAETj0wBIH6kCYaX+xzw=
github.com/therootcompany/xz v1.0.1/go.mod h1:3K3UH1yCKgBneZYhuQUvJ9HPD19UEXEI0BWbMn8qNMY=
github.com/ulikunitz/xz v0.5.8/go.mod h1:nbz6k7qbPmH4IRqmfOplQw/tblSgqTqBwxkY0oWt/14=
github.com/ulikunitz/xz v0.5.12 h1:37Nm15o69RwBkXM0J6A5OlE67RZTfzUxTj8fB3dfcsc=
github.com/ulikunitz/xz v0.5.12/go.mod h1:nbz6k7qbPmH4IRqmfOplQw/tblSgqTqBwxkY0oWt/14=
github.com/vmihailenco/msgpack v4.0.4+incompatible h1:dSLoQfGFAo3F6OoNhwUmLwVgaUXK79GlxNBwueZn0xI=
github.com/vmihailenco/msgpack v4.0.4+incompatible/go.mod h1:fy3FlTQTDXWkZ7Bh6AcGMlsjHatGryHQYUTf1ShIgkk=
github.com/xyproto/randomstring v1.0.5 h1:YtlWPoRdgMu3NZtP45drfy1GKoojuR7hmRcnhZqKjWU=
github.com/xyproto/randomstring v1.0.5/go.mod h1:rgmS5DeNXLivK7YprL0pY+lTuhNQW3iGxZ18UQApw/E=
github.com/ulikunitz/xz v0.5.6/go.mod h1:2bypXElzHzzJZwzH67Y6wb67pO62Rzfn7BSiF4ABRW8=
github.com/ulikunitz/xz v0.5.10 h1:t92gobL9l3HE202wg3rlk19F6X+JOxl9BBrCCMYEYd8=
github.com/ulikunitz/xz v0.5.10/go.mod h1:nbz6k7qbPmH4IRqmfOplQw/tblSgqTqBwxkY0oWt/14=
github.com/yuin/goldmark v1.3.5/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k=
github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY=
go.etcd.io/bbolt v1.4.0 h1:TU77id3TnN/zKr7CO/uk+fBCwF2jGcMuw2B/FMAzYIk=
go.etcd.io/bbolt v1.4.0/go.mod h1:AsD+OCi/qPN1giOX1aiLAha3o1U8rAz65bvN4j0sRuk=
go.opencensus.io v0.21.0/go.mod h1:mSImk1erAIZhrmZN+AvHh14ztQfjbGwt4TtuofqLduU=
go.opencensus.io v0.22.0/go.mod h1:+kGneAE2xo2IficOXnaByMWTGM9T73dGwxeWcUqIpI8=
go.opencensus.io v0.22.2/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw=
go.opencensus.io v0.22.3/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw=
go4.org v0.0.0-20230225012048-214862532bf5 h1:nifaUDeh+rPaBCMPMQHZmvJf+QdpLFnuQPwx+LxVmtc=
go4.org v0.0.0-20230225012048-214862532bf5/go.mod h1:F57wTi5Lrj6WLyswp5EYV1ncrEbFGHD4hhz6S1ZYeaU=
go4.org v0.0.0-20200411211856-f5505b9728dd h1:BNJlw5kRTzdmyfh5U8F93HA2OwkP7ZGwA51eJ/0wKOU=
go4.org v0.0.0-20200411211856-f5505b9728dd/go.mod h1:CIiUVy99QCPfoE13bO4EZaz5GZMZXMSBGhxRdsvzbkg=
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
golang.org/x/crypto v0.0.0-20190510104115-cbcb75029529/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
golang.org/x/crypto v0.0.0-20190605123033-f99c8df09eb5/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
@ -187,13 +137,13 @@ golang.org/x/exp v0.0.0-20191030013958-a1ab85dbe136/go.mod h1:JXzH8nQsPlswgeRAPE
golang.org/x/exp v0.0.0-20191129062945-2f5052295587/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4=
golang.org/x/exp v0.0.0-20191227195350-da58074b4299/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4=
golang.org/x/exp v0.0.0-20200207192155-f17229e696bd/go.mod h1:J/WKrq2StrnmMY6+EHIKF9dgMWnmCNThgcyBT1FY9mM=
golang.org/x/exp v0.0.0-20250218142911-aa4b98e5adaa h1:t2QcU6V556bFjYgu4L6C+6VrCPyJZ+eyRsABUPs1mz4=
golang.org/x/exp v0.0.0-20250218142911-aa4b98e5adaa/go.mod h1:BHOTPb3L19zxehTsLoJXVaTktb06DFgmdW6Wb9s8jqk=
golang.org/x/exp v0.0.0-20240416160154-fe59bbe5cc7f h1:99ci1mjWVBWwJiEKYY6jWa4d2nTQVIEhZIptnrVb1XY=
golang.org/x/exp v0.0.0-20240416160154-fe59bbe5cc7f/go.mod h1:/lliqkxwWAhPjf5oSOIJup2XcqJaw8RGS6k3TGEc7GI=
golang.org/x/image v0.0.0-20190227222117-0694c2d4d067/go.mod h1:kZ7UVZpmo3dzQBMxlp+ypCbDeSB+sBbTgSJuh5dn5js=
golang.org/x/image v0.0.0-20190802002840-cff245a6509b/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0=
golang.org/x/image v0.0.0-20191009234506-e7c1f5e7dbb8/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0=
golang.org/x/image v0.25.0 h1:Y6uW6rH1y5y/LK1J8BPWZtr6yZ7hrsy6hFrXjgsc2fQ=
golang.org/x/image v0.25.0/go.mod h1:tCAmOEGthTtkalusGp1g3xa2gke8J6c2N565dTyl9Rs=
golang.org/x/image v0.7.0 h1:gzS29xtG1J5ybQlv0PuyfE3nmc6R4qB73m6LUUmvFuw=
golang.org/x/image v0.7.0/go.mod h1:nd/q4ef1AKKYl/4kft7g+6UyGbdiqWqTP1ZAbRoV7Rg=
golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE=
golang.org/x/lint v0.0.0-20190227174305-5b3e6a55c961/go.mod h1:wehouNa3lNwaWXcvxsM5YxQ5yQlVC4a0KAMCusXpPoU=
golang.org/x/lint v0.0.0-20190301231843-5614ed5bae6f/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE=
@ -209,9 +159,9 @@ golang.org/x/mod v0.0.0-20190513183733-4bf6d317e70e/go.mod h1:mXi4GBBbnImb6dmsKG
golang.org/x/mod v0.1.0/go.mod h1:0QHyrYULN0/3qlju5TqG8bIK38QM8yzMo5ekMj3DlcY=
golang.org/x/mod v0.1.1-0.20191105210325-c90efee705ee/go.mod h1:QqPTAvyqsEbceGzBzNggFXnrqF1CaUcvgkdR5Ot7KZg=
golang.org/x/mod v0.2.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
golang.org/x/mod v0.4.2/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4=
golang.org/x/mod v0.23.0 h1:Zb7khfcRGKk+kqfxFaP5tZqCnDZMjC5VtUBs87Hr6QM=
golang.org/x/mod v0.23.0/go.mod h1:6SkKJ3Xj0I0BrPOZoBy3bdMptDDU9oJrpohJ3eWZ1fY=
golang.org/x/mod v0.8.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs=
golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20190108225652-1e06a53dbb7e/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
@ -225,9 +175,11 @@ golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLL
golang.org/x/net v0.0.0-20190724013045-ca1201d0de80/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
golang.org/x/net v0.0.0-20191209160850-c0dbc17a3553/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
golang.org/x/net v0.0.0-20200202094626-16171245cfb2/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
golang.org/x/net v0.0.0-20200222125558-5a598a2470a0/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg=
golang.org/x/net v0.0.0-20210405180319-a5a99cb37ef4/go.mod h1:p54w0d4576C0XHj96bSt6lcn1PtDYWL6XObtHCRCNQM=
golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c=
golang.org/x/net v0.7.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs=
golang.org/x/net v0.6.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs=
golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=
golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
@ -239,9 +191,9 @@ golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJ
golang.org/x/sync v0.0.0-20190227155943-e225da77a7e6/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20210220032951-036812b2e83c/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.14.0 h1:woo0S4Yywslg6hp4eUFjTVOyKt0RookbpAHG4c1HmhQ=
golang.org/x/sync v0.14.0/go.mod h1:1dzgHSNfp02xaA81J2MS99Qcpr2w7fw1gpm99rleRqA=
golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20190312061237-fead79001313/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
@ -254,14 +206,14 @@ golang.org/x/sys v0.0.0-20190726091711-fc99dfbffb4e/go.mod h1:h1NjWce9XRLGQEsW7w
golang.org/x/sys v0.0.0-20191204072324-ce4227a45e2e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20191228213918-04cbcbbfeed8/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200212091648-12a6c2dcc1e4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200223170610-d5e6a3e2c0ae/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210330210617-4fbd30eecc44/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210510120138-977fb7262007/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.33.0 h1:q3i8TbbEz+JRD9ywIRlyRAQbM0qF7hu24q3teo2hbuw=
golang.org/x/sys v0.33.0/go.mod h1:BJP2sWEmIv4KK5OTEluFJCKSidICx8ciO85XgH3Ak8k=
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k=
@ -271,10 +223,8 @@ golang.org/x/text v0.3.1-0.20180807135948-17ff2d5776d2/go.mod h1:NqM8EUOU14njkJ3
golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk=
golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ=
golang.org/x/text v0.3.8/go.mod h1:E6s5w1FMmriuDzIBO73fBruAKo1PCIq6d2Q6DHfQ8WQ=
golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8=
golang.org/x/text v0.25.0 h1:qVyWApTSYLk/drJRO5mDlNYskwQznZmkpV2c8q9zls4=
golang.org/x/text v0.25.0/go.mod h1:WEdwpYrmk1qmdHvhkSTNPm3app7v4rsT8F2UD6+VHIA=
golang.org/x/text v0.9.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8=
golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
@ -301,14 +251,13 @@ golang.org/x/tools v0.0.0-20191227053925-7b8e75db28f4/go.mod h1:TB2adYChydJhpapK
golang.org/x/tools v0.0.0-20200130002326-2f3ba24bd6e7/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=
golang.org/x/tools v0.0.0-20200207183749-b753a1ba74fa/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=
golang.org/x/tools v0.0.0-20200212150539-ea181f53ac56/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=
golang.org/x/tools v0.1.1/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk=
golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc=
golang.org/x/tools v0.30.0 h1:BgcpHewrV5AUp2G9MebG4XPFI1E2W41zU1SaqVA9vJY=
golang.org/x/tools v0.30.0/go.mod h1:c347cR/OJfw5TI+GfX7RUPNMdDRRbjvYTS0jPyvsVtY=
golang.org/x/tools v0.6.0/go.mod h1:Xwgl3UAJ/d3gWutnCtw505GrjyAbvKui8lOU390QaIU=
golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
gonum.org/v1/gonum v0.16.0 h1:5+ul4Swaf3ESvrOnidPp4GZbzf0mxVQpDCYUQE7OJfk=
gonum.org/v1/gonum v0.16.0/go.mod h1:fef3am4MQ93R2HHpKnLk4/Tbh/s0+wqD5nfa6Pnwy4E=
golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
google.golang.org/api v0.4.0/go.mod h1:8k5glujaEP+g9n7WNsDg8QP6cUVNI86fCNMcbazEtwE=
google.golang.org/api v0.7.0/go.mod h1:WtwebWUNSVBH/HAw79HIFXZNqEvBhG+Ra+ax0hx3E3M=
google.golang.org/api v0.8.0/go.mod h1:o4eAsZoiT+ibD93RtjEohWalFOjRDx6CVaqeizhEnKg=
@ -322,8 +271,6 @@ google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7
google.golang.org/appengine v1.5.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4=
google.golang.org/appengine v1.6.1/go.mod h1:i06prIuMbXzDqacNJfV5OdTW448YApPu5ww/cMBSeb0=
google.golang.org/appengine v1.6.5/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc=
google.golang.org/appengine v1.6.8 h1:IhEN5q69dyKagZPYMSdIjS2HqprW324FRQZJcGqPAsM=
google.golang.org/appengine v1.6.8/go.mod h1:1jJ3jBArFh5pcgW8gCtRJnepW8FzD1V44FJffLiz/Ds=
google.golang.org/genproto v0.0.0-20180817151627-c66870c02cf8/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc=
google.golang.org/genproto v0.0.0-20190307195333-5fe7a883aa19/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE=
google.golang.org/genproto v0.0.0-20190418145605-e7d98fc518a7/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE=
@ -344,13 +291,8 @@ google.golang.org/grpc v1.23.0/go.mod h1:Y5yQAOtifL1yxbo5wqy6BxZv8vAUGQwXBOALyac
google.golang.org/grpc v1.26.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk=
google.golang.org/grpc v1.27.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk=
google.golang.org/grpc v1.27.1/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk=
google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw=
google.golang.org/protobuf v1.26.0 h1:bxAC2xTBsZGibn2RTntX0oH50xLsqy1OxA9tTL3p/lk=
google.golang.org/protobuf v1.26.0/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk=
gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q=
gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI=
gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
@ -361,30 +303,6 @@ honnef.co/go/tools v0.0.0-20190106161140-3f1c8253044a/go.mod h1:rf3lG4BRIbNafJWh
honnef.co/go/tools v0.0.0-20190418001031-e561f6794a2a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
honnef.co/go/tools v0.0.0-20190523083050-ea95bdfd59fc/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
honnef.co/go/tools v0.0.1-2019.2.3/go.mod h1:a3bituU0lyd329TUQxRnasdCoJDkEUEAqEt0JzvZhAg=
modernc.org/cc/v4 v4.24.4 h1:TFkx1s6dCkQpd6dKurBNmpo+G8Zl4Sq/ztJ+2+DEsh0=
modernc.org/cc/v4 v4.24.4/go.mod h1:uVtb5OGqUKpoLWhqwNQo/8LwvoiEBLvZXIQ/SmO6mL0=
modernc.org/ccgo/v4 v4.23.16 h1:Z2N+kk38b7SfySC1ZkpGLN2vthNJP1+ZzGZIlH7uBxo=
modernc.org/ccgo/v4 v4.23.16/go.mod h1:nNma8goMTY7aQZQNTyN9AIoJfxav4nvTnvKThAeMDdo=
modernc.org/fileutil v1.3.0 h1:gQ5SIzK3H9kdfai/5x41oQiKValumqNTDXMvKo62HvE=
modernc.org/fileutil v1.3.0/go.mod h1:XatxS8fZi3pS8/hKG2GH/ArUogfxjpEKs3Ku3aK4JyQ=
modernc.org/gc/v2 v2.6.3 h1:aJVhcqAte49LF+mGveZ5KPlsp4tdGdAOT4sipJXADjw=
modernc.org/gc/v2 v2.6.3/go.mod h1:YgIahr1ypgfe7chRuJi2gD7DBQiKSLMPgBQe9oIiito=
modernc.org/libc v1.61.13 h1:3LRd6ZO1ezsFiX1y+bHd1ipyEHIJKvuprv0sLTBwLW8=
modernc.org/libc v1.61.13/go.mod h1:8F/uJWL/3nNil0Lgt1Dpz+GgkApWh04N3el3hxJcA6E=
modernc.org/mathutil v1.7.1 h1:GCZVGXdaN8gTqB1Mf/usp1Y/hSqgI2vAGGP4jZMCxOU=
modernc.org/mathutil v1.7.1/go.mod h1:4p5IwJITfppl0G4sUEDtCr4DthTaT47/N3aT6MhfgJg=
modernc.org/memory v1.8.2 h1:cL9L4bcoAObu4NkxOlKWBWtNHIsnnACGF/TbqQ6sbcI=
modernc.org/memory v1.8.2/go.mod h1:ZbjSvMO5NQ1A2i3bWeDiVMxIorXwdClKE/0SZ+BMotU=
modernc.org/opt v0.1.4 h1:2kNGMRiUjrp4LcaPuLY2PzUfqM/w9N23quVwhKt5Qm8=
modernc.org/opt v0.1.4/go.mod h1:03fq9lsNfvkYSfxrfUhZCWPk1lm4cq4N+Bh//bEtgns=
modernc.org/sortutil v1.2.1 h1:+xyoGf15mM3NMlPDnFqrteY07klSFxLElE2PVuWIJ7w=
modernc.org/sortutil v1.2.1/go.mod h1:7ZI3a3REbai7gzCLcotuw9AC4VZVpYMjDzETGsSMqJE=
modernc.org/sqlite v1.35.0 h1:yQps4fegMnZFdphtzlfQTCNBWtS0CZv48pRpW3RFHRw=
modernc.org/sqlite v1.35.0/go.mod h1:9cr2sicr7jIaWTBKQmAxQLfBv9LL0su4ZTEV+utt3ic=
modernc.org/strutil v1.2.1 h1:UneZBkQA+DX2Rp35KcM69cSsNES9ly8mQWD71HKlOA0=
modernc.org/strutil v1.2.1/go.mod h1:EHkiggD70koQxjVdSBM3JKM7k6L0FbGE5eymy9i3B9A=
modernc.org/token v1.1.0 h1:Xl7Ap9dKaEs5kLoOQeQmPWevfnk/DM5qcLcYlA8ys6Y=
modernc.org/token v1.1.0/go.mod h1:UGzOrNV1mAFSEB63lOFHIpNRUVMvYTc6yu1SMY/XTDM=
rsc.io/binaryregexp v0.2.0/go.mod h1:qTv7/COck+e2FymRvadv62gMdZztPaShugOCi3I+8D8=
rsc.io/quote/v3 v3.1.0/go.mod h1:yEA65RcK8LyAZtP9Kv3t0HmxON59tX3rD+tICJqUlj0=
rsc.io/sampler v1.3.0/go.mod h1:T1hPZKmBbMNahiBKFy5HrXp6adAjACjK9JXDnKaTXpA=

BIN
hashes.gz

Binary file not shown.


@ -1,269 +0,0 @@
package ch
import (
"cmp"
_ "embed"
"fmt"
"image"
"log"
"math/bits"
"slices"
"strings"
"sync"
"unsafe"
"gitea.narnian.us/lordwelch/goimagehash"
json "github.com/json-iterator/go"
"github.com/vmihailenco/msgpack"
)
//go:embed hashes.gz
var Hashes []byte
const (
H0 uint64 = 0b11111111 << (8 * iota)
H1
H2
H3
H4
H5
H6
H7
)
const (
Shift0 = (8 * iota)
Shift1
Shift2
Shift3
Shift4
Shift5
Shift6
Shift7
)
const (
ComicVine Source = "comicvine.gamespot.com"
SavedHashVersion int = 2
)
var sources *sync.Map = newSourceMap()
type Source string
type Match struct {
Distance int
Hash uint64
}
type ID struct {
Domain *Source
ID string
}
type Result struct {
Hash Hash
ID ID
Distance int
EquivalentIDs []ID
}
type Im struct {
Im image.Image
Format string
ID ID
NewOnly bool
}
type ImageHash struct {
Hashes []Hash
ID ID
}
type Hash struct {
Hash uint64
Kind goimagehash.Kind
}
func (id *ID) Compare(target ID) int {
return cmp.Or(
strings.Compare(string(*id.Domain), string(*target.Domain)),
strings.Compare(id.ID, target.ID),
)
}
func newSourceMap() *sync.Map {
m := &sync.Map{}
for _, s := range []Source{ComicVine} {
m.Store(s, &s)
}
return m
}
func NewSource[E string | Source](s E) *Source {
s2 := Source(strings.ToLower(Clone(string(s))))
sp, _ := sources.LoadOrStore(s2, &s2)
return sp.(*Source)
}
// IDList is a map of domain to IDs, e.g. IDs["comicvine.gamespot.com"] = []string{"1235"}
// Maps are extremely expensive in Go at small sizes; this should only be used to return info to a user or as a map containing all IDs for a source
type IDList map[Source][]string
//go:noinline
func (a *ID) DecodeMsgpack(dec *msgpack.Decoder) error {
var s struct {
Domain, ID string
}
err := dec.Decode(&s)
if err != nil {
return err
}
a.ID = Clone(s.ID)
a.Domain = NewSource(s.Domain)
return nil
}
//go:noinline
func Clone(s string) string {
if len(s) == 0 {
return ""
}
b := make([]byte, len(s))
copy(b, s)
return unsafe.String(&b[0], len(b))
}
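// Clone exists so a small ID sliced out of a large decode buffer doesn't pin
// the whole buffer in memory; strings.Clone (Go 1.18+) does the same job. A
// quick illustration:
package main

import (
	"fmt"
	"strings"
)

func main() {
	big := strings.Repeat("x", 1<<20) + ":1235"
	id := big[len(big)-4:]     // still shares big's ~1 MiB backing array
	owned := strings.Clone(id) // copies just these 4 bytes
	fmt.Println(id, owned)
}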
//go:noinline
func (a *ID) UnmarshalJSON(b []byte) error {
var s struct {
Domain, ID string
}
if err := json.Unmarshal(b, &s); err != nil {
return err
}
a.ID = Clone(s.ID)
domain := Clone(s.Domain)
a.Domain = NewSource(domain)
return nil
}
func ToIDList(ids []ID) IDList {
idlist := IDList{}
for _, id := range ids {
idlist[*id.Domain] = Insert(idlist[*id.Domain], id.ID)
}
return idlist
}
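// ToIDList groups a flat slice of IDs by source, and Insert keeps each
// source's list sorted and de-duplicated. A sketch against the API above:
package main

import (
	"fmt"

	ch "gitea.narnian.us/lordwelch/comic-hasher"
)

func main() {
	cv := ch.NewSource(ch.ComicVine)
	ids := []ch.ID{{Domain: cv, ID: "42"}, {Domain: cv, ID: "1235"}, {Domain: cv, ID: "42"}}
	fmt.Println(ch.ToIDList(ids)) // map[comicvine.gamespot.com:[1235 42]]
}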
func InsertIDp(ids []*ID, id *ID) []*ID {
index, itemFound := slices.BinarySearchFunc(ids, id, func(existing, target *ID) int {
return cmp.Or(
cmp.Compare(*existing.Domain, *target.Domain),
cmp.Compare(existing.ID, target.ID),
)
})
if itemFound {
return ids
}
return slices.Insert(ids, index, id)
}
func InsertID(ids []ID, id ID) []ID {
index, itemFound := slices.BinarySearchFunc(ids, id, func(existing, target ID) int {
return cmp.Or(
cmp.Compare(*existing.Domain, *target.Domain),
cmp.Compare(existing.ID, target.ID),
)
})
if itemFound {
return ids
}
return slices.Insert(ids, index, id)
}
type NewIDs struct {
OldID ID
NewID ID
}
type HashStorage interface {
GetMatches(hashes []Hash, max int, exactOnly bool) ([]Result, error)
MapHashes(ImageHash)
DecodeHashes(hashes *SavedHashes) error
EncodeHashes() (*SavedHashes, error)
AssociateIDs(newIDs []NewIDs) error
GetIDs(id ID) IDList
}
func Atleast(maxDistance int, searchHash uint64, hashes []uint64) []Match {
matchingHashes := make([]Match, 0, 20) // hope that we don't need all of them
for _, storedHash := range hashes {
distance := bits.OnesCount64(searchHash ^ storedHash)
if distance <= maxDistance {
matchingHashes = append(matchingHashes, Match{distance, storedHash})
}
}
return matchingHashes
}
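// Atleast ranks candidates by Hamming distance: XOR leaves a 1 bit wherever
// two hashes differ, and OnesCount64 counts those bits. For example:
package main

import (
	"fmt"
	"math/bits"
)

func main() {
	a := uint64(0b1011)
	b := uint64(0b0010)
	fmt.Println(bits.OnesCount64(a ^ b)) // 2: the hashes differ in two bit positions
}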
func InsertIdx[S ~[]E, E cmp.Ordered](slice S, item E) (S, int) {
index, itemFound := slices.BinarySearch(slice, item)
if itemFound {
return slice, index
}
return slices.Insert(slice, index, item), index
}
func Insert[S ~[]E, E cmp.Ordered](slice S, item E) S {
slice, _ = InsertIdx(slice, item)
return slice
}
func HashImage(i Im) ImageHash {
if i.Format == "webp" {
i.Im = goimagehash.FancyUpscale(i.Im.(*image.YCbCr))
}
var (
err error
)
ahash, err := goimagehash.AverageHash(i.Im)
if err != nil {
msg := fmt.Sprintf("Failed to ahash Image: %s", err)
log.Println(msg)
return ImageHash{}
}
dhash, err := goimagehash.DifferenceHash(i.Im)
if err != nil {
msg := fmt.Sprintf("Failed to dhash Image: %s", err)
log.Println(msg)
return ImageHash{}
}
phash, err := goimagehash.PerceptionHash(i.Im)
if err != nil {
msg := fmt.Sprintf("Failed to phash Image: %s", err)
log.Println(msg)
return ImageHash{}
}
return ImageHash{
Hashes: []Hash{{ahash.GetHash(), ahash.GetKind()}, {dhash.GetHash(), dhash.GetKind()}, {phash.GetHash(), phash.GetKind()}},
ID: i.ID,
}
}
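// A hedged usage sketch for HashImage as defined above; the gray test image
// and the ID value are placeholders.
package main

import (
	"fmt"
	"image"

	ch "gitea.narnian.us/lordwelch/comic-hasher"
)

func main() {
	img := image.NewGray(image.Rect(0, 0, 64, 64)) // placeholder cover image
	h := ch.HashImage(ch.Im{
		Im:     img,
		Format: "png", // anything but "webp" skips the upscale path
		ID:     ch.ID{Domain: ch.NewSource(ch.ComicVine), ID: "1235"},
	})
	fmt.Println(len(h.Hashes), "hashes computed for", h.ID.ID) // ahash, dhash, phash
}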
func SplitHash(hash uint64) [8]uint8 {
return [8]uint8{
uint8((hash & H7) >> Shift7),
uint8((hash & H6) >> Shift6),
uint8((hash & H5) >> Shift5),
uint8((hash & H4) >> Shift4),
uint8((hash & H3) >> Shift3),
uint8((hash & H2) >> Shift2),
uint8((hash & H1) >> Shift1),
uint8((hash & H0) >> Shift0),
}
}
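// SplitHash returns the eight bytes of a hash most-significant first, which is
// equivalent to this loop:
package main

import "fmt"

func main() {
	h := uint64(0x0102030405060708)
	var parts [8]uint8
	for i := range parts {
		parts[i] = uint8(h >> (8 * (7 - i))) // index 0 gets the most significant byte
	}
	fmt.Println(parts) // [1 2 3 4 5 6 7 8]
}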

151
main.go Normal file

@ -0,0 +1,151 @@
package ch
import (
"cmp"
"fmt"
"image"
"log"
"math/bits"
"runtime"
"slices"
"gitea.narnian.us/lordwelch/goimagehash"
)
const (
H0 uint64 = 0b11111111 << (8 * iota)
H1
H2
H3
H4
H5
H6
H7
)
const (
Shift0 = (8 * iota)
Shift1
Shift2
Shift3
Shift4
Shift5
Shift6
Shift7
)
const (
ComicVine Source = "comicvine.gamespot.com"
)
type Source string
type Match struct {
Distance int
Hash uint64
}
type Result struct {
IDs IDList
Distance int
Hash ImageHash
}
type Im struct {
Im image.Image
Format string
Domain Source
ID, Path string
}
type Hash struct {
Ahash *goimagehash.ImageHash
Dhash *goimagehash.ImageHash
Phash *goimagehash.ImageHash
Domain Source
ID string
}
type ImageHash struct {
Hash uint64
Kind goimagehash.Kind
}
func Atleast(maxDistance int, searchHash uint64, hashes []uint64) []Match {
matchingHashes := make([]Match, 0, len(hashes)/2) // hope that we don't need all of them
for _, storedHash := range hashes {
distance := bits.OnesCount64(searchHash ^ storedHash)
if distance <= maxDistance {
matchingHashes = append(matchingHashes, Match{distance, storedHash})
}
}
return matchingHashes
}
func Insert[S ~[]E, E cmp.Ordered](slice S, item E) S {
index, itemFound := slices.BinarySearch(slice, item)
if itemFound {
return slice
}
return slices.Insert(slice, index, item)
}
func MemStats() uint64 {
var m runtime.MemStats
runtime.ReadMemStats(&m)
return m.Alloc
}
func HashImage(i Im) Hash {
if i.Format == "webp" {
i.Im = goimagehash.FancyUpscale(i.Im.(*image.YCbCr))
}
var (
err error
ahash *goimagehash.ImageHash
dhash *goimagehash.ImageHash
phash *goimagehash.ImageHash
)
ahash, err = goimagehash.AverageHash(i.Im)
if err != nil {
msg := fmt.Sprintf("Failed to ahash Image: %s", err)
log.Println(msg)
return Hash{}
}
dhash, err = goimagehash.DifferenceHash(i.Im)
if err != nil {
msg := fmt.Sprintf("Failed to dhash Image: %s", err)
log.Println(msg)
return Hash{}
}
phash, err = goimagehash.PerceptionHash(i.Im)
if err != nil {
msg := fmt.Sprintf("Failed to phash Image: %s", err)
log.Println(msg)
return Hash{}
}
return Hash{
Ahash: ahash,
Dhash: dhash,
Phash: phash,
Domain: i.Domain,
ID: i.ID,
}
}
func SplitHash(hash uint64) [8]uint8 {
return [8]uint8{
uint8((hash & H7) >> Shift7),
uint8((hash & H6) >> Shift6),
uint8((hash & H5) >> Shift5),
uint8((hash & H4) >> Shift4),
uint8((hash & H3) >> Shift3),
uint8((hash & H2) >> Shift2),
uint8((hash & H1) >> Shift1),
uint8((hash & H0) >> Shift0),
}
}
type IDList map[Source][]string // IDs is a map of domain to IDs, e.g. IDs["comicvine.gamespot.com"] = []string{"1235"}

16
path.go

@ -1,16 +0,0 @@
package ch
import (
"os"
"path/filepath"
)
func RmdirP(path string) error {
err := os.Remove(path)
if err != nil {
return err
}
dir, _ := filepath.Split(path)
_ = RmdirP(dir) // We only care about the error for the first path; we always expect at least one parent removal to fail
return nil
}
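// A small usage sketch for RmdirP as defined above: remove a leaf directory,
// then prune each now-empty parent until one refuses to go.
package main

import (
	"os"
	"path/filepath"

	ch "gitea.narnian.us/lordwelch/comic-hasher"
)

func main() {
	base, _ := os.MkdirTemp("", "rmdirp")
	leaf := filepath.Join(base, "a", "b", "c")
	_ = os.MkdirAll(leaf, 0o755)
	// Removes c, then the now-empty b, a, and base; stops silently at the
	// first non-empty ancestor (the system temp dir here).
	if err := ch.RmdirP(leaf); err != nil {
		panic(err)
	}
}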


@ -1,6 +0,0 @@
[build-system]
requires = ["setuptools>=42", "wheel", "setuptools_scm[toml]>=3.4"]
build-backend = "setuptools.build_meta"
[tool.setuptools_scm]
local_scheme = "no-local-version"


@ -1,274 +0,0 @@
package ch
import (
"cmp"
"errors"
"fmt"
"slices"
"strings"
// json "github.com/goccy/go-json"
json "github.com/json-iterator/go"
// "encoding/json"
"gitea.narnian.us/lordwelch/goimagehash"
"github.com/vmihailenco/msgpack"
)
type Format int
const (
Msgpack Format = iota + 1
JSON
CurrentSavedHashesVersion int = 2
)
var versionMap = map[int]versionDecoder{
0: DecodeHashesV0,
1: DecodeHashesV1,
2: DecodeHashesV2,
}
var formatNames = map[Format]string{
JSON: "json",
Msgpack: "msgpack",
}
var formatValues = map[string]Format{
"json": JSON,
"msgpack": Msgpack,
}
type OldSavedHashes map[Source]map[string][3]uint64
type SavedHashesv1 struct {
IDs [][]ID
Hashes [3]map[uint64]int
}
// SavedHashes The IDs and Hashes fields have no direct correlation.
// It is perfectly valid to have an empty IDs or an empty Hashes field.
// If two covers have identical hashes, they should be two entries in Hashes, not a set in IDs with two IDs from the same source
type SavedHashes struct {
Version int
IDs [][]ID // List of sets of IDs that are the same across Sources, should generally only have one Source per set
Hashes []SavedHash // List of all known hashes, hashes will be duplicated for each source
}
type SavedHash struct {
Hash Hash
ID ID
}
type Encoder func(any) ([]byte, error)
type Decoder func([]byte, interface{}) error
type versionDecoder func(Decoder, []byte) (*SavedHashes, error)
var NoHashes = errors.New("no hashes")
var DecodeError = errors.New("decoder failure")
func (f Format) String() string {
if name, known := formatNames[f]; known {
return name
}
return "Unknown"
}
func (f *Format) Set(s string) error {
if format, known := formatValues[strings.ToLower(s)]; known {
*f = format
} else {
return fmt.Errorf("Unknown format: %d", f)
}
return nil
}
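// String and Set make *Format satisfy the standard flag.Value interface, so a
// command can accept the format on its command line. A hedged wiring sketch
// (the flag name is illustrative):
package main

import (
	"flag"
	"fmt"

	ch "gitea.narnian.us/lordwelch/comic-hasher"
)

func main() {
	format := ch.Msgpack // default
	flag.Var(&format, "format", "hash file format: json or msgpack")
	flag.Parse()
	fmt.Println("using format:", format)
}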
func (h *SavedHash) Clone() SavedHash {
return SavedHash{
Hash: Hash{
Hash: h.Hash.Hash,
Kind: h.Hash.Kind,
},
ID: ID{
Domain: NewSource(*h.ID.Domain),
ID: strings.Clone(h.ID.ID),
},
}
}
func (s *SavedHashes) InsertHash(hash SavedHash) {
index, itemFound := slices.BinarySearchFunc(s.Hashes, hash, func(existing SavedHash, target SavedHash) int {
return cmp.Or(
cmp.Compare(existing.Hash.Hash, target.Hash.Hash),
cmp.Compare(existing.Hash.Kind, target.Hash.Kind),
cmp.Compare(*existing.ID.Domain, *target.ID.Domain),
cmp.Compare(existing.ID.ID, target.ID.ID),
)
})
if !itemFound {
s.Hashes = slices.Insert(s.Hashes, index, hash)
}
}
func ConvertHashesV0(oldHashes OldSavedHashes) *SavedHashes {
t := SavedHashes{}
idcount := 0
for _, ids := range oldHashes {
idcount += len(ids)
}
t.Hashes = make([]SavedHash, 0, idcount)
for domain, sourceHashes := range oldHashes {
for id, hashes := range sourceHashes {
for hashType, hash := range hashes {
t.Hashes = append(t.Hashes, SavedHash{
Hash: Hash{
Kind: goimagehash.Kind(hashType + 1),
Hash: hash,
},
ID: ID{NewSource(domain), id},
})
}
}
}
fmt.Println("Length of hashes", len(t.Hashes))
fmt.Println("Length of ID lists", len(t.IDs))
return &t
}
func ConvertHashesV1(oldHashes SavedHashesv1) *SavedHashes {
t := SavedHashes{}
hashCount := 0
for _, hashes := range oldHashes.Hashes {
hashCount += len(hashes)
}
t.IDs = oldHashes.IDs
t.Hashes = make([]SavedHash, 0, hashCount)
for hashType, sourceHashes := range oldHashes.Hashes {
for hash, index := range sourceHashes {
for _, id := range oldHashes.IDs[index] {
t.Hashes = append(t.Hashes, SavedHash{
ID: ID{NewSource(*id.Domain), id.ID},
Hash: Hash{
Kind: goimagehash.Kind(hashType + 1),
Hash: hash,
},
})
}
}
}
fmt.Println("Length of hashes", len(t.Hashes))
fmt.Println("Length of ID lists", len(t.IDs))
return &t
}
func DecodeHashesV0(decode Decoder, hashes []byte) (*SavedHashes, error) {
loadedHashes := OldSavedHashes{}
err := decode(hashes, &loadedHashes)
if err != nil {
return nil, fmt.Errorf("%w: %w", DecodeError, err)
}
if len(loadedHashes) == 0 {
return nil, NoHashes
}
fmt.Println("Loaded V0 hashes")
return ConvertHashesV0(loadedHashes), nil
}
func DecodeHashesV1(decode Decoder, hashes []byte) (*SavedHashes, error) {
loadedHashes := SavedHashesv1{}
err := decode(hashes, &loadedHashes)
if err != nil {
return nil, fmt.Errorf("%w: %w", DecodeError, err)
}
hashesCount := 0
for _, hashes := range loadedHashes.Hashes {
hashesCount += len(hashes)
}
if hashesCount < 1 {
return nil, NoHashes
}
fmt.Println("Loaded V1 hashes")
return ConvertHashesV1(loadedHashes), nil
}
func DecodeHashesV2(decode Decoder, hashes []byte) (*SavedHashes, error) {
fmt.Println("Decode v2 hashes")
loadedHashes := SavedHashes{}
err := decode(hashes, &loadedHashes)
if err != nil {
return nil, fmt.Errorf("%w: %w", DecodeError, err)
}
if len(loadedHashes.Hashes) < 1 && len(loadedHashes.IDs) < 1 {
return nil, NoHashes
}
fmt.Println("Length of hashes", len(loadedHashes.Hashes))
fmt.Println("Length of ID lists", len(loadedHashes.IDs))
fmt.Println("Loaded V2 hashes")
return &loadedHashes, nil
}
func getSavedHashesVersion(decode Decoder, hashes []byte) (int, error) {
type version struct {
Version int
}
var savedVersion version
err := decode(hashes, &savedVersion)
if err != nil {
return -1, fmt.Errorf("%w: %w", DecodeError, err)
}
if savedVersion.Version > 1 {
return savedVersion.Version, nil
}
return -1, nil
}
func DecodeHashes(format Format, hashes []byte) (*SavedHashes, error) {
var decode Decoder
switch format {
case Msgpack:
decode = msgpack.Unmarshal
fmt.Println("Decode Msgpack")
case JSON:
decode = json.Unmarshal
fmt.Println("Decode JSON")
default:
return nil, fmt.Errorf("Unknown format: %v", format)
}
version, err := getSavedHashesVersion(decode, hashes)
if err != nil {
return nil, err
}
if decodeVersion, knownVersion := versionMap[version]; knownVersion {
return decodeVersion(decode, hashes)
}
for _, decodeVersion := range []versionDecoder{
DecodeHashesV0,
DecodeHashesV1,
DecodeHashesV2,
} {
loadedHashes, err := decodeVersion(decode, hashes)
if err == nil {
return loadedHashes, nil
}
}
return nil, NoHashes
}
func EncodeHashes(hashes *SavedHashes, format Format) ([]byte, error) {
var encoder Encoder
switch format {
case Msgpack:
encoder = msgpack.Marshal
case JSON:
encoder = json.Marshal
default:
return nil, fmt.Errorf("Unknown format: %v", format)
}
hashes.Version = CurrentSavedHashesVersion
return encoder(hashes)
}
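// DecodeHashes reads just the Version field first, dispatches to the matching
// decoder, and otherwise falls back to trying each version in turn. A hedged
// round-trip sketch against the API above:
package main

import (
	"fmt"

	ch "gitea.narnian.us/lordwelch/comic-hasher"
)

func main() {
	saved := &ch.SavedHashes{}
	saved.InsertHash(ch.SavedHash{
		Hash: ch.Hash{Hash: 0xF00F, Kind: 1}, // Kind 1 corresponds to ahash here
		ID:   ch.ID{Domain: ch.NewSource(ch.ComicVine), ID: "1235"},
	})
	b, err := ch.EncodeHashes(saved, ch.JSON) // stamps CurrentSavedHashesVersion
	if err != nil {
		panic(err)
	}
	loaded, err := ch.DecodeHashes(ch.JSON, b)
	if err != nil {
		panic(err)
	}
	fmt.Println(len(loaded.Hashes), "hash(es) decoded")
}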


@ -1,62 +0,0 @@
[metadata]
name = comic_hasher
description = python tools to support comic-hasher
long_description = file: README.md
long_description_content_type = text/markdown
url = https://gitea.narnian.us/lordwelch/comic-hasher
author = Timmy Welch
author_email = timmy@narnian.us
license = MIT
license_files = LICENSE
classifiers =
License :: OSI Approved :: MIT License
Programming Language :: Python :: 3
Programming Language :: Python :: 3 :: Only
Programming Language :: Python :: Implementation :: CPython
Programming Language :: Python :: Implementation :: PyPy
[options]
py_modules = quick_tag
install_requires =
comictagger==1.6.0a20
imagehash
python_requires = >=3.9
package_dir =
=cmd
[options.entry_points]
console_scripts = quick-tag=quick_tag:main
[pep8]
ignore = E265,E501
max_line_length = 120
[flake8]
extend-ignore = E501, A003
max_line_length = 120
per-file-ignores =
*_test.py: LN001
[coverage:run]
plugins = covdefaults
[coverage:report]
fail_under = 95
[mypy]
check_untyped_defs = true
disallow_any_generics = true
warn_return_any = true
disallow_incomplete_defs = true
disallow_untyped_defs = true
no_implicit_optional = true
warn_redundant_casts = true
warn_unused_ignores = true
[mypy-testing.*]
warn_return_any = false
disallow_untyped_defs = false
[mypy-tests.*]
warn_return_any = false
disallow_untyped_defs = false

View File

@ -1,372 +0,0 @@
package storage
import (
"cmp"
"errors"
"fmt"
"math/bits"
"slices"
"sync"
ch "gitea.narnian.us/lordwelch/comic-hasher"
"gitea.narnian.us/lordwelch/goimagehash"
)
type basicMapStorage struct {
hashMutex *sync.RWMutex
ids IDMap
aHashes []ch.SavedHash
dHashes []ch.SavedHash
pHashes []ch.SavedHash
}
type IDs struct {
id *ch.ID
idList *[]*ch.ID
}
type IDMap struct {
ids []IDs
}
func (m *IDMap) InsertID(id *ch.ID) *ch.ID {
return m.insertID(id, &[]*ch.ID{id})
}
func (m *IDMap) insertID(id *ch.ID, idList *[]*ch.ID) *ch.ID {
index, found := slices.BinarySearchFunc(m.ids, id, func(id IDs, target *ch.ID) int {
return id.id.Compare(*target)
})
if !found {
m.ids = slices.Insert(m.ids, index, IDs{
id,
idList,
})
}
return m.ids[index].id
}
func (m *IDMap) sort() {
slices.SortFunc(m.ids, func(a, b IDs) int {
return a.id.Compare(*b.id)
})
}
func (m *IDMap) FindID(id *ch.ID) (int, bool) {
return slices.BinarySearchFunc(m.ids, id, func(id IDs, target *ch.ID) int {
return id.id.Compare(*target)
})
}
func (m *IDMap) GetIDs(id *ch.ID) []ch.ID {
index, found := m.FindID(id)
if !found {
return nil
}
ids := make([]ch.ID, 0, len(*m.ids[index].idList))
for _, id := range *m.ids[index].idList {
ids = append(ids, *id)
}
return ids
}
func (m *IDMap) AssociateIDs(newids []ch.NewIDs) error {
for _, newid := range newids {
index, found := m.FindID(&newid.OldID)
if !found {
return ErrIDNotFound
}
*(m.ids[index].idList) = ch.InsertIDp(*(m.ids[index].idList), &newid.NewID)
m.insertID(&newid.NewID, m.ids[index].idList)
}
return nil
}
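A small usage sketch for the map above; idA and idB are assumed to be ch.ID values built elsewhere, and the ch.NewIDs literal assumes its two fields are named OldID and NewID as used above:
// linkIDs seeds idA, declares idB equivalent to it, then reads the merged
// equivalence list back. This is a sketch, not part of the original file.
func linkIDs(m *IDMap, idA, idB ch.ID) ([]ch.ID, error) {
	m.InsertID(&idA) // idA starts in a one-element equivalence list
	if err := m.AssociateIDs([]ch.NewIDs{{OldID: idA, NewID: idB}}); err != nil {
		return nil, err
	}
	return m.GetIDs(&idA), nil // now reports both idA and idB
}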
// func (m *IDMap) NewID(domain Source, id string) *ch.ID {
// newID := ch.ID{domain, id}
// index, found := slices.BinarySearchFunc(m.idList, newID, func(id *ch.ID, target ch.ID) int {
// return id.Compare(*target)
// })
// if !found {
// m.idList = slices.Insert(m.idList, index, &newID)
// }
// return m.idList[index]
// }
var ErrIDNotFound = errors.New("ID not found on this server")
// atleast requires the caller to hold a read lock
func (b *basicMapStorage) atleast(kind goimagehash.Kind, maxDistance int, searchHash uint64) []ch.Result {
matchingHashes := make([]ch.Result, 0, 20) // hope that we don't need more
mappedIds := map[int]bool{}
storedHash := ch.SavedHash{} // reused across iterations to cut per-hash allocations
for _, storedHash = range *b.getCurrentHashes(kind) {
distance := bits.OnesCount64(searchHash ^ storedHash.Hash.Hash)
if distance <= maxDistance {
index, _ := b.ids.FindID(&storedHash.ID)
if mappedIds[index] {
continue
}
mappedIds[index] = true
matchingHashes = append(matchingHashes, ch.Result{
Hash: storedHash.Hash,
ID: storedHash.ID,
Distance: distance,
EquivalentIDs: b.ids.GetIDs(&storedHash.ID),
})
}
}
return matchingHashes
}
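The distance check above is a plain Hamming distance over the 64-bit hashes, isolated here for clarity:
// hammingDistance counts the bit positions where two hashes disagree:
// XOR leaves a 1 wherever the inputs differ, and OnesCount64 tallies them.
func hammingDistance(a, b uint64) int {
	return bits.OnesCount64(a ^ b)
}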
func (b *basicMapStorage) exactMatches(hashes []ch.Hash, max int) []ch.Result {
var foundMatches []ch.Result
for _, hash := range hashes {
mappedIds := map[int]bool{}
index, count := b.findHash(hash)
if count > 0 {
for _, storedHash := range (*b.getCurrentHashes(hash.Kind))[index : index+count] {
index, _ := b.ids.FindID(&storedHash.ID)
if mappedIds[index] {
continue
}
mappedIds[index] = true
foundMatches = append(foundMatches, ch.Result{
Hash: storedHash.Hash,
ID: storedHash.ID,
Distance: 0,
EquivalentIDs: b.ids.GetIDs(&storedHash.ID),
})
}
}
}
return foundMatches
}
func (b *basicMapStorage) GetMatches(hashes []ch.Hash, max int, exactOnly bool) ([]ch.Result, error) {
var (
foundMatches []ch.Result
tl ch.TimeLog
)
tl.ResetTime()
defer tl.LogTime(fmt.Sprintf("Search Complete: max: %v ExactOnly: %v", max, exactOnly))
b.hashMutex.RLock()
defer b.hashMutex.RUnlock()
if exactOnly { // the partial search also returns exact matches; only run the dedicated exact pass when exactOnly is set, so results never need de-duplication
foundMatches = b.exactMatches(hashes, max)
tl.LogTime("Search Exact")
if len(foundMatches) > 0 {
return foundMatches, nil
}
}
for _, hash := range hashes {
foundMatches = append(foundMatches, b.atleast(hash.Kind, max, hash.Hash)...)
}
return foundMatches, nil
}
// getCurrentHashes requires the caller to hold a read lock
func (b *basicMapStorage) getCurrentHashes(kind goimagehash.Kind) *[]ch.SavedHash {
if kind == goimagehash.AHash {
return &b.aHashes
}
if kind == goimagehash.DHash {
return &b.dHashes
}
if kind == goimagehash.PHash {
return &b.pHashes
}
panic("Unknown hash type: " + kind.String())
}
// findHash requires the caller to hold a read lock.
// It returns (index, count); if count < 1 no results were found.
func (b *basicMapStorage) findHash(hash ch.Hash) (int, int) {
currentHashes := *b.getCurrentHashes(hash.Kind)
index, found := slices.BinarySearchFunc(currentHashes, hash, func(existing ch.SavedHash, target ch.Hash) int {
return cmp.Compare(existing.Hash.Hash, target.Hash)
})
if !found {
return index, 0
}
count := 1 // the entry at index already matches; count any duplicates that follow
for i := index + 1; i < len(currentHashes) && currentHashes[i].Hash.Hash == hash.Hash; i++ {
count++
}
return index, count
}
// insertHash requires the caller to hold the write lock
func (b *basicMapStorage) insertHash(hash ch.Hash, id ch.ID) {
currentHashes := b.getCurrentHashes(hash.Kind)
index, count := b.findHash(hash)
max := index + count
for ; index < max; index++ {
if (*currentHashes)[index].ID == id {
return
}
}
sh := ch.SavedHash{
Hash: hash,
ID: id,
}
*currentHashes = slices.Insert(*currentHashes, index, sh)
b.ids.InsertID(&sh.ID)
}
func (b *basicMapStorage) MapHashes(hash ch.ImageHash) {
b.hashMutex.Lock()
defer b.hashMutex.Unlock()
for _, ih := range hash.Hashes {
b.insertHash(ih, hash.ID)
}
}
// DecodeHashes requires the caller to hold the write lock
func (b *basicMapStorage) DecodeHashes(hashes *ch.SavedHashes) error {
if hashes == nil {
return nil
}
b.ids.ids = make([]IDs, 0, len(hashes.Hashes))
// Initialize all the known equal IDs
for _, ids := range hashes.IDs {
new_ids := make([]*ch.ID, 0, len(ids))
for _, id := range ids {
id := id // copy: keeps each stored pointer distinct even before Go 1.22 per-iteration loop variables
new_ids = append(new_ids, &id)
}
for _, id := range new_ids {
b.ids.ids = append(b.ids.ids, IDs{
id,
&new_ids,
})
}
}
b.ids.sort()
slices.SortFunc(hashes.Hashes, func(existing, target ch.SavedHash) int {
return cmp.Or(
cmp.Compare(*existing.ID.Domain, *target.ID.Domain), // Sorted for id insertion efficiency
cmp.Compare(existing.ID.ID, target.ID.ID), // Sorted for id insertion efficiency
cmp.Compare(existing.Hash.Kind, target.Hash.Kind),
cmp.Compare(existing.Hash.Hash, target.Hash.Hash),
)
})
aHashCount := 0
dHashCount := 0
pHashCount := 0
for _, savedHash := range hashes.Hashes {
if savedHash.Hash.Kind == goimagehash.AHash {
aHashCount += 1
}
if savedHash.Hash.Kind == goimagehash.DHash {
dHashCount += 1
}
if savedHash.Hash.Kind == goimagehash.PHash {
pHashCount += 1
}
}
// Preallocate using the exact per-kind counts gathered above
b.aHashes = make([]ch.SavedHash, 0, aHashCount)
b.dHashes = make([]ch.SavedHash, 0, dHashCount)
b.pHashes = make([]ch.SavedHash, 0, pHashCount)
for i := range hashes.Hashes {
hash := hashes.Hashes[i].Clone() // Not cloning this will keep strings/slices loaded from json wasting memory
if hashes.Hashes[i].Hash.Kind == goimagehash.AHash {
b.aHashes = append(b.aHashes, hash)
}
if hashes.Hashes[i].Hash.Kind == goimagehash.DHash {
b.dHashes = append(b.dHashes, hash)
}
if hashes.Hashes[i].Hash.Kind == goimagehash.PHash {
b.pHashes = append(b.pHashes, hash)
}
if hashes.Hashes[i].ID == (ch.ID{}) {
fmt.Println("Empty ID detected")
panic(hashes.Hashes[i])
}
// TODO: Make loading this more efficient
// All known equal IDs are already mapped; add any missing ones from hashes
b.ids.InsertID(&hash.ID)
}
hashCmp := func(existing, target ch.SavedHash) int {
return cmp.Or(
cmp.Compare(existing.Hash.Hash, target.Hash.Hash),
cmp.Compare(*existing.ID.Domain, *target.ID.Domain),
cmp.Compare(existing.ID.ID, target.ID.ID),
)
}
slices.SortFunc(b.aHashes, hashCmp)
slices.SortFunc(b.dHashes, hashCmp)
slices.SortFunc(b.pHashes, hashCmp)
return nil
}
// EncodeHashes requires the caller to hold a lock
func (b *basicMapStorage) EncodeHashes() (*ch.SavedHashes, error) {
savedHashes := ch.SavedHashes{
Hashes: make([]ch.SavedHash, 0, len(b.aHashes)+len(b.dHashes)+len(b.pHashes)),
}
// FIXME: the copy below is disabled, so EncodeHashes currently returns an empty set
// savedHashes.Hashes = append(savedHashes.Hashes, b.aHashes...)
// savedHashes.Hashes = append(savedHashes.Hashes, b.dHashes...)
// savedHashes.Hashes = append(savedHashes.Hashes, b.pHashes...)
// // Only keep groups len>1 as they are mapped in SavedHashes.Hashes
// for _, ids := range b.ids.ids {
// if len(*ids.idList) > 1 {
// idl := make([]ID, 0, len(*ids.idList))
// for _, id := range *ids.idList {
// idl = append(idl, *id)
// }
// savedHashes.IDs = append(savedHashes.IDs, idl)
// }
// }
return &savedHashes, nil
}
func (b *basicMapStorage) AssociateIDs(newids []ch.NewIDs) error {
b.hashMutex.RLock()
defer b.hashMutex.RUnlock()
return b.ids.AssociateIDs(newids)
}
func (b *basicMapStorage) GetIDs(id ch.ID) ch.IDList {
b.hashMutex.RLock()
defer b.hashMutex.RUnlock()
ids := b.ids.GetIDs(&id)
return ch.ToIDList(ids)
}
func NewBasicMapStorage() (ch.HashStorage, error) {
storage := &basicMapStorage{
hashMutex: &sync.RWMutex{},
ids: IDMap{
ids: []IDs{},
},
aHashes: []ch.SavedHash{},
dHashes: []ch.SavedHash{},
pHashes: []ch.SavedHash{},
}
return storage, nil
}
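A usage sketch for the basic backend; the ch.ID value is assumed to come from elsewhere:
// indexAndSearch stores one image's AHash and immediately queries it back
// with a Hamming-distance budget of 8. Sketch only; not part of the original file.
func indexAndSearch(id ch.ID, hash uint64) ([]ch.Result, error) {
	storage, err := NewBasicMapStorage()
	if err != nil {
		return nil, err
	}
	probe := ch.Hash{Hash: hash, Kind: goimagehash.AHash}
	storage.MapHashes(ch.ImageHash{Hashes: []ch.Hash{probe}, ID: id})
	return storage.GetMatches([]ch.Hash{probe}, 8, false)
}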

View File

@ -1,171 +0,0 @@
package storage
import (
"fmt"
"slices"
"sync"
ch "gitea.narnian.us/lordwelch/comic-hasher"
"gitea.narnian.us/lordwelch/goimagehash"
)
type MapStorage struct {
basicMapStorage
partialAHash [8]map[uint8][]uint64
partialDHash [8]map[uint8][]uint64
partialPHash [8]map[uint8][]uint64
}
func (m *MapStorage) GetMatches(hashes []ch.Hash, max int, exactOnly bool) ([]ch.Result, error) {
var (
foundMatches []ch.Result
tl ch.TimeLog
)
tl.ResetTime()
defer tl.LogTime("Search Complete")
m.hashMutex.RLock()
defer m.hashMutex.RUnlock()
if exactOnly { // the partial search also returns exact matches; only run the dedicated exact pass when exactOnly is set, so results never need de-duplication
foundMatches = m.exactMatches(hashes, max)
tl.LogTime("Search Exact")
if len(foundMatches) > 0 {
return foundMatches, nil
}
}
totalPartialHashes := 0
for _, searchHash := range hashes {
currentHashes, currentPartialHashes := m.getCurrentHashes(searchHash.Kind)
potentialMatches := []uint64{}
for i, partialHash := range ch.SplitHash(searchHash.Hash) {
potentialMatches = append(potentialMatches, currentPartialHashes[i][partialHash]...)
}
totalPartialHashes += len(potentialMatches)
mappedIds := map[int]bool{}
for _, match := range ch.Atleast(max, searchHash.Hash, potentialMatches) {
matchedHash := ch.Hash{
Hash: match.Hash,
Kind: searchHash.Kind,
}
index, count := m.findHash(matchedHash)
if count < 1 {
continue
}
for _, storedHash := range currentHashes[index : index+count] {
idIndex, _ := m.ids.FindID(&storedHash.ID)
if mappedIds[idIndex] {
continue
}
mappedIds[idIndex] = true
foundMatches = append(foundMatches, ch.Result{
Hash: storedHash.Hash,
ID: storedHash.ID,
Distance: 0,
EquivalentIDs: m.ids.GetIDs(&storedHash.ID),
})
}
}
}
fmt.Println("Total partial hashes tested:", totalPartialHashes)
return foundMatches, nil
}
// getCurrentHashes requires the caller to hold a read lock
func (m *MapStorage) getCurrentHashes(kind goimagehash.Kind) ([]ch.SavedHash, [8]map[uint8][]uint64) {
if kind == goimagehash.AHash {
return m.aHashes, m.partialAHash
}
if kind == goimagehash.DHash {
return m.dHashes, m.partialDHash
}
if kind == goimagehash.PHash {
return m.pHashes, m.partialPHash
}
panic("Unknown hash type: " + kind.String())
}
func (m *MapStorage) MapHashes(hash ch.ImageHash) {
m.basicMapStorage.MapHashes(hash)
for _, hash := range hash.Hashes {
_, partialHashes := m.getCurrentHashes(hash.Kind)
for i, partialHash := range ch.SplitHash(hash.Hash) {
partialHashes[i][partialHash] = ch.Insert(partialHashes[i][partialHash], hash.Hash)
}
}
}
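ch.SplitHash itself is not shown in this diff; a plausible equivalent, matching the per-byte extraction the sqlite indexes use later in this changeset, would be:
// splitHash breaks a 64-bit hash into its 8 bytes; each byte becomes a
// bucket key in the partial-hash maps above. Hypothetical stand-in for
// ch.SplitHash.
func splitHash(hash uint64) [8]uint8 {
	var parts [8]uint8
	for i := range parts {
		parts[i] = uint8(hash >> (i * 8)) // low byte after shifting byte i down
	}
	return parts
}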
func (m *MapStorage) DecodeHashes(hashes *ch.SavedHashes) error {
if hashes == nil {
return nil
}
if err := m.basicMapStorage.DecodeHashes(hashes); err != nil {
return err
}
mapPartialHashes(m.aHashes, m.partialAHash)
mapPartialHashes(m.dHashes, m.partialDHash)
mapPartialHashes(m.pHashes, m.partialPHash)
compactPartialHashes(m.partialAHash)
compactPartialHashes(m.partialDHash)
compactPartialHashes(m.partialPHash)
return nil
}
func NewMapStorage() (ch.HashStorage, error) {
storage := &MapStorage{
basicMapStorage: basicMapStorage{
hashMutex: &sync.RWMutex{},
ids: IDMap{
ids: []IDs{},
},
aHashes: []ch.SavedHash{},
dHashes: []ch.SavedHash{},
pHashes: []ch.SavedHash{},
},
partialAHash: newPartialHash(),
partialDHash: newPartialHash(),
partialPHash: newPartialHash(),
}
return storage, nil
}
func newPartialHash() [8]map[uint8][]uint64 {
return [8]map[uint8][]uint64{
map[uint8][]uint64{},
map[uint8][]uint64{},
map[uint8][]uint64{},
map[uint8][]uint64{},
map[uint8][]uint64{},
map[uint8][]uint64{},
map[uint8][]uint64{},
map[uint8][]uint64{},
}
}
func mapPartialHashes(hashes []ch.SavedHash, partialHashMap [8]map[uint8][]uint64) {
for _, savedHash := range hashes {
for i, partialHash := range ch.SplitHash(savedHash.Hash.Hash) {
partialHashMap[i][partialHash] = append(partialHashMap[i][partialHash], savedHash.Hash.Hash)
}
}
}
func compactPartialHashes(partialHashMap [8]map[uint8][]uint64) {
for _, partMap := range partialHashMap {
for part, hashes := range partMap {
slices.Sort(hashes)
partMap[part] = slices.Compact(hashes)
}
}
}
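ch.Insert (used by MapHashes above) is also not in this diff; a plausible equivalent keeps each bucket sorted and duplicate-free, which is what lets compactPartialHashes stay cheap after bulk loading:
// insertSorted is a hypothetical stand-in for ch.Insert: binary-search the
// sorted bucket and insert only if the hash is not already present.
func insertSorted(list []uint64, v uint64) []uint64 {
	i, found := slices.BinarySearch(list, v)
	if found {
		return list
	}
	return slices.Insert(list, i, v)
}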

View File

@ -1,530 +0,0 @@
package storage
import (
"context"
"database/sql"
"errors"
"fmt"
"log"
"math/bits"
ch "gitea.narnian.us/lordwelch/comic-hasher"
_ "modernc.org/sqlite"
)
type sqliteStorage struct {
db *sql.DB
hashExactMatchStatement *sql.Stmt
hashPartialMatchStatement *sql.Stmt
idMatchStatement *sql.Stmt
insertHash *sql.Stmt
insertID *sql.Stmt
insertEID *sql.Stmt
insertIEID *sql.Stmt
idExists *sql.Stmt
}
func (s *sqliteStorage) findExactHashes(statement *sql.Stmt, hash ch.Hash) (map[ch.ID][]ch.ID, error) {
if statement == nil {
statement = s.hashExactMatchStatement
}
hashes := map[ch.ID][]ch.ID{}
rows, err := statement.Query(hash.Kind, int64(hash.Hash))
if err != nil {
return hashes, err
}
for rows.Next() {
var (
id ch.ID
foundID ch.ID
)
err = rows.Scan(&foundID.Domain, &foundID.ID, &id.Domain, &id.ID)
if err != nil {
rows.Close()
return hashes, err
}
hashes[foundID] = append(hashes[foundID], id)
}
rows.Close()
return hashes, nil
}
func (s *sqliteStorage) findPartialHashes(tl ch.TimeLog, statement *sql.Stmt, max int, hash ch.Hash) ([]ch.Result, error) {
if statement == nil {
statement = s.hashPartialMatchStatement
}
hashResults := []ch.Result{}
rows, err := statement.Query(hash.Kind, int64(hash.Hash))
if err != nil {
return hashResults, err
}
results := map[ch.SavedHash][]ch.ID{}
for rows.Next() {
var (
tmpHash int64
sqlHash = ch.SavedHash{
Hash: ch.Hash{Kind: hash.Kind},
}
id ch.ID
)
err = rows.Scan(&sqlHash.ID.Domain, &sqlHash.ID.ID, &tmpHash, &id.Domain, &id.ID)
if err != nil {
rows.Close()
return hashResults, err
}
sqlHash.Hash.Hash = uint64(tmpHash)
results[sqlHash] = append(results[sqlHash], id)
}
for sqlHash, ids := range results {
res := ch.Result{
Hash: sqlHash.Hash,
ID: sqlHash.ID,
Distance: bits.OnesCount64(hash.Hash ^ sqlHash.Hash.Hash),
EquivalentIDs: ids,
}
if res.Distance <= max {
hashResults = append(hashResults, res)
}
}
return hashResults, nil
}
func (s *sqliteStorage) dropIndexes() error {
_, err := s.db.Exec(`
DROP INDEX IF EXISTS hash_index;
DROP INDEX IF EXISTS hash_1_index;
DROP INDEX IF EXISTS hash_2_index;
DROP INDEX IF EXISTS hash_3_index;
DROP INDEX IF EXISTS hash_4_index;
DROP INDEX IF EXISTS hash_5_index;
DROP INDEX IF EXISTS hash_6_index;
DROP INDEX IF EXISTS hash_7_index;
DROP INDEX IF EXISTS hash_8_index;
DROP INDEX IF EXISTS id_domain;
`)
if err != nil {
return err
}
return nil
}
func (s *sqliteStorage) createIndexes() error {
_, err := s.db.Exec(`
CREATE INDEX IF NOT EXISTS hash_index ON Hashes (kind, hash);
CREATE INDEX IF NOT EXISTS hash_1_index ON Hashes ((hash >> (0 * 8) & 0xFF));
CREATE INDEX IF NOT EXISTS hash_2_index ON Hashes ((hash >> (1 * 8) & 0xFF));
CREATE INDEX IF NOT EXISTS hash_3_index ON Hashes ((hash >> (2 * 8) & 0xFF));
CREATE INDEX IF NOT EXISTS hash_4_index ON Hashes ((hash >> (3 * 8) & 0xFF));
CREATE INDEX IF NOT EXISTS hash_5_index ON Hashes ((hash >> (4 * 8) & 0xFF));
CREATE INDEX IF NOT EXISTS hash_6_index ON Hashes ((hash >> (5 * 8) & 0xFF));
CREATE INDEX IF NOT EXISTS hash_7_index ON Hashes ((hash >> (6 * 8) & 0xFF));
CREATE INDEX IF NOT EXISTS hash_8_index ON Hashes ((hash >> (7 * 8) & 0xFF));
CREATE INDEX IF NOT EXISTS id_domain ON IDs (domain, stringid);
PRAGMA shrink_memory;
ANALYZE;
`)
if err != nil {
return err
}
return nil
}
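A note on why the eight per-byte indexes exist: if two 64-bit hashes share no byte, they differ in at least 8 bits, so any hash within Hamming distance 7 of a probe must match it exactly in at least one byte. The OR of byte-equality tests in hashPartialMatchStatement, prepared later in this file, exploits this, and each arm can be served by one of these indexes. The same pigeonhole test expressed in Go:
// sharesByte reports whether two hashes agree on at least one of their 8
// bytes; hashes within Hamming distance 7 of each other always do.
func sharesByte(a, b uint64) bool {
	for i := 0; i < 64; i += 8 {
		if uint8(a>>i) == uint8(b>>i) {
			return true
		}
	}
	return false
}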
func (s *sqliteStorage) GetMatches(hashes []ch.Hash, max int, exactOnly bool) ([]ch.Result, error) {
var (
foundMatches []ch.Result
tl ch.TimeLog
)
tl.ResetTime()
if exactOnly { // the partial search also returns exact matches; only run the dedicated exact pass when exactOnly is set, so results never need de-duplication
for _, hash := range hashes {
idlist, err := s.findExactHashes(nil, hash)
if err != nil {
return foundMatches, err
}
for id, equivalentIDs := range idlist {
foundMatches = append(foundMatches, ch.Result{
Hash: hash,
ID: id,
Distance: 0,
EquivalentIDs: equivalentIDs,
})
}
}
tl.LogTime("Search Exact")
if len(foundMatches) > 0 {
return foundMatches, nil
}
}
foundHashes := make(map[uint64]struct{})
for _, hash := range hashes {
results, err := s.findPartialHashes(tl, nil, max, hash)
if err != nil {
return foundMatches, err
}
tl.LogTime(fmt.Sprintf("Search partial %v", hash.Kind))
for _, hash := range results {
if _, alreadyMatched := foundHashes[hash.Hash.Hash]; !alreadyMatched {
foundHashes[hash.Hash.Hash] = struct{}{}
foundMatches = append(foundMatches, hash)
} else {
log.Println("Hash already found", hash)
}
}
}
return foundMatches, nil
}
func (s *sqliteStorage) mapHashes(tx *sql.Tx, hash ch.ImageHash) {
var err error
insertHash := tx.Stmt(s.insertHash)
insertID := tx.Stmt(s.insertID)
idExists := tx.Stmt(s.idExists)
insertEID := tx.Stmt(s.insertEID)
insertIEID := tx.Stmt(s.insertIEID)
rows, err := insertID.Query(hash.ID.Domain, hash.ID.ID)
if err != nil {
panic(err)
}
if !rows.Next() {
panic("Unable to insert ID")
}
var id_id int64
err = rows.Scan(&id_id)
if err != nil {
panic(err)
}
rows.Close()
for _, hash := range hash.Hashes {
_, err := insertHash.Exec(hash.Kind, int64(hash.Hash), id_id)
if err != nil {
panic(err)
}
}
row := idExists.QueryRow(id_id)
var count int64
err = row.Scan(&count)
if err != nil {
panic(fmt.Errorf("failed to query id: %w", err))
}
if count < 1 {
row := insertEID.QueryRow()
var eid int64
err = row.Scan(&eid)
if err != nil {
panic(fmt.Errorf("failed to insert equivalent id: %w", err))
}
_, err := insertIEID.Exec(id_id, eid)
if err != nil {
panic(fmt.Errorf("failed to associate equivalent IDs: %w", err))
}
}
}
func (s *sqliteStorage) MapHashes(hash ch.ImageHash) {
tx, err := s.db.BeginTx(context.Background(), nil)
if err != nil {
panic(err)
}
s.mapHashes(tx, hash)
err = tx.Commit()
if err != nil {
panic(err)
}
}
func (s *sqliteStorage) DecodeHashes(hashes *ch.SavedHashes) error {
return nil // FIXME: loading into sqlite is disabled; everything below is currently unreachable
err := s.dropIndexes()
if err != nil {
return err
}
tx, err := s.db.BeginTx(context.Background(), nil)
if err != nil {
panic(err)
}
insertID := tx.Stmt(s.insertID)
insertEID := tx.Stmt(s.insertEID)
insertIEID := tx.Stmt(s.insertIEID)
for _, idlist := range hashes.IDs {
var eid int64
id_ids := make([]int64, 0, len(idlist))
for _, id := range idlist {
var id_id int64
row := insertID.QueryRow(id.Domain, id.ID)
err = row.Scan(&id_id)
if err != nil {
return fmt.Errorf("failed to insert id: %w", err)
}
id_ids = append(id_ids, id_id)
}
row := insertEID.QueryRow()
err = row.Scan(&eid)
if err != nil {
return fmt.Errorf("failed to insert equivalent id: %w", err)
}
for _, id_id := range id_ids {
_, err = insertIEID.Exec(id_id, eid)
if err != nil {
return err
}
}
}
for _, savedHash := range hashes.Hashes {
s.mapHashes(tx, ch.ImageHash{
Hashes: []ch.Hash{savedHash.Hash},
ID: savedHash.ID,
})
}
err = tx.Commit()
if err != nil {
panic(err)
}
err = s.createIndexes()
if err != nil {
return err
}
return nil
}
func (s *sqliteStorage) EncodeHashes() (*ch.SavedHashes, error) {
hashes := ch.SavedHashes{}
tx, err := s.db.Begin()
if err != nil {
return &hashes, err
}
defer tx.Rollback() // read-only transaction; release it when done
rows, err := tx.Query("SELECT Hashes.kind, Hashes.hash, IDs.domain, IDs.stringid FROM Hashes JOIN IDs ON Hashes.id=IDs.id ORDER BY Hashes.kind, Hashes.hash;")
if err != nil {
return &hashes, err
}
for rows.Next() {
var (
hash ch.SavedHash
tmpHash int64
)
err = rows.Scan(&hash.Hash.Kind, &tmpHash, &hash.ID.Domain, &hash.ID.ID)
if err != nil {
return &hashes, err
}
hash.Hash.Hash = uint64(tmpHash)
hashes.InsertHash(hash)
}
rows, err = tx.Query("SELECT IEIDs.equivalentid, IDs.domain, IDs.stringid FROM IDs JOIN IDsToEquivalantIDs AS IEIDs ON IDs.id=IEIDs.idid ORDER BY IEIDs.equivalentid, IDs.domain, IDs.stringid;")
if err != nil {
return &hashes, err
}
var (
previousEid int64 = -1
ids []ch.ID
)
for rows.Next() {
var (
id ch.ID
newEid int64
)
err = rows.Scan(&newEid, &id.Domain, &id.Domain)
if err != nil {
return &hashes, err
}
if newEid != previousEid {
previousEid = newEid
// Only keep groups len>1 as they are mapped in SavedHashes.Hashes
if len(ids) > 1 {
hashes.IDs = append(hashes.IDs, ids)
}
ids = make([]ch.ID, 0)
}
ids = append(ids, id)
}
rows.Close()
// flush the final group; the loop above only appends a group when a new eid starts
if len(ids) > 1 {
hashes.IDs = append(hashes.IDs, ids)
}
return &hashes, nil
}
func (s *sqliteStorage) AssociateIDs(newIDs []ch.NewIDs) error {
tx, err := s.db.BeginTx(context.Background(), nil)
if err != nil {
panic(err)
}
insertID := tx.Stmt(s.insertID)
insertIEID := tx.Stmt(s.insertIEID)
for _, ids := range newIDs {
var (
newRowid int64
oldRowid int64
eid int64
)
rows := tx.QueryRow(`SELECT ITEI.idid, ITEI.equivalentid from IDs JOIN IDsToEquivalantIDs AS ITEI ON IDs.id=ITEI.idid WHERE domain=? AND stringid=?`, ids.OldID.Domain, ids.OldID.ID)
err := rows.Scan(&oldRowid, &eid)
if err != nil {
if errors.Is(err, sql.ErrNoRows) {
return ErrIDNotFound
}
return err
}
rows = insertID.QueryRow(ids.NewID.Domain, ids.NewID.ID)
err = rows.Scan(&newRowid)
if err != nil {
return err
}
_, err = insertIEID.Exec(newRowid, eid)
if err != nil {
return err
}
}
err = tx.Commit()
if err != nil {
panic(err)
}
return nil
}
func (s *sqliteStorage) GetIDs(id ch.ID) ch.IDList {
var ids []ch.ID
rows, err := s.idMatchStatement.Query(id.Domain, id.ID)
if err != nil {
return nil
}
for rows.Next() {
var id ch.ID
err = rows.Scan(&id.Domain, &id.ID)
if err != nil {
return nil
}
ids = append(ids, id)
}
return ch.ToIDList(ids)
}
func (s *sqliteStorage) PrepareStatements() error {
var err error
s.insertHash, err = s.db.Prepare(`INSERT INTO Hashes (kind, hash, id) VALUES (?, ?, ?) ON CONFLICT DO UPDATE SET kind=?1`)
if err != nil {
return fmt.Errorf("failed to prepare database statements: %w", err)
}
s.insertID, err = s.db.Prepare(`INSERT INTO IDs (domain, stringid) VALUES (?,?) ON CONFLICT DO UPDATE SET domain=?1 RETURNING id`)
if err != nil {
return fmt.Errorf("failed to prepare database statements: %w", err)
}
s.insertEID, err = s.db.Prepare(`INSERT INTO EquivalentIDs DEFAULT VALUES RETURNING id;`)
if err != nil {
return fmt.Errorf("failed to prepare database statements: %w", err)
}
s.insertIEID, err = s.db.Prepare(`INSERT INTO IDsToEquivalantIDs (idid, equivalentid) VALUES (?, ?);`)
if err != nil {
return fmt.Errorf("failed to prepare database statements: %w", err)
}
s.idExists, err = s.db.Prepare(`SELECT COUNT(*) from IDsToEquivalantIDs WHERE idid=?`)
if err != nil {
return fmt.Errorf("failed to prepare database statements: %w", err)
}
s.hashExactMatchStatement, err = s.db.Prepare(`
select QIDs.domain, QIDs.stringid, IDs.domain, IDs.stringid from IDs
join IDsToEquivalantIDs as IEIDs on IDs.id=IEIDs.idid
join (
select QEIDs.id as id from EquivalentIDs as QEIDs
join IDsToEquivalantIDs as QIEIDs on QEIDs.id=QIEIDs.equivalentid
join IDs as QIDs on QIDs.id=QIEIDs.idid
join Hashes on Hashes.id=QIDs.id
where (Hashes.kind=? AND Hashes.hash=?)
) as EIDs on EIDs.id=IEIDs.equivalentid;
`)
if err != nil {
return fmt.Errorf("failed to prepare database statements: %w", err)
}
s.hashPartialMatchStatement, err = s.db.Prepare(`
select QIDs.domain, QIDs.stringid, EIDs.hash, IDs.domain, IDs.stringid from IDs
join IDsToEquivalantIDs as IEIDs on IDs.id=IEIDs.idid
join (
select Hashes.hash as hash, QEIDs.id as id from EquivalentIDs as QEIDs
join IDsToEquivalantIDs as QIEIDs on QEIDs.id=QIEIDs.equivalentid
join IDs as QIDs on QIDs.id=QIEIDs.idid
join Hashes on Hashes.id=QIDs.id
where (Hashes.kind=? AND (
((Hashes.hash >> (0 * 8) & 0xFF)=(?2 >> (0 * 8) & 0xFF)) OR
((Hashes.hash >> (1 * 8) & 0xFF)=(?2 >> (1 * 8) & 0xFF)) OR
((Hashes.hash >> (2 * 8) & 0xFF)=(?2 >> (2 * 8) & 0xFF)) OR
((Hashes.hash >> (3 * 8) & 0xFF)=(?2 >> (3 * 8) & 0xFF)) OR
((Hashes.hash >> (4 * 8) & 0xFF)=(?2 >> (4 * 8) & 0xFF)) OR
((Hashes.hash >> (5 * 8) & 0xFF)=(?2 >> (5 * 8) & 0xFF)) OR
((Hashes.hash >> (6 * 8) & 0xFF)=(?2 >> (6 * 8) & 0xFF)) OR
((Hashes.hash >> (7 * 8) & 0xFF)=(?2 >> (7 * 8) & 0xFF))
))
) as EIDs on EIDs.id=IEIDs.equivalentid;
`)
if err != nil {
return fmt.Errorf("failed to prepare database statements: %w", err)
}
s.idMatchStatement, err = s.db.Prepare(`
select IDs.domain, IDs.stringid from IDs
join IDsToEquivalantIDs as IEIDs on IDs.id=IEIDs.idid
join (
select EIDs.* from EquivalentIDs as EIDs
join IDsToEquivalantIDs as QIEIDs on EIDs.id=QIEIDs.equivalentid
join IDs as QIDs on QIDs.id=QIEIDs.idid
where (QIDs.domain=? AND QIDs.stringid=?)
) as EIDs on EIDs.id=IEIDs.equivalentid;
`)
if err != nil {
return fmt.Errorf("failed to prepare database statements: %w", err)
}
return nil
}
func NewSqliteStorage(db, path string) (ch.HashStorage, error) {
sqlite := &sqliteStorage{}
sqlDB, err := sql.Open(db, fmt.Sprintf("file://%s?_pragma=cache_size(-200000)&_pragma=busy_timeout(500)&_pragma=hard_heap_limit(1073741824)&_pragma=journal_mode(wal)&_pragma=soft_heap_limit(314572800)", path))
if err != nil {
panic(err)
}
sqlite.db = sqlDB
_, err = sqlite.db.Exec(`
PRAGMA foreign_keys=ON;
CREATE TABLE IF NOT EXISTS IDs(
id INTEGER PRIMARY KEY,
stringid TEXT NOT NULL,
domain TEXT NOT NULL
);
CREATE TABLE IF NOT EXISTS Hashes(
hash INTEGER NOT NULL,
kind INTEGER NOT NULL,
id INTEGER NOT NULL,
FOREIGN KEY(id) REFERENCES IDs(id)
);
CREATE TABLE IF NOT EXISTS EquivalentIDs(
id INTEGER PRIMARY KEY
);
CREATE TABLE IF NOT EXISTS IDsToEquivalantIDs(
idid INTEGER NOT NULL,
equivalentid INTEGER NOT NULL,
PRIMARY KEY (idid, equivalentid),
FOREIGN KEY(idid) REFERENCES IDs(id),
FOREIGN KEY(equivalentid) REFERENCES EquivalentIDs(id)
);
`)
if err != nil {
panic(err)
}
err = sqlite.createIndexes()
if err != nil {
return nil, err
}
sqlite.db.SetMaxOpenConns(1)
err = sqlite.PrepareStatements()
if err != nil {
return nil, err
}
return sqlite, nil
}

View File

@ -1,7 +0,0 @@
//go:build cgo && !gokrazy
package storage
import (
_ "github.com/mattn/go-sqlite3"
)

View File

@ -1,8 +0,0 @@
//go:build !cgo && !gokrazy
package storage
import (
_ "github.com/ncruces/go-sqlite3/driver"
_ "github.com/ncruces/go-sqlite3/embed"
)

View File

@ -1,184 +0,0 @@
//go:build !gokrazy
package storage
import (
"errors"
"fmt"
"math/bits"
ch "gitea.narnian.us/lordwelch/comic-hasher"
"gitea.narnian.us/lordwelch/goimagehash"
"gonum.org/v1/gonum/spatial/vptree"
)
type VPTree struct {
aTree *vptree.Tree
dTree *vptree.Tree
pTree *vptree.Tree
ids map[ch.ID]*[]ch.ID
aHashes []vptree.Comparable // temporary, only used for vptree creation
dHashes []vptree.Comparable // temporary, only used for vptree creation
pHashes []vptree.Comparable // temporary, only used for vptree creation
}
type VPHash struct {
ch.SavedHash
}
func (h *VPHash) Distance(c vptree.Comparable) float64 {
h2, ok := c.(*VPHash)
if !ok {
return -99 // negative sentinel: the tree only ever stores *VPHash values, so this shouldn't happen
}
return float64(bits.OnesCount64(h.Hash.Hash ^ h2.Hash.Hash))
}
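A sketch of querying the tree directly with the gonum vptree API used in this file; saved is assumed to hold previously built *VPHash values:
// nearest builds a tree over the saved hashes and collects everything
// within maxDist of the probe. It mirrors GetMatches below, without the
// ID bookkeeping. Sketch only; not part of the original file.
func nearest(saved []vptree.Comparable, probe ch.Hash, maxDist int) ([]*VPHash, error) {
	tree, err := vptree.New(saved, 3, nil)
	if err != nil {
		return nil, err
	}
	keeper := vptree.NewDistKeeper(float64(maxDist))
	tree.NearestSet(keeper, &VPHash{ch.SavedHash{Hash: probe}})
	found := make([]*VPHash, 0, len(keeper.Heap))
	for _, r := range keeper.Heap {
		found = append(found, r.Comparable.(*VPHash))
	}
	return found, nil
}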
func (v *VPTree) GetMatches(hashes []ch.Hash, max int, exactOnly bool) ([]ch.Result, error) {
var (
matches []ch.Result
exactMatches []ch.Result
tl ch.TimeLog
)
tl.ResetTime()
defer tl.LogTime("Search Complete")
for _, hash := range hashes {
results := vptree.NewDistKeeper(float64(max))
currentTree := v.getCurrentTree(hash.Kind)
currentTree.NearestSet(results, &VPHash{ch.SavedHash{Hash: hash}})
mappedIds := map[*[]ch.ID]bool{}
for _, result := range results.Heap {
storedHash := result.Comparable.(*VPHash)
ids := v.ids[storedHash.ID]
if mappedIds[ids] {
continue
}
mappedIds[ids] = true
if result.Dist == 0 {
exactMatches = append(exactMatches, ch.Result{
Hash: storedHash.Hash,
ID: storedHash.ID,
Distance: 0,
EquivalentIDs: *v.ids[storedHash.ID],
})
} else {
matches = append(matches, ch.Result{
Hash: storedHash.Hash,
ID: storedHash.ID,
Distance: int(result.Dist),
EquivalentIDs: *v.ids[storedHash.ID],
})
}
}
}
if exactOnly && len(exactMatches) > 0 {
return exactMatches, nil
}
// exact matches sort first; appending into the wrong slice previously dropped them
matches = append(exactMatches, matches...)
return matches, nil
}
func (v *VPTree) getCurrentTree(kind goimagehash.Kind) *vptree.Tree {
if kind == goimagehash.AHash {
return v.aTree
}
if kind == goimagehash.DHash {
return v.dTree
}
if kind == goimagehash.PHash {
return v.pTree
}
panic("Unknown hash type: " + kind.String())
}
func (v *VPTree) MapHashes(ch.ImageHash) {
panic("Not Implemented")
}
func (v *VPTree) DecodeHashes(hashes *ch.SavedHashes) error {
if hashes == nil {
return nil
}
// Initialize all the known equal IDs
for _, ids := range hashes.IDs {
ids := ids // copy: the stored pointer must be stable across iterations on Go versions before 1.22
for _, id := range ids {
v.ids[id] = &ids
}
}
var err error
for _, savedHash := range hashes.Hashes {
if savedHash.Hash.Kind == goimagehash.AHash {
v.aHashes = append(v.aHashes, &VPHash{savedHash})
}
if savedHash.Hash.Kind == goimagehash.DHash {
v.dHashes = append(v.dHashes, &VPHash{savedHash})
}
if savedHash.Hash.Kind == goimagehash.PHash {
v.pHashes = append(v.pHashes, &VPHash{savedHash})
}
if savedHash.ID == (ch.ID{}) {
fmt.Println("Empty ID detected")
panic(savedHash)
}
// All known equal IDs are already mapped we can add any missing ones from hashes
if _, ok := v.ids[savedHash.ID]; !ok {
v.ids[savedHash.ID] = &[]ch.ID{savedHash.ID}
}
}
v.aTree, err = vptree.New(v.aHashes, 3, nil)
if err != nil {
return err
}
v.dTree, err = vptree.New(v.dHashes, 3, nil)
if err != nil {
return err
}
v.pTree, err = vptree.New(v.pHashes, 3, nil)
if err != nil {
return err
}
return nil
}
func (v *VPTree) EncodeHashes() (*ch.SavedHashes, error) {
return &ch.SavedHashes{}, errors.New("Not Implemented")
}
func (v *VPTree) AssociateIDs(newIDs []ch.NewIDs) error {
return errors.New("Not Implemented")
}
func (v *VPTree) GetIDs(id ch.ID) ch.IDList {
ids, found := v.ids[id]
if !found {
return nil
}
return ch.ToIDList(*ids)
}
func NewVPStorage() (ch.HashStorage, error) {
var err error
v := &VPTree{
aHashes: []vptree.Comparable{},
dHashes: []vptree.Comparable{},
pHashes: []vptree.Comparable{},
}
v.aTree, err = vptree.New(v.aHashes, 3, nil)
if err != nil {
return v, err
}
v.dTree, err = vptree.New(v.dHashes, 3, nil)
if err != nil {
return v, err
}
v.pTree, err = vptree.New(v.pHashes, 3, nil)
if err != nil {
return v, err
}
return v, nil
}

View File

@ -1,13 +0,0 @@
//go:build gokrazy
package storage
import (
"errors"
ch "gitea.narnian.us/lordwelch/comic-hasher"
)
func NewVPStorage() (ch.HashStorage, error) {
return nil, errors.New("VPTree not available")
}

View File

@ -1,24 +0,0 @@
package ch
import (
"fmt"
"time"
)
type TimeLog struct {
total time.Duration
last time.Time
}
func (t *TimeLog) ResetTime() {
t.total = 0
t.last = time.Now()
}
func (t *TimeLog) LogTime(log string) {
now := time.Now()
diff := now.Sub(t.last)
t.last = now
t.total += diff
fmt.Printf("total: %v, %s: %v\n", t.total, log, diff)
}