2012-11-02 13:54:17 -07:00
|
|
|
#!/usr/bin/python
|
|
|
|
|
2015-02-21 18:30:32 -08:00
|
|
|
"""ComicTagger CLI functions"""
|
2012-11-06 12:56:30 -08:00
|
|
|
|
2015-02-21 18:30:32 -08:00
|
|
|
# Copyright 2013 Anthony Beville
|
2012-11-06 12:56:30 -08:00
|
|
|
|
2015-02-21 18:30:32 -08:00
|
|
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
|
|
|
# you may not use this file except in compliance with the License.
|
|
|
|
# You may obtain a copy of the License at
|
2012-11-06 12:56:30 -08:00
|
|
|
|
2015-02-21 18:30:32 -08:00
|
|
|
# http://www.apache.org/licenses/LICENSE-2.0
|
2012-11-06 12:56:30 -08:00
|
|
|
|
2015-02-21 18:30:32 -08:00
|
|
|
# Unless required by applicable law or agreed to in writing, software
|
|
|
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
|
|
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
|
|
# See the License for the specific language governing permissions and
|
|
|
|
# limitations under the License.
|
2012-11-02 13:54:17 -07:00
|
|
|
|
|
|
|
import sys
|
2012-11-06 12:29:18 -08:00
|
|
|
import os
|
2012-11-19 12:28:19 -08:00
|
|
|
from pprint import pprint
|
|
|
|
import json
|
2015-02-21 18:30:32 -08:00
|
|
|
#import signal
|
|
|
|
#import traceback
|
|
|
|
#import time
|
|
|
|
#import platform
|
|
|
|
#import locale
|
|
|
|
#import codecs
|
2012-11-06 12:29:18 -08:00
|
|
|
|
2012-12-29 21:06:12 -08:00
|
|
|
# Encoding the OS uses for filenames; cached once at import time for any
# byte<->str filename conversions needed elsewhere.
filename_encoding = sys.getfilesystemencoding()
|
2012-11-02 13:54:17 -07:00
|
|
|
|
2018-09-19 13:05:39 -07:00
|
|
|
from .settings import ComicTaggerSettings
|
|
|
|
from .options import Options
|
|
|
|
from .comicarchive import ComicArchive, MetaDataStyle
|
|
|
|
from .issueidentifier import IssueIdentifier
|
|
|
|
from .genericmetadata import GenericMetadata
|
|
|
|
from .comicvinetalker import ComicVineTalker, ComicVineTalkerException
|
|
|
|
from .filerenamer import FileRenamer
|
|
|
|
from .cbltransformer import CBLTransformer
|
|
|
|
from . import utils
|
2015-02-13 15:08:07 -08:00
|
|
|
|
2012-11-12 16:12:43 -08:00
|
|
|
|
2012-12-05 20:46:01 -08:00
|
|
|
class MultipleMatch():

    """Pair a comic archive filename with the list of online search matches
    found for it, so ambiguous results can be resolved later."""

    def __init__(self, filename, match_list):
        # Path of the archive these candidate matches belong to.
        self.filename = filename
        # List of match dicts returned by the issue identifier.
        self.matches = match_list
|
2012-12-05 20:46:01 -08:00
|
|
|
|
2015-02-13 15:08:07 -08:00
|
|
|
|
2012-12-05 20:46:01 -08:00
|
|
|
class OnlineMatchResults():

    """Accumulator for the outcome of a batch of online tagging attempts.

    Each attribute is a list of filenames (or MultipleMatch objects for the
    two ambiguous-match buckets) filled in as files are processed.
    """

    def __init__(self):
        # Files tagged successfully.
        self.goodMatches = []
        # Files for which no online match was found.
        self.noMatches = []
        # Files with several high-confidence candidates.
        self.multipleMatches = []
        # Files whose best candidates scored poorly.
        self.lowConfidenceMatches = []
        # Files where writing the tags failed.
        self.writeFailures = []
        # Files where fetching issue data from the network failed.
        self.fetchDataFailures = []
|
|
|
|
|
2012-11-02 13:54:17 -07:00
|
|
|
#-----------------------------
|
2012-12-05 20:46:01 -08:00
|
|
|
|
2015-02-15 02:44:00 -08:00
|
|
|
|
2015-02-13 15:08:07 -08:00
|
|
|
def actual_issue_data_fetch(match, settings, opts):
    """Download full issue metadata for a single search *match*.

    Returns the fetched metadata (optionally run through the CBL
    transformer), or None if the network request failed.
    """
    # now get the particular issue data
    try:
        talker = ComicVineTalker()
        talker.wait_for_rate_limit = opts.wait_and_retry_on_rate_limit
        cv_md = talker.fetchIssueData(
            match['volume_id'], match['issue_number'], settings)
    except ComicVineTalkerException:
        print("Network error while getting issue details. Save aborted", file=sys.stderr)
        return None

    # Optionally massage the data per the user's ComicBookLover transform rules.
    if settings.apply_cbl_transform_on_cv_import:
        cv_md = CBLTransformer(cv_md, settings).apply()

    return cv_md
|
2012-12-05 20:46:01 -08:00
|
|
|
|
2015-02-15 02:44:00 -08:00
|
|
|
|
2015-02-13 15:08:07 -08:00
|
|
|
def actual_metadata_save(ca, opts, md):
    """Write metadata *md* into archive *ca*, honoring the dry-run option.

    Returns True on success (or any dry-run), False if the write failed.
    """
    # Dry-run: report what would have happened and bail out successfully.
    if opts.dryrun:
        if opts.terse:
            print("dry-run option was set, so nothing was written", file=sys.stderr)
        else:
            print("dry-run option was set, so nothing was written, but here is the final set of tags:", file=sys.stderr)
            print(("{0}".format(md)))
        return True

    # write out the new data
    if not ca.writeMetadata(md, opts.data_style):
        print("The tag save seemed to fail!", file=sys.stderr)
        return False

    print("Save complete.", file=sys.stderr)
    return True
|
2013-02-05 14:27:35 -08:00
|
|
|
|
2015-02-15 02:44:00 -08:00
|
|
|
|
2015-02-13 15:08:07 -08:00
|
|
|
def display_match_set_for_choice(label, match_set, opts, settings):
    """Print the candidate matches in *match_set* and, in interactive mode,
    let the user pick one; the chosen match is fetched and saved to the
    archive immediately.

    label      -- heading describing why this set needs a choice
    match_set  -- MultipleMatch with .filename and .matches
    opts       -- parsed command-line options (interactive, data_style, ...)
    settings   -- ComicTaggerSettings instance
    """
    print(("{0} -- {1}:".format(match_set.filename, label)))

    # sort match list by year
    match_set.matches.sort(key=lambda k: k['year'])

    for (counter, m) in enumerate(match_set.matches):
        # Display as a 1-based menu index for the user.
        counter += 1
        print((
            "    {0}. {1} #{2} [{3}] ({4}/{5}) - {6}".format(
                counter,
                m['series'],
                m['issue_number'],
                m['publisher'],
                m['month'],
                m['year'],
                m['issue_title'])))
    if opts.interactive:
        # Re-prompt until the user enters a valid menu number or 's' to skip.
        while True:
            i = input("Choose a match #, or 's' to skip: ")
            if (i.isdigit() and int(i) in range(
                    1, len(match_set.matches) + 1)) or i == 's':
                break
        if i != 's':
            # Convert back from the 1-based menu index to a list index.
            i = int(i) - 1
            # save the data!
            # we know at this point, that the file is all good to go
            ca = ComicArchive(
                match_set.filename,
                settings.rar_exe_path,
                ComicTaggerSettings.getGraphic('nocover.png'))
            md = create_local_metadata(
                opts, ca, ca.hasMetadata(opts.data_style))
            cv_md = actual_issue_data_fetch(
                match_set.matches[int(i)], settings, opts)
            # NOTE(review): cv_md may be None if the fetch failed — overlay
            # behavior in that case depends on GenericMetadata.overlay; confirm.
            md.overlay(cv_md)
            actual_metadata_save(ca, opts, md)
|
2015-02-12 14:57:46 -08:00
|
|
|
|
2012-12-05 20:46:01 -08:00
|
|
|
|
2015-02-13 15:08:07 -08:00
|
|
|
def post_process_matches(match_results, opts, settings):
    """Summarize a batch run and optionally resolve ambiguous matches.

    Prints per-category summaries when requested, then (in interactive or
    summary mode) walks the multiple- and low-confidence match sets so the
    user can choose among candidates.
    """
    # now go through the match results
    if opts.show_save_summary:
        summary_sections = (
            ("\nSuccessful matches:\n------------------",
             match_results.goodMatches),
            ("\nNo matches:\n------------------",
             match_results.noMatches),
            ("\nFile Write Failures:\n------------------",
             match_results.writeFailures),
            ("\nNetwork Data Fetch Failures:\n------------------",
             match_results.fetchDataFailures),
        )
        for heading, name_list in summary_sections:
            if len(name_list) > 0:
                print(heading)
                for f in name_list:
                    print(f)

    if not opts.show_save_summary and not opts.interactive:
        # just quit if we're not interactive or showing the summary
        return

    if len(match_results.multipleMatches) > 0:
        print(
            "\nArchives with multiple high-confidence matches:\n------------------")
        for match_set in match_results.multipleMatches:
            display_match_set_for_choice(
                "Multiple high-confidence matches", match_set, opts, settings)

    if len(match_results.lowConfidenceMatches) > 0:
        print("\nArchives with low-confidence matches:\n------------------")
        for match_set in match_results.lowConfidenceMatches:
            label = ("Single low-confidence match"
                     if len(match_set.matches) == 1
                     else "Multiple low-confidence matches")
            display_match_set_for_choice(label, match_set, opts, settings)
|
2012-12-05 20:46:01 -08:00
|
|
|
|
|
|
|
|
2015-02-13 15:08:07 -08:00
|
|
|
def cli_mode(opts, settings):
    """Top-level entry point for command-line (non-GUI) operation.

    Processes every file in opts.file_list, accumulating per-file outcomes
    in an OnlineMatchResults, then prints/resolves the results.
    """
    if len(opts.file_list) < 1:
        print("You must specify at least one filename. Use the -h option for more info", file=sys.stderr)
        return

    match_results = OnlineMatchResults()

    for f in opts.file_list:
        # (A py2-era unicode-decode step used to live here; the leftover
        # `if isinstance(f, str): pass` no-op has been removed.)
        process_file_cli(f, opts, settings, match_results)
        # Flush after each file so progress is visible when output is piped.
        sys.stdout.flush()

    post_process_matches(match_results, opts, settings)
|
2012-11-02 13:54:17 -07:00
|
|
|
|
2012-12-03 17:16:58 -08:00
|
|
|
|
2015-02-13 15:08:07 -08:00
|
|
|
def create_local_metadata(opts, ca, has_desired_tags):
    """Build the starting metadata for archive *ca* from local sources.

    Layers, in increasing priority: existing tags in the archive (when
    *has_desired_tags*), info parsed from the filename, and any explicit
    metadata given on the command line.  Returns the combined metadata.
    """
    metadata = GenericMetadata()
    metadata.setDefaultPageList(ca.getNumberOfPages())

    # Base layer: tags already present in the archive, if requested.
    if has_desired_tags:
        metadata = ca.readMetadata(opts.data_style)

    # now, overlay the parsed filename info
    if opts.parse_filename:
        metadata.overlay(ca.metadataFromFilename())

    # finally, use explicit stuff
    if opts.metadata is not None:
        metadata.overlay(opts.metadata)

    return metadata
|
2012-12-03 17:16:58 -08:00
|
|
|
|
2015-02-15 02:44:00 -08:00
|
|
|
|
2015-02-13 15:08:07 -08:00
|
|
|
def process_file_cli(filename, opts, settings, match_results):
    """Apply the requested CLI operation to a single comic archive.

    Exactly one of the option flags selects the mode: print_tags,
    delete_tags, copy_tags, save_tags, rename_file, or export_to_zip.
    Outcomes of save operations are appended to *match_results*.
    """
    # Batch mode changes message formatting (filename prefixes, progress).
    batch_mode = len(opts.file_list) > 1

    ca = ComicArchive(
        filename,
        settings.rar_exe_path,
        ComicTaggerSettings.getGraphic('nocover.png'))

    # --- sanity checks before doing anything --------------------------------
    if not os.path.lexists(filename):
        print("Cannot find " + filename, file=sys.stderr)
        return

    if not ca.seemsToBeAComicArchive():
        print("Sorry, but " + \
            filename + " is not a comic archive!", file=sys.stderr)
        return

    # if not ca.isWritableForStyle(opts.data_style) and (opts.delete_tags or
    # opts.save_tags or opts.rename_file):
    if not ca.isWritable() and (
            opts.delete_tags or opts.copy_tags or opts.save_tags or opts.rename_file):
        print("This archive is not writable for that tag type", file=sys.stderr)
        return

    # Which tag styles are present; indexed by MetaDataStyle constants.
    has = [False, False, False]
    if ca.hasCIX():
        has[MetaDataStyle.CIX] = True
    if ca.hasCBI():
        has[MetaDataStyle.CBI] = True
    if ca.hasCoMet():
        has[MetaDataStyle.COMET] = True

    # --- mode: print tags ---------------------------------------------------
    if opts.print_tags:

        # With no specific style requested, first show a one-line overview.
        if opts.data_style is None:
            page_count = ca.getNumberOfPages()

            brief = ""

            if batch_mode:
                brief = "{0}: ".format(filename)

            if ca.isSevenZip():
                brief += "7Z archive "
            elif ca.isZip():
                brief += "ZIP archive "
            elif ca.isRar():
                brief += "RAR archive "
            elif ca.isFolder():
                brief += "Folder archive "

            brief += "({0: >3} pages)".format(page_count)
            brief += " tags:[ "

            if not (has[MetaDataStyle.CBI] or has[
                    MetaDataStyle.CIX] or has[MetaDataStyle.COMET]):
                brief += "none "
            else:
                if has[MetaDataStyle.CBI]:
                    brief += "CBL "
                if has[MetaDataStyle.CIX]:
                    brief += "CR "
                if has[MetaDataStyle.COMET]:
                    brief += "CoMet "
            brief += "]"

            print(brief)

        if opts.terse:
            return

        print()

        # Dump each requested style that the archive actually has;
        # --raw shows the stored markup instead of the parsed form.
        if opts.data_style is None or opts.data_style == MetaDataStyle.CIX:
            if has[MetaDataStyle.CIX]:
                print("--------- ComicRack tags ---------")
                if opts.raw:
                    print((
                        "{0}".format(
                            str(
                                ca.readRawCIX(),
                                errors='ignore'))))
                else:
                    print(("{0}".format(ca.readCIX())))

        if opts.data_style is None or opts.data_style == MetaDataStyle.CBI:
            if has[MetaDataStyle.CBI]:
                print("------- ComicBookLover tags -------")
                if opts.raw:
                    pprint(json.loads(ca.readRawCBI()))
                else:
                    print(("{0}".format(ca.readCBI())))

        if opts.data_style is None or opts.data_style == MetaDataStyle.COMET:
            if has[MetaDataStyle.COMET]:
                print("----------- CoMet tags -----------")
                if opts.raw:
                    print(("{0}".format(ca.readRawCoMet())))
                else:
                    print(("{0}".format(ca.readCoMet())))

    # --- mode: delete tags --------------------------------------------------
    elif opts.delete_tags:
        style_name = MetaDataStyle.name[opts.data_style]
        if has[opts.data_style]:
            if not opts.dryrun:
                if not ca.removeMetadata(opts.data_style):
                    print(("{0}: Tag removal seemed to fail!".format(filename)))
                else:
                    print((
                        "{0}: Removed {1} tags.".format(filename, style_name)))
            else:
                print((
                    "{0}: dry-run. {1} tags not removed".format(filename, style_name)))
        else:
            print(("{0}: This archive doesn't have {1} tags to remove.".format(
                filename, style_name)))

    # --- mode: copy tags from one style to another --------------------------
    elif opts.copy_tags:
        dst_style_name = MetaDataStyle.name[opts.data_style]
        if opts.no_overwrite and has[opts.data_style]:
            print(("{0}: Already has {1} tags. Not overwriting.".format(
                filename, dst_style_name)))
            return
        if opts.copy_source == opts.data_style:
            print((
                "{0}: Destination and source are same: {1}. Nothing to do.".format(
                    filename,
                    dst_style_name)))
            return

        src_style_name = MetaDataStyle.name[opts.copy_source]
        if has[opts.copy_source]:
            if not opts.dryrun:
                md = ca.readMetadata(opts.copy_source)

                # CBL-bound copies may pass through the user's transform rules.
                if settings.apply_cbl_transform_on_bulk_operation and opts.data_style == MetaDataStyle.CBI:
                    md = CBLTransformer(md, settings).apply()

                if not ca.writeMetadata(md, opts.data_style):
                    print(("{0}: Tag copy seemed to fail!".format(filename)))
                else:
                    print(("{0}: Copied {1} tags to {2} .".format(
                        filename, src_style_name, dst_style_name)))
            else:
                print((
                    "{0}: dry-run. {1} tags not copied".format(filename, src_style_name)))
        else:
            print(("{0}: This archive doesn't have {1} tags to copy.".format(
                filename, src_style_name)))

    # --- mode: save (tag) the archive, possibly after an online search ------
    elif opts.save_tags:

        if opts.no_overwrite and has[opts.data_style]:
            print(("{0}: Already has {1} tags. Not overwriting.".format(
                filename, MetaDataStyle.name[opts.data_style])))
            return

        if batch_mode:
            print(("Processing {0}...".format(filename)))

        md = create_local_metadata(opts, ca, has[opts.data_style])
        if md.issue is None or md.issue == "":
            if opts.assume_issue_is_one_if_not_set:
                md.issue = "1"

        # now, search online
        if opts.search_online:
            if opts.issue_id is not None:
                # we were given the actual ID to search with
                try:
                    comicVine = ComicVineTalker()
                    comicVine.wait_for_rate_limit = opts.wait_and_retry_on_rate_limit
                    cv_md = comicVine.fetchIssueDataByIssueID(
                        opts.issue_id, settings)
                except ComicVineTalkerException:
                    print("Network error while getting issue details. Save aborted", file=sys.stderr)
                    match_results.fetchDataFailures.append(filename)
                    return

                if cv_md is None:
                    print("No match for ID {0} was found.".format(
                        opts.issue_id), file=sys.stderr)
                    match_results.noMatches.append(filename)
                    return

                if settings.apply_cbl_transform_on_cv_import:
                    cv_md = CBLTransformer(cv_md, settings).apply()
            else:
                # No explicit ID: identify the issue from the local metadata.
                ii = IssueIdentifier(ca, settings)

                if md is None or md.isEmpty:
                    print("No metadata given to search online with!", file=sys.stderr)
                    match_results.noMatches.append(filename)
                    return

                def myoutput(text):
                    # Identifier progress output only in verbose mode.
                    if opts.verbose:
                        IssueIdentifier.defaultWriteOutput(text)

                # use our overlayed MD struct to search
                ii.setAdditionalMetadata(md)
                ii.onlyUseAdditionalMetaData = True
                ii.waitAndRetryOnRateLimit = opts.wait_and_retry_on_rate_limit
                ii.setOutputFunction(myoutput)
                ii.cover_page_index = md.getCoverPageIndexList()[0]
                matches = ii.search()

                result = ii.search_result

                # Classify the identifier's result code into three flags.
                found_match = False
                choices = False
                low_confidence = False

                if result == ii.ResultNoMatches:
                    pass
                elif result == ii.ResultFoundMatchButBadCoverScore:
                    low_confidence = True
                    found_match = True
                elif result == ii.ResultFoundMatchButNotFirstPage:
                    found_match = True
                elif result == ii.ResultMultipleMatchesWithBadImageScores:
                    low_confidence = True
                    choices = True
                elif result == ii.ResultOneGoodMatch:
                    found_match = True
                elif result == ii.ResultMultipleGoodMatches:
                    choices = True

                # Ambiguous or weak results abort the save; they are queued
                # in match_results for later interactive resolution.
                if choices:
                    if low_confidence:
                        print("Online search: Multiple low confidence matches. Save aborted", file=sys.stderr)
                        match_results.lowConfidenceMatches.append(
                            MultipleMatch(filename, matches))
                        return
                    else:
                        print("Online search: Multiple good matches. Save aborted", file=sys.stderr)
                        match_results.multipleMatches.append(
                            MultipleMatch(filename, matches))
                        return
                if low_confidence and opts.abortOnLowConfidence:
                    print("Online search: Low confidence match. Save aborted", file=sys.stderr)
                    match_results.lowConfidenceMatches.append(
                        MultipleMatch(filename, matches))
                    return
                if not found_match:
                    print("Online search: No match found. Save aborted", file=sys.stderr)
                    match_results.noMatches.append(filename)
                    return

                # we got here, so we have a single match

                # now get the particular issue data
                cv_md = actual_issue_data_fetch(matches[0], settings, opts)
                if cv_md is None:
                    match_results.fetchDataFailures.append(filename)
                    return

            md.overlay(cv_md)

        # ok, done building our metadata. time to save
        if not actual_metadata_save(ca, opts, md):
            match_results.writeFailures.append(filename)
        else:
            match_results.goodMatches.append(filename)

    # --- mode: rename file from its metadata --------------------------------
    elif opts.rename_file:

        msg_hdr = ""
        if batch_mode:
            msg_hdr = "{0}: ".format(filename)

        if opts.data_style is not None:
            use_tags = has[opts.data_style]
        else:
            use_tags = False

        md = create_local_metadata(opts, ca, use_tags)

        if md.series is None:
            print(msg_hdr + "Can't rename without series name", file=sys.stderr)
            return

        new_ext = None  # default: keep the existing extension
        if settings.rename_extension_based_on_archive:
            if ca.isSevenZip():
                new_ext = ".cb7"
            elif ca.isZip():
                new_ext = ".cbz"
            elif ca.isRar():
                new_ext = ".cbr"

        renamer = FileRenamer(md)
        renamer.setTemplate(settings.rename_template)
        renamer.setIssueZeroPadding(settings.rename_issue_number_padding)
        renamer.setSmartCleanup(settings.rename_use_smart_string_cleanup)

        new_name = renamer.determineName(filename, ext=new_ext)

        if new_name == os.path.basename(filename):
            print(msg_hdr + "Filename is already good!", file=sys.stderr)
            return

        folder = os.path.dirname(os.path.abspath(filename))
        # unique_file avoids clobbering an existing file with the same name.
        new_abs_path = utils.unique_file(os.path.join(folder, new_name))

        suffix = ""
        if not opts.dryrun:
            # rename the file
            os.rename(filename, new_abs_path)
        else:
            suffix = " (dry-run, no change)"

        print((
            "renamed '{0}' -> '{1}' {2}".format(os.path.basename(filename), new_name, suffix)))

    # --- mode: export RAR archive to a new zip (.cbz) -----------------------
    elif opts.export_to_zip:
        msg_hdr = ""
        if batch_mode:
            msg_hdr = "{0}: ".format(filename)

        if not ca.isRar():
            print(msg_hdr + "Archive is not a RAR.", file=sys.stderr)
            return

        rar_file = os.path.abspath(os.path.abspath(filename))
        new_file = os.path.splitext(rar_file)[0] + ".cbz"

        if opts.abort_export_on_conflict and os.path.lexists(new_file):
            print(msg_hdr + "{0} already exists in the that folder.".format(os.path.split(new_file)[1]))
            return

        new_file = utils.unique_file(os.path.join(new_file))

        delete_success = False
        export_success = False
        if not opts.dryrun:
            if ca.exportAsZip(new_file):
                export_success = True
                if opts.delete_rar_after_export:
                    try:
                        os.unlink(rar_file)
                    except:
                        print(msg_hdr + \
                            "Error deleting original RAR after export", file=sys.stderr)
                        delete_success = False
                    else:
                        delete_success = True
            else:
                # last export failed, so remove the zip, if it exists
                if os.path.lexists(new_file):
                    os.remove(new_file)
        else:
            msg = msg_hdr + \
                "Dry-run: Would try to create {0}".format(
                    os.path.split(new_file)[1])
            if opts.delete_rar_after_export:
                msg += " and delete orginal."
            print(msg)
            return

        msg = msg_hdr
        if export_success:
            msg += "Archive exported successfully to: {0}".format(
                os.path.split(new_file)[1])
            if opts.delete_rar_after_export and delete_success:
                msg += " (Original deleted) "
        else:
            msg += "Archive failed to export!"

        print(msg)
|