2012-11-02 13:54:17 -07:00
|
|
|
#!/usr/bin/python
|
|
|
|
|
|
|
|
"""
|
2013-02-06 17:01:39 -08:00
|
|
|
Comic tagger CLI functions
|
2012-11-06 12:56:30 -08:00
|
|
|
"""
|
|
|
|
|
|
|
|
"""
|
2013-02-06 17:01:39 -08:00
|
|
|
Copyright 2013 Anthony Beville
|
2012-11-06 12:56:30 -08:00
|
|
|
|
|
|
|
Licensed under the Apache License, Version 2.0 (the "License");
|
|
|
|
you may not use this file except in compliance with the License.
|
|
|
|
You may obtain a copy of the License at
|
|
|
|
|
2015-02-12 14:57:46 -08:00
|
|
|
http://www.apache.org/licenses/LICENSE-2.0
|
2012-11-06 12:56:30 -08:00
|
|
|
|
|
|
|
Unless required by applicable law or agreed to in writing, software
|
|
|
|
distributed under the License is distributed on an "AS IS" BASIS,
|
|
|
|
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
|
|
See the License for the specific language governing permissions and
|
|
|
|
limitations under the License.
|
2012-11-02 13:54:17 -07:00
|
|
|
"""
|
|
|
|
|
|
|
|
import sys
|
|
|
|
import signal
|
2012-11-06 12:29:18 -08:00
|
|
|
import os
|
2012-11-14 17:25:01 -08:00
|
|
|
import traceback
|
|
|
|
import time
|
2012-11-19 12:28:19 -08:00
|
|
|
from pprint import pprint
|
|
|
|
import json
|
2012-11-27 16:33:51 -08:00
|
|
|
import platform
|
2012-12-29 21:06:12 -08:00
|
|
|
import locale
|
2015-02-13 15:08:07 -08:00
|
|
|
import codecs
|
2012-11-06 12:29:18 -08:00
|
|
|
|
2012-12-29 21:06:12 -08:00
|
|
|
filename_encoding = sys.getfilesystemencoding()
|
2012-11-02 13:54:17 -07:00
|
|
|
|
2012-11-10 11:02:38 -08:00
|
|
|
from settings import ComicTaggerSettings
|
2013-02-13 13:53:15 -08:00
|
|
|
from options import Options
|
|
|
|
from comicarchive import ComicArchive, MetaDataStyle
|
2012-11-10 11:02:38 -08:00
|
|
|
from issueidentifier import IssueIdentifier
|
2012-11-18 19:55:40 -08:00
|
|
|
from genericmetadata import GenericMetadata
|
2012-11-28 12:15:20 -08:00
|
|
|
from comicvinetalker import ComicVineTalker, ComicVineTalkerException
|
2012-12-14 21:54:12 -08:00
|
|
|
from filerenamer import FileRenamer
|
2012-12-17 13:19:21 -08:00
|
|
|
from cbltransformer import CBLTransformer
|
2012-11-06 12:29:18 -08:00
|
|
|
import utils
|
2015-02-13 15:08:07 -08:00
|
|
|
|
2012-11-12 16:12:43 -08:00
|
|
|
|
2012-12-05 20:46:01 -08:00
|
|
|
class MultipleMatch():
    """Pairs an archive filename with the list of candidate matches found
    for it, so the choice can be resolved later (interactively)."""

    def __init__(self, filename, match_list):
        # keep both pieces: the file path (to re-open the archive) and
        # the raw match dicts from the identifier
        self.filename, self.matches = filename, match_list
|
2012-12-05 20:46:01 -08:00
|
|
|
|
2015-02-13 15:08:07 -08:00
|
|
|
|
2012-12-05 20:46:01 -08:00
|
|
|
class OnlineMatchResults():
    """Accumulates per-file outcomes of a batch tagging run, bucketed by
    result type for the end-of-run summary."""

    def __init__(self):
        # every bucket starts as an empty list; most hold filenames,
        # multipleMatches/lowConfidenceMatches hold MultipleMatch objects
        for bucket in ('goodMatches',
                       'noMatches',
                       'multipleMatches',
                       'lowConfidenceMatches',
                       'writeFailures',
                       'fetchDataFailures'):
            setattr(self, bucket, [])
|
|
|
|
|
2012-11-02 13:54:17 -07:00
|
|
|
#-----------------------------
|
2012-12-05 20:46:01 -08:00
|
|
|
|
2015-02-13 15:08:07 -08:00
|
|
|
def actual_issue_data_fetch(match, settings, opts):
    """Fetch the full issue metadata from ComicVine for one search match.

    Returns the fetched metadata (after the optional CBL transform), or
    None if the network request failed.
    """
    # now get the particular issue data
    try:
        talker = ComicVineTalker()
        talker.wait_for_rate_limit = opts.wait_and_retry_on_rate_limit
        cv_md = talker.fetchIssueData(match['volume_id'], match['issue_number'], settings)
    except ComicVineTalkerException:
        print >> sys.stderr, "Network error while getting issue details. Save aborted"
        return None

    # optionally run the fetched tags through the CBL transform rules
    if settings.apply_cbl_transform_on_cv_import:
        cv_md = CBLTransformer(cv_md, settings).apply()

    return cv_md
|
2012-12-05 20:46:01 -08:00
|
|
|
|
2015-02-13 15:08:07 -08:00
|
|
|
def actual_metadata_save(ca, opts, md):
    """Write md into the archive in the selected tag style.

    Honors opts.dryrun (nothing written; tags shown unless terse).
    Returns False only when a real write was attempted and failed.
    """
    if opts.dryrun:
        # dry-run: report instead of writing
        if opts.terse:
            print >> sys.stderr,"dry-run option was set, so nothing was written"
        else:
            print >> sys.stderr,"dry-run option was set, so nothing was written, but here is the final set of tags:"
            print(u"{0}".format(md))
        return True

    # write out the new data
    if not ca.writeMetadata(md, opts.data_style):
        print >> sys.stderr,"The tag save seemed to fail!"
        return False

    print >> sys.stderr,"Save complete."
    return True
|
2013-02-05 14:27:35 -08:00
|
|
|
|
2015-02-13 15:08:07 -08:00
|
|
|
def display_match_set_for_choice(label, match_set, opts, settings):
    """List the candidate matches for one archive; in interactive mode,
    let the user pick one (or skip) and save the chosen issue's tags."""
    print(u"{0} -- {1}:".format(match_set.filename, label))

    # sort match list by year
    match_set.matches.sort(key=lambda k: k['year'])

    # 1-based numbering for the user-facing menu
    for num, match in enumerate(match_set.matches, 1):
        print(u"    {0}. {1} #{2} [{3}] ({4}/{5}) - {6}".format(
            num,
            match['series'],
            match['issue_number'],
            match['publisher'],
            match['month'],
            match['year'],
            match['issue_title']))

    if not opts.interactive:
        return

    # keep prompting until we get a valid menu number or 's'
    while True:
        answer = raw_input("Choose a match #, or 's' to skip: ")
        if (answer.isdigit() and int(answer) in range(1,len(match_set.matches)+1)) or answer == 's':
            break

    if answer == 's':
        return

    idx = int(answer) - 1
    # save the data!
    # we know at this point, that the file is all good to go
    ca = ComicArchive(match_set.filename, settings.rar_exe_path)
    md = create_local_metadata(opts, ca, ca.hasMetadata(opts.data_style))
    cv_md = actual_issue_data_fetch(match_set.matches[idx], settings, opts)
    md.overlay(cv_md)
    actual_metadata_save(ca, opts, md)
|
2015-02-12 14:57:46 -08:00
|
|
|
|
2012-12-05 20:46:01 -08:00
|
|
|
|
2015-02-13 15:08:07 -08:00
|
|
|
def post_process_matches(match_results, opts, settings):
    """Report the batch results and resolve ambiguous matches.

    With opts.show_save_summary, prints the filenames collected in each
    outcome bucket.  Unless running non-interactively without a summary,
    archives with multiple or low-confidence matches are then offered
    for a manual choice.
    """
    # now go through the match results
    if opts.show_save_summary:
        # (heading, bucket) pairs, printed only when non-empty
        sections = (
            ("\nSuccessful matches:\n------------------", match_results.goodMatches),
            ("\nNo matches:\n------------------", match_results.noMatches),
            ("\nFile Write Failures:\n------------------", match_results.writeFailures),
            ("\nNetwork Data Fetch Failures:\n------------------", match_results.fetchDataFailures),
        )
        for heading, filenames in sections:
            if len(filenames) > 0:
                print(heading)
                for f in filenames:
                    print(f)

    if not opts.show_save_summary and not opts.interactive:
        #just quit if we're not interactive or showing the summary
        return

    if len(match_results.multipleMatches) > 0:
        print("\nArchives with multiple high-confidence matches:\n------------------")
        for match_set in match_results.multipleMatches:
            display_match_set_for_choice("Multiple high-confidence matches", match_set, opts, settings)

    if len(match_results.lowConfidenceMatches) > 0:
        print("\nArchives with low-confidence matches:\n------------------")
        for match_set in match_results.lowConfidenceMatches:
            if len(match_set.matches) == 1:
                label = "Single low-confidence match"
            else:
                label = "Multiple low-confidence matches"
            display_match_set_for_choice(label, match_set, opts, settings)
|
2012-12-05 20:46:01 -08:00
|
|
|
|
|
|
|
|
2015-02-13 15:08:07 -08:00
|
|
|
def cli_mode(opts, settings):
    """Entry point for non-GUI operation: process every file named on
    the command line, then post-process the collected results."""
    if not opts.file_list:
        print >> sys.stderr,"You must specify at least one filename. Use the -h option for more info"
        return

    match_results = OnlineMatchResults()

    for fname in opts.file_list:
        # arguments may arrive as raw byte strings; decode them using
        # the filesystem encoding before use
        if type(fname) == str:
            fname = fname.decode(filename_encoding, 'replace')
        process_file_cli(fname, opts, settings, match_results)
        # keep output ordered when stdout is redirected
        sys.stdout.flush()

    post_process_matches(match_results, opts, settings)
|
2012-11-02 13:54:17 -07:00
|
|
|
|
2012-12-03 17:16:58 -08:00
|
|
|
|
2015-02-13 15:08:07 -08:00
|
|
|
def create_local_metadata(opts, ca, has_desired_tags):
    """Build the starting metadata for an archive.

    Layers, in increasing priority: a default page list, any existing
    tags of the desired style, info parsed from the filename, and
    finally metadata given explicitly on the command line.
    """
    md = GenericMetadata()
    md.setDefaultPageList(ca.getNumberOfPages())

    # existing tags in the archive (if any) replace the bare default
    if has_desired_tags:
        md = ca.readMetadata(opts.data_style)

    # overlay whatever can be parsed out of the filename itself
    if opts.parse_filename:
        md.overlay(ca.metadataFromFilename())

    # explicit command-line metadata always wins
    if opts.metadata is not None:
        md.overlay(opts.metadata)

    return md
|
2012-12-03 17:16:58 -08:00
|
|
|
|
2015-02-13 15:08:07 -08:00
|
|
|
def process_file_cli(filename, opts, settings, match_results):
    """Process a single archive according to the mode selected in opts.

    Exactly one mode flag (print_tags, delete_tags, copy_tags,
    save_tags, rename_file, export_to_zip) chooses the action.
    Outcomes of save operations are appended to the match_results
    buckets for the end-of-run summary.
    """

    # in batch mode, messages get prefixed with the filename
    batch_mode = len(opts.file_list) > 1

    ca = ComicArchive(filename, settings.rar_exe_path)

    # sanity checks before doing anything
    if not os.path.lexists(filename):
        print >> sys.stderr,"Cannot find "+ filename
        return

    if not ca.seemsToBeAComicArchive():
        print >> sys.stderr,"Sorry, but "+ filename + " is not a comic archive!"
        return

    #if not ca.isWritableForStyle(opts.data_style) and (opts.delete_tags or opts.save_tags or opts.rename_file):
    if not ca.isWritable() and (opts.delete_tags or opts.copy_tags or opts.save_tags or opts.rename_file):
        print >> sys.stderr,"This archive is not writable for that tag type"
        return

    # which tag styles are already present, indexed by MetaDataStyle constant
    has = [ False, False, False ]
    if ca.hasCIX(): has[ MetaDataStyle.CIX ] = True
    if ca.hasCBI(): has[ MetaDataStyle.CBI ] = True
    if ca.hasCoMet(): has[ MetaDataStyle.COMET ] = True

    if opts.print_tags:

        # with no specific style requested, first print a one-line overview
        if opts.data_style is None:
            page_count = ca.getNumberOfPages()

            brief = ""

            if batch_mode:
                brief = u"{0}: ".format(filename)

            if ca.isZip(): brief += "ZIP archive "
            elif ca.isRar(): brief += "RAR archive "
            elif ca.isFolder(): brief += "Folder archive "

            brief += "({0: >3} pages)".format(page_count)
            brief += " tags:[ "

            if not (has[ MetaDataStyle.CBI ] or has[ MetaDataStyle.CIX ] or has[ MetaDataStyle.COMET ]):
                brief += "none "
            else:
                if has[ MetaDataStyle.CBI ]: brief += "CBL "
                if has[ MetaDataStyle.CIX ]: brief += "CR "
                if has[ MetaDataStyle.COMET ]: brief += "CoMet "
            brief += "]"

            print brief

        if opts.terse:
            return

        print

        # dump each tag style that is present (raw form if requested)
        if opts.data_style is None or opts.data_style == MetaDataStyle.CIX:
            if has[ MetaDataStyle.CIX ]:
                print("--------- ComicRack tags ---------")
                if opts.raw:
                    print(u"{0}".format(unicode(ca.readRawCIX(), errors='ignore')))
                else:
                    print(u"{0}".format(ca.readCIX()))

        if opts.data_style is None or opts.data_style == MetaDataStyle.CBI:
            if has[ MetaDataStyle.CBI ]:
                print("------- ComicBookLover tags -------")
                if opts.raw:
                    pprint(json.loads(ca.readRawCBI()))
                else:
                    print(u"{0}".format(ca.readCBI()))

        if opts.data_style is None or opts.data_style == MetaDataStyle.COMET:
            if has[ MetaDataStyle.COMET ]:
                print("----------- CoMet tags -----------")
                if opts.raw:
                    print(u"{0}".format(ca.readRawCoMet()))
                else:
                    print(u"{0}".format(ca.readCoMet()))

    elif opts.delete_tags:
        # remove the selected tag style, honoring the dry-run flag
        style_name = MetaDataStyle.name[opts.data_style]
        if has[opts.data_style]:
            if not opts.dryrun:
                if not ca.removeMetadata(opts.data_style):
                    print(u"{0}: Tag removal seemed to fail!".format(filename))
                else:
                    print(u"{0}: Removed {1} tags.".format(filename, style_name))
            else:
                print(u"{0}: dry-run. {1} tags not removed".format(filename, style_name))
        else:
            print(u"{0}: This archive doesn't have {1} tags to remove.".format(filename, style_name))

    elif opts.copy_tags:
        # copy tags from one style (copy_source) into another (data_style)
        dst_style_name = MetaDataStyle.name[ opts.data_style ]
        if opts.no_overwrite and has[ opts.data_style ]:
            print(u"{0}: Already has {1} tags. Not overwriting.".format(filename, dst_style_name))
            return
        if opts.copy_source == opts.data_style:
            print(u"{0}: Destination and source are same: {1}. Nothing to do.".format(filename, dst_style_name))
            return

        src_style_name = MetaDataStyle.name[ opts.copy_source ]
        if has[ opts.copy_source ]:
            if not opts.dryrun:
                md = ca.readMetadata(opts.copy_source)

                # bulk copies into CBL may optionally apply the CBL transform
                if settings.apply_cbl_transform_on_bulk_operation and opts.data_style == MetaDataStyle.CBI:
                    md = CBLTransformer(md, settings).apply()

                if not ca.writeMetadata(md, opts.data_style):
                    print(u"{0}: Tag copy seemed to fail!".format(filename))
                else:
                    print(u"{0}: Copied {1} tags to {2} .".format(filename, src_style_name, dst_style_name))
            else:
                print(u"{0}: dry-run. {1} tags not copied".format(filename, src_style_name))
        else:
            print(u"{0}: This archive doesn't have {1} tags to copy.".format(filename, src_style_name))

    elif opts.save_tags:
        # build metadata (local + optional online lookup) and save it

        if opts.no_overwrite and has[ opts.data_style ]:
            print(u"{0}: Already has {1} tags. Not overwriting.".format(filename, MetaDataStyle.name[opts.data_style]))
            return

        if batch_mode:
            print(u"Processing {0}...".format(filename))

        md = create_local_metadata(opts, ca, has[ opts.data_style ])
        if md.issue is None or md.issue == "":
            if opts.assume_issue_is_one_if_not_set:
                md.issue = "1"

        # now, search online
        if opts.search_online:
            if opts.issue_id is not None:
                # we were given the actual ID to search with
                try:
                    comicVine = ComicVineTalker()
                    comicVine.wait_for_rate_limit = opts.wait_and_retry_on_rate_limit
                    cv_md = comicVine.fetchIssueDataByIssueID(opts.issue_id, settings)
                except ComicVineTalkerException:
                    print >> sys.stderr,"Network error while getting issue details. Save aborted"
                    match_results.fetchDataFailures.append(filename)
                    return

                if cv_md is None:
                    print >> sys.stderr,"No match for ID {0} was found.".format(opts.issue_id)
                    match_results.noMatches.append(filename)
                    return

                if settings.apply_cbl_transform_on_cv_import:
                    cv_md = CBLTransformer(cv_md, settings).apply()
            else:
                # no ID given: identify the issue from the metadata we have
                ii = IssueIdentifier(ca, settings)

                if md is None or md.isEmpty:
                    print >> sys.stderr,"No metadata given to search online with!"
                    match_results.noMatches.append(filename)
                    return

                # progress output from the identifier, shown only in verbose mode
                def myoutput(text):
                    if opts.verbose:
                        IssueIdentifier.defaultWriteOutput(text)

                # use our overlayed MD struct to search
                ii.setAdditionalMetadata(md)
                ii.onlyUseAdditionalMetaData = True
                ii.waitAndRetryOnRateLimit = opts.wait_and_retry_on_rate_limit
                ii.setOutputFunction(myoutput)
                # use the first declared cover page for image matching
                ii.cover_page_index = md.getCoverPageIndexList()[0]
                matches = ii.search()

                result = ii.search_result

                # translate the identifier's result code into three flags
                found_match = False
                choices = False
                low_confidence = False

                if result == ii.ResultNoMatches:
                    pass
                elif result == ii.ResultFoundMatchButBadCoverScore:
                    low_confidence = True
                    found_match = True
                elif result == ii.ResultFoundMatchButNotFirstPage :
                    found_match = True
                elif result == ii.ResultMultipleMatchesWithBadImageScores:
                    low_confidence = True
                    choices = True
                elif result == ii.ResultOneGoodMatch:
                    found_match = True
                elif result == ii.ResultMultipleGoodMatches:
                    choices = True

                # ambiguous or weak results are deferred to post-processing
                if choices:
                    if low_confidence:
                        print >> sys.stderr,"Online search: Multiple low confidence matches. Save aborted"
                        match_results.lowConfidenceMatches.append(MultipleMatch(filename,matches))
                        return
                    else:
                        print >> sys.stderr,"Online search: Multiple good matches. Save aborted"
                        match_results.multipleMatches.append(MultipleMatch(filename,matches))
                        return
                if low_confidence and opts.abortOnLowConfidence:
                    print >> sys.stderr,"Online search: Low confidence match. Save aborted"
                    match_results.lowConfidenceMatches.append(MultipleMatch(filename,matches))
                    return
                if not found_match:
                    print >> sys.stderr,"Online search: No match found. Save aborted"
                    match_results.noMatches.append(filename)
                    return

                # we got here, so we have a single match

                # now get the particular issue data
                cv_md = actual_issue_data_fetch(matches[0], settings, opts)
                if cv_md is None:
                    match_results.fetchDataFailures.append(filename)
                    return

            md.overlay(cv_md)

        # ok, done building our metadata. time to save
        if not actual_metadata_save(ca, opts, md):
            match_results.writeFailures.append(filename)
        else:
            match_results.goodMatches.append(filename)

    elif opts.rename_file:
        # rename the archive from its (local) metadata via the template

        msg_hdr = ""
        if batch_mode:
            msg_hdr = u"{0}: ".format(filename)

        if opts.data_style is not None:
            use_tags = has[ opts.data_style ]
        else:
            use_tags = False

        md = create_local_metadata(opts, ca, use_tags)

        if md.series is None:
            print >> sys.stderr, msg_hdr + "Can't rename without series name"
            return

        new_ext = None  # default: keep the existing extension
        if settings.rename_extension_based_on_archive:
            if ca.isZip():
                new_ext = ".cbz"
            elif ca.isRar():
                new_ext = ".cbr"

        renamer = FileRenamer(md)
        renamer.setTemplate(settings.rename_template)
        renamer.setIssueZeroPadding(settings.rename_issue_number_padding)
        renamer.setSmartCleanup(settings.rename_use_smart_string_cleanup)

        new_name = renamer.determineName(filename, ext=new_ext)

        if new_name == os.path.basename(filename):
            print >> sys.stderr, msg_hdr + "Filename is already good!"
            return

        folder = os.path.dirname(os.path.abspath(filename))
        # unique_file avoids clobbering an existing file with that name
        new_abs_path = utils.unique_file(os.path.join(folder, new_name))

        suffix = ""
        if not opts.dryrun:
            # rename the file
            os.rename(filename, new_abs_path)
        else:
            suffix = " (dry-run, no change)"

        print(u"renamed '{0}' -> '{1}' {2}".format(os.path.basename(filename), new_name, suffix))

    elif opts.export_to_zip:
        # convert a RAR archive to a .cbz zip, optionally deleting the original

        msg_hdr = ""
        if batch_mode:
            msg_hdr = u"{0}: ".format(filename)

        if not ca.isRar():
            print >> sys.stderr, msg_hdr + "Archive is not a RAR."
            return

        rar_file = os.path.abspath(os.path.abspath(filename))
        new_file = os.path.splitext(rar_file)[0] + ".cbz"

        if opts.abort_export_on_conflict and os.path.lexists(new_file):
            print msg_hdr + "{0} already exists in the that folder.".format(os.path.split(new_file)[1])
            return

        new_file = utils.unique_file(os.path.join(new_file))

        delete_success = False
        export_success = False
        if not opts.dryrun:
            if ca.exportAsZip(new_file):
                export_success = True
                if opts.delete_rar_after_export:
                    try:
                        os.unlink(rar_file)
                    # NOTE(review): bare except — swallows any error from the
                    # unlink, reporting it only as a deletion failure
                    except:
                        print >> sys.stderr, msg_hdr + "Error deleting original RAR after export"
                        delete_success = False
                    else:
                        delete_success = True
            else:
                # last export failed, so remove the zip, if it exists
                if os.path.lexists(new_file):
                    os.remove(new_file)
        else:
            msg = msg_hdr + u"Dry-run: Would try to create {0}".format(os.path.split(new_file)[1])
            if opts.delete_rar_after_export:
                msg += u" and delete orginal."
            print(msg)
            return

        msg = msg_hdr
        if export_success:
            msg += u"Archive exported successfully to: {0}".format(os.path.split(new_file)[1])
            if opts.delete_rar_after_export and delete_success:
                msg += u" (Original deleted) "
        else:
            msg += u"Archive failed to export!"

        print(msg)
|