2015-02-21 18:30:32 -08:00
|
|
|
"""A python class to manage caching of data from Comic Vine"""
|
2012-11-08 20:02:14 -08:00
|
|
|
|
2015-02-21 18:30:32 -08:00
|
|
|
# Copyright 2012-2014 Anthony Beville
|
2012-11-08 20:02:14 -08:00
|
|
|
|
2015-02-21 18:30:32 -08:00
|
|
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
|
|
|
# you may not use this file except in compliance with the License.
|
|
|
|
# You may obtain a copy of the License at
|
2012-11-08 20:02:14 -08:00
|
|
|
|
2015-02-21 18:30:32 -08:00
|
|
|
# http://www.apache.org/licenses/LICENSE-2.0
|
2012-11-08 20:02:14 -08:00
|
|
|
|
2015-02-21 18:30:32 -08:00
|
|
|
# Unless required by applicable law or agreed to in writing, software
|
|
|
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
|
|
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
|
|
# See the License for the specific language governing permissions and
|
|
|
|
# limitations under the License.
|
2012-11-08 20:02:14 -08:00
|
|
|
|
|
|
|
import sqlite3 as lite
|
|
|
|
import os
|
|
|
|
import datetime
|
2015-02-21 18:30:32 -08:00
|
|
|
#import sys
|
|
|
|
#from pprint import pprint
|
2012-11-08 20:02:14 -08:00
|
|
|
|
2012-12-30 15:32:37 -08:00
|
|
|
import ctversion
|
2012-11-20 00:57:12 -08:00
|
|
|
from settings import ComicTaggerSettings
|
2013-02-02 10:41:06 -08:00
|
|
|
import utils
|
2012-11-20 00:57:12 -08:00
|
|
|
|
2015-02-13 15:08:07 -08:00
|
|
|
|
2012-11-08 20:02:14 -08:00
|
|
|
class ComicVineCacher:
|
|
|
|
|
2015-02-13 15:08:07 -08:00
|
|
|
def __init__(self):
    """Open (and if necessary rebuild) the local Comic Vine cache DB.

    The cache lives in the ComicTagger settings folder.  A sidecar
    "cache_version.txt" records which ComicTagger version wrote the
    cache; on mismatch the whole cache is cleared so a stale schema is
    never reused, and a fresh database is created if none exists.
    """
    self.settings_folder = ComicTaggerSettings.getSettingsFolder()
    self.db_file = os.path.join(self.settings_folder, "cv_cache.db")
    self.version_file = os.path.join(
        self.settings_folder, "cache_version.txt")

    # verify that cache is from same version as this one
    data = ""
    try:
        # 'with' already closes the file; the old explicit f.close()
        # inside the block was redundant and has been removed.
        with open(self.version_file, 'rb') as f:
            data = f.read()
    except (IOError, OSError):
        # Missing/unreadable version file: leave data == "" so the
        # comparison below fails and the cache is cleared.
        pass

    if data != ctversion.version:
        self.clearCache()

    if not os.path.exists(self.db_file):
        self.create_cache_db()
|
|
|
|
|
2015-02-13 15:08:07 -08:00
|
|
|
def clearCache(self):
|
2015-02-12 14:57:46 -08:00
|
|
|
try:
|
2015-02-13 15:08:07 -08:00
|
|
|
os.unlink(self.db_file)
|
2015-02-12 14:57:46 -08:00
|
|
|
except:
|
|
|
|
pass
|
|
|
|
try:
|
2015-02-13 15:08:07 -08:00
|
|
|
os.unlink(self.version_file)
|
2015-02-12 14:57:46 -08:00
|
|
|
except:
|
|
|
|
pass
|
|
|
|
|
2015-02-13 15:08:07 -08:00
|
|
|
def create_cache_db(self):
    """Create a brand-new, empty cache database plus its version stamp."""

    # create the version file
    with open(self.version_file, 'w') as f:
        f.write(ctversion.version)

    # this will wipe out any existing version
    open(self.db_file, 'w').close()

    con = lite.connect(self.db_file)

    # DDL for every cache table; each entry is one CREATE TABLE
    # statement built from adjacent string literals.
    ddl_statements = (
        # name,id,start_year,publisher,image,description,count_of_issues
        "CREATE TABLE VolumeSearchCache("
        "search_term TEXT,"
        "id INT,"
        "name TEXT,"
        "start_year INT,"
        "publisher TEXT,"
        "count_of_issues INT,"
        "image_url TEXT,"
        "description TEXT,"
        "timestamp DATE DEFAULT (datetime('now','localtime'))) ",

        "CREATE TABLE Volumes("
        "id INT,"
        "name TEXT,"
        "publisher TEXT,"
        "count_of_issues INT,"
        "start_year INT,"
        "timestamp DATE DEFAULT (datetime('now','localtime')), "
        "PRIMARY KEY (id))",

        "CREATE TABLE AltCovers("
        "issue_id INT,"
        "url_list TEXT,"
        "timestamp DATE DEFAULT (datetime('now','localtime')), "
        "PRIMARY KEY (issue_id))",

        "CREATE TABLE Issues("
        "id INT,"
        "volume_id INT,"
        "name TEXT,"
        "issue_number TEXT,"
        "super_url TEXT,"
        "thumb_url TEXT,"
        "cover_date TEXT,"
        "site_detail_url TEXT,"
        "description TEXT,"
        "timestamp DATE DEFAULT (datetime('now','localtime')), "
        "PRIMARY KEY (id))",
    )

    # create tables
    with con:
        cur = con.cursor()
        for ddl in ddl_statements:
            cur.execute(ddl)
|
2015-02-12 14:57:46 -08:00
|
|
|
|
2015-02-13 15:08:07 -08:00
|
|
|
def add_search_results(self, search_term, cv_search_results):
    """Replace the cached volume-search results for *search_term*."""

    con = lite.connect(self.db_file)

    with con:
        con.text_factory = unicode
        cur = con.cursor()

        # remove all previous entries with this search term
        cur.execute(
            "DELETE FROM VolumeSearchCache WHERE search_term = ?",
            [search_term.lower()])

        # now add in new results
        for record in cv_search_results:
            timestamp = datetime.datetime.now()

            # publisher/image sub-records may be missing entirely
            publisher = record['publisher']
            pub_name = "" if publisher is None else publisher['name']

            image = record['image']
            url = "" if image is None else image['super_url']

            cur.execute(
                "INSERT INTO VolumeSearchCache "
                "(search_term, id, name, start_year, publisher, count_of_issues, image_url, description) "
                "VALUES(?, ?, ?, ?, ?, ?, ?, ?)",
                (search_term.lower(),
                 record['id'],
                 record['name'],
                 record['start_year'],
                 pub_name,
                 record['count_of_issues'],
                 url,
                 record['description']))
|
2015-02-12 14:57:46 -08:00
|
|
|
|
2015-02-13 15:08:07 -08:00
|
|
|
def get_search_results(self, search_term):
    """Return the cached search results for *search_term* as a list of
    Comic Vine-style dicts (empty list on a cache miss)."""

    results = list()
    con = lite.connect(self.db_file)
    with con:
        con.text_factory = unicode
        cur = con.cursor()

        # purge stale search results
        a_day_ago = datetime.datetime.today() - datetime.timedelta(days=1)
        cur.execute(
            "DELETE FROM VolumeSearchCache WHERE timestamp < ?",
            [str(a_day_ago)])

        # fetch
        cur.execute(
            "SELECT * FROM VolumeSearchCache WHERE search_term=?", [search_term.lower()])
        rows = cur.fetchall()

        # now process the results; column order matches the
        # VolumeSearchCache schema (search_term is column 0).
        for record in rows:
            results.append({
                'id': record[1],
                'name': record[2],
                'start_year': record[3],
                'publisher': {'name': record[4]},
                'count_of_issues': record[5],
                'image': {'super_url': record[6]},
                'description': record[7],
            })

    return results
|
|
|
|
|
2015-02-13 15:08:07 -08:00
|
|
|
def add_alt_covers(self, issue_id, url_list):
    """Replace the cached alternate-cover URL list for one issue."""

    con = lite.connect(self.db_file)

    with con:
        con.text_factory = unicode
        cur = con.cursor()

        # remove all previous entries with this search term
        cur.execute("DELETE FROM AltCovers WHERE issue_id = ?", [issue_id])

        # now add in new record; the list is flattened to one
        # comma-separated string column
        url_list_str = utils.listToString(url_list)
        cur.execute(
            "INSERT INTO AltCovers "
            "(issue_id, url_list) "
            "VALUES(?, ?)",
            (issue_id, url_list_str))
|
2012-11-08 20:02:14 -08:00
|
|
|
|
2015-02-13 15:08:07 -08:00
|
|
|
def get_alt_covers(self, issue_id):
    """Return the cached alt-cover URL list for *issue_id*.

    Returns None on a cache miss, [] when the issue is cached with no
    alternate covers, otherwise a list of stripped URL strings.
    """

    con = lite.connect(self.db_file)
    with con:
        cur = con.cursor()
        con.text_factory = unicode

        # purge stale issue info - probably issue data won't change
        # much....
        a_month_ago = datetime.datetime.today() - \
            datetime.timedelta(days=30)
        cur.execute(
            "DELETE FROM AltCovers WHERE timestamp < ?",
            [str(a_month_ago)])

        cur.execute(
            "SELECT url_list FROM AltCovers WHERE issue_id=?", [issue_id])
        row = cur.fetchone()

        # guard clauses replace the original nested if/else
        if row is None:
            return None
        url_list_str = row[0]
        if len(url_list_str) == 0:
            return []
        return [str(item).strip() for item in url_list_str.split(",")]
|
|
|
|
|
2015-02-13 15:08:07 -08:00
|
|
|
def add_volume_info(self, cv_volume_record):
|
2015-02-12 14:57:46 -08:00
|
|
|
|
2015-02-13 15:08:07 -08:00
|
|
|
con = lite.connect(self.db_file)
|
2015-02-12 14:57:46 -08:00
|
|
|
|
|
|
|
with con:
|
|
|
|
|
|
|
|
cur = con.cursor()
|
|
|
|
|
|
|
|
timestamp = datetime.datetime.now()
|
|
|
|
|
|
|
|
if cv_volume_record['publisher'] is None:
|
|
|
|
pub_name = ""
|
|
|
|
else:
|
|
|
|
pub_name = cv_volume_record['publisher']['name']
|
|
|
|
|
|
|
|
data = {
|
2015-02-15 03:44:09 -08:00
|
|
|
"name": cv_volume_record['name'],
|
|
|
|
"publisher": pub_name,
|
2015-02-15 02:44:00 -08:00
|
|
|
"count_of_issues": cv_volume_record['count_of_issues'],
|
2015-02-15 03:44:09 -08:00
|
|
|
"start_year": cv_volume_record['start_year'],
|
|
|
|
"timestamp": timestamp
|
2015-02-15 02:44:00 -08:00
|
|
|
}
|
2015-02-13 15:08:07 -08:00
|
|
|
self.upsert(cur, "volumes", "id", cv_volume_record['id'], data)
|
2015-02-12 14:57:46 -08:00
|
|
|
|
2015-02-13 15:08:07 -08:00
|
|
|
def add_volume_issues_info(self, volume_id, cv_volume_issues):
|
2015-02-12 14:57:46 -08:00
|
|
|
|
2015-02-13 15:08:07 -08:00
|
|
|
con = lite.connect(self.db_file)
|
2015-02-12 14:57:46 -08:00
|
|
|
|
|
|
|
with con:
|
|
|
|
|
|
|
|
cur = con.cursor()
|
|
|
|
|
|
|
|
timestamp = datetime.datetime.now()
|
|
|
|
|
|
|
|
# add in issues
|
|
|
|
|
|
|
|
for issue in cv_volume_issues:
|
|
|
|
|
|
|
|
data = {
|
2015-02-15 03:44:09 -08:00
|
|
|
"volume_id": volume_id,
|
|
|
|
"name": issue['name'],
|
|
|
|
"issue_number": issue['issue_number'],
|
2015-02-15 02:44:00 -08:00
|
|
|
"site_detail_url": issue['site_detail_url'],
|
2015-02-15 03:44:09 -08:00
|
|
|
"cover_date": issue['cover_date'],
|
|
|
|
"super_url": issue['image']['super_url'],
|
|
|
|
"thumb_url": issue['image']['thumb_url'],
|
|
|
|
"description": issue['description'],
|
|
|
|
"timestamp": timestamp
|
2015-02-15 02:44:00 -08:00
|
|
|
}
|
|
|
|
self.upsert(cur, "issues", "id", issue['id'], data)
|
2015-02-12 14:57:46 -08:00
|
|
|
|
2015-02-13 15:08:07 -08:00
|
|
|
def get_volume_info(self, volume_id):
    """Return the cached record for *volume_id* as a Comic Vine-style
    dict, or None on a cache miss."""

    result = None

    con = lite.connect(self.db_file)
    with con:
        cur = con.cursor()
        con.text_factory = unicode

        # purge stale volume info
        a_week_ago = datetime.datetime.today() - datetime.timedelta(days=7)
        cur.execute(
            "DELETE FROM Volumes WHERE timestamp < ?", [str(a_week_ago)])

        # fetch
        cur.execute(
            "SELECT id,name,publisher,count_of_issues,start_year FROM Volumes WHERE id = ?",
            [volume_id])

        row = cur.fetchone()

        if row is None:
            return result

        # since ID is primary key, there is only one row
        result = {
            'id': row[0],
            'name': row[1],
            'publisher': {'name': row[2]},
            'count_of_issues': row[3],
            'start_year': row[4],
            # issues are cached separately; callers fill this in
            'issues': list(),
        }

    return result
|
|
|
|
|
2015-02-13 15:08:07 -08:00
|
|
|
def get_volume_issues_info(self, volume_id):
    """Return the cached issue records of *volume_id* as a list of
    Comic Vine-style dicts, or None when nothing is cached."""

    con = lite.connect(self.db_file)
    with con:
        cur = con.cursor()
        con.text_factory = unicode

        # purge stale issue info - probably issue data won't change
        # much....
        a_week_ago = datetime.datetime.today() - datetime.timedelta(days=7)
        cur.execute(
            "DELETE FROM Issues WHERE timestamp < ?", [str(a_week_ago)])

        # fetch
        cur.execute(
            "SELECT id,name,issue_number,site_detail_url,cover_date,super_url,thumb_url,description FROM Issues WHERE volume_id = ?",
            [volume_id])
        rows = cur.fetchall()

        # now process the results
        results = []
        for row in rows:
            results.append({
                'id': row[0],
                'name': row[1],
                'issue_number': row[2],
                'site_detail_url': row[3],
                'cover_date': row[4],
                'image': {'super_url': row[5], 'thumb_url': row[6]},
                'description': row[7],
            })

    if len(results) == 0:
        return None
    return results
|
|
|
|
|
2015-02-15 03:44:09 -08:00
|
|
|
def add_issue_select_details(
        self,
        issue_id,
        image_url,
        thumb_image_url,
        cover_date,
        site_detail_url):
    """Insert-or-update the detail fields fetched when an issue is
    selected in the UI."""

    con = lite.connect(self.db_file)

    with con:
        cur = con.cursor()
        con.text_factory = unicode
        timestamp = datetime.datetime.now()

        data = dict(
            super_url=image_url,
            thumb_url=thumb_image_url,
            cover_date=cover_date,
            site_detail_url=site_detail_url,
            timestamp=timestamp,
        )
        self.upsert(cur, "issues", "id", issue_id, data)
|
2015-02-12 14:57:46 -08:00
|
|
|
|
2015-02-13 15:08:07 -08:00
|
|
|
def get_issue_select_details(self, issue_id):
    """Return the cached select-details for *issue_id*.

    Always returns a dict with keys image_url, thumb_image_url,
    cover_date and site_detail_url; every value is None when the issue
    is not cached (or has no cached image URL).
    """

    con = lite.connect(self.db_file)
    with con:
        cur = con.cursor()
        con.text_factory = unicode

        cur.execute(
            "SELECT super_url,thumb_url,cover_date,site_detail_url FROM Issues WHERE id=?",
            [issue_id])
        row = cur.fetchone()

        # result keys, in the same order as the SELECT columns
        keys = ('image_url', 'thumb_image_url',
                'cover_date', 'site_detail_url')
        if row is None or row[0] is None:
            details = dict.fromkeys(keys)
        else:
            details = dict(zip(keys, row))

        return details
|
|
|
|
|
2015-02-13 15:08:07 -08:00
|
|
|
def upsert(self, cur, tablename, pkname, pkval, data):
|
2015-02-21 18:30:32 -08:00
|
|
|
"""This does an insert if the given PK doesn't exist, and an
|
|
|
|
update it if does
|
2015-02-12 14:57:46 -08:00
|
|
|
|
2015-02-21 18:30:32 -08:00
|
|
|
TODO: look into checking if UPDATE is needed
|
|
|
|
TODO: should the cursor be created here, and not up the stack?
|
|
|
|
"""
|
2015-02-12 14:57:46 -08:00
|
|
|
|
|
|
|
ins_count = len(data) + 1
|
|
|
|
|
|
|
|
keys = ""
|
|
|
|
vals = list()
|
|
|
|
ins_slots = ""
|
|
|
|
set_slots = ""
|
|
|
|
|
|
|
|
for key in data:
|
|
|
|
|
2015-02-15 02:44:00 -08:00
|
|
|
if keys != "":
|
2015-02-12 14:57:46 -08:00
|
|
|
keys += ", "
|
2015-02-15 02:44:00 -08:00
|
|
|
if ins_slots != "":
|
2015-02-12 14:57:46 -08:00
|
|
|
ins_slots += ", "
|
2015-02-15 02:44:00 -08:00
|
|
|
if set_slots != "":
|
2015-02-12 14:57:46 -08:00
|
|
|
set_slots += ", "
|
|
|
|
|
|
|
|
keys += key
|
2015-02-13 15:08:07 -08:00
|
|
|
vals.append(data[key])
|
2015-02-12 14:57:46 -08:00
|
|
|
ins_slots += "?"
|
|
|
|
set_slots += key + " = ?"
|
|
|
|
|
|
|
|
keys += ", " + pkname
|
2015-02-13 15:08:07 -08:00
|
|
|
vals.append(pkval)
|
2015-02-12 14:57:46 -08:00
|
|
|
ins_slots += ", ?"
|
|
|
|
condition = pkname + " = ?"
|
|
|
|
|
2015-02-15 02:44:00 -08:00
|
|
|
sql_ins = ("INSERT OR IGNORE INTO " + tablename +
|
|
|
|
" (" + keys + ") " +
|
|
|
|
" VALUES (" + ins_slots + ")")
|
|
|
|
cur.execute(sql_ins, vals)
|
2015-02-12 14:57:46 -08:00
|
|
|
|
2015-02-15 02:44:00 -08:00
|
|
|
sql_upd = ("UPDATE " + tablename +
|
|
|
|
" SET " + set_slots + " WHERE " + condition)
|
|
|
|
cur.execute(sql_upd, vals)
|