Database backend: Make the database backend more robust when the calibre library is placed on flaky filesystems. Now when an I/O error occurs while querying the database, the connection to the database is closed and re-opened and the query is retried. See #1286522 (Errors accessing SD card after suspend)

This commit is contained in:
Kovid Goyal 2014-03-02 10:11:24 +05:30
parent 64d48f778c
commit 281b416406
5 changed files with 169 additions and 139 deletions

View File

@ -102,14 +102,13 @@ class DBPrefs(dict): # {{{
def __delitem__(self, key):
dict.__delitem__(self, key)
self.db.conn.execute('DELETE FROM preferences WHERE key=?', (key,))
self.db.execute('DELETE FROM preferences WHERE key=?', (key,))
def __setitem__(self, key, val):
if self.disable_setting:
return
raw = self.to_raw(val)
self.db.conn.execute('INSERT OR REPLACE INTO preferences (key,val) VALUES (?,?)', (key,
raw))
self.db.execute('INSERT OR REPLACE INTO preferences (key,val) VALUES (?,?)', (key, raw))
dict.__setitem__(self, key, val)
def set(self, key, val):
@ -348,13 +347,13 @@ class DB(object):
os.makedirs(self.library_path)
self.is_case_sensitive = is_case_sensitive(self.library_path)
SchemaUpgrade(self.conn, self.library_path, self.field_metadata)
SchemaUpgrade(self, self.library_path, self.field_metadata)
# Guarantee that the library_id is set
self.library_id
# Fix legacy triggers and columns
self.conn.execute('''
self.execute('''
DROP TRIGGER IF EXISTS author_insert_trg;
CREATE TEMP TRIGGER author_insert_trg
AFTER INSERT ON authors
@ -527,7 +526,7 @@ class DB(object):
'SELECT id FROM custom_columns WHERE mark_for_delete=1'):
num = record[0]
table, lt = self.custom_table_names(num)
self.conn.execute('''\
self.execute('''\
DROP INDEX IF EXISTS {table}_idx;
DROP INDEX IF EXISTS {lt}_aidx;
DROP INDEX IF EXISTS {lt}_bidx;
@ -544,7 +543,7 @@ class DB(object):
'''.format(table=table, lt=lt)
)
self.prefs.set('update_all_last_mod_dates_on_start', True)
self.conn.execute('DELETE FROM custom_columns WHERE mark_for_delete=1')
self.execute('DELETE FROM custom_columns WHERE mark_for_delete=1')
# Load metadata for custom columns
self.custom_column_label_map, self.custom_column_num_map = {}, {}
@ -600,12 +599,12 @@ class DB(object):
for data in remove:
prints('WARNING: Custom column %r not found, removing.' %
data['label'])
self.conn.execute('DELETE FROM custom_columns WHERE id=?',
self.execute('DELETE FROM custom_columns WHERE id=?',
(data['num'],))
if triggers:
with self.conn:
self.conn.execute('''\
self.execute('''\
CREATE TEMP TRIGGER custom_books_delete_trg
AFTER DELETE ON books
BEGIN
@ -787,6 +786,38 @@ class DB(object):
self._conn = Connection(self.dbpath)
return self._conn
def execute(self, sql, bindings=None):
    ''' Execute a single SQL statement, retrying once if the query fails
    with an I/O error.

    :param sql: The SQL to run (may contain multiple statements)
    :param bindings: Optional sequence of bindings for the statement
    :return: The cursor on which the SQL was executed
    '''
    try:
        return self.conn.cursor().execute(sql, bindings)
    except apsw.IOError:
        # This can happen if the computer was suspended, see for example:
        # https://bugs.launchpad.net/bugs/1286522. Try to reopen the db
        # and retry the query once.
        # Note: this is a method of DB itself, so reopen() must be called
        # on self (self.db does not exist on this class).
        self.reopen(force=True)
        return self.conn.cursor().execute(sql, bindings)
def executemany(self, sql, sequence_of_bindings):
    ''' Execute the same SQL statement for every set of bindings in
    sequence_of_bindings, retrying once if an I/O error occurs.

    :param sql: The SQL statement to run repeatedly
    :param sequence_of_bindings: Iterable of binding tuples
    :return: The cursor on which the SQL was executed
    '''
    try:
        with self.conn:  # Disable autocommit mode, for performance
            return self.conn.cursor().executemany(sql, sequence_of_bindings)
    except apsw.IOError:
        # This can happen if the computer was suspended, see for example:
        # https://bugs.launchpad.net/bugs/1286522. Try to reopen the db
        # and retry the query once.
        # Note: this is a method of DB itself, so reopen() must be called
        # on self (self.db does not exist on this class).
        self.reopen(force=True)
        with self.conn:  # Disable autocommit mode, for performance
            return self.conn.cursor().executemany(sql, sequence_of_bindings)
def get(self, *args, **kw):
    ''' Run a query and fetch its results.

    By default returns all result rows (as from fetchall()). Pass
    all=False to return only the first column of the first row, or None
    if the query produced no rows.
    '''
    ans = self.execute(*args)
    if kw.get('all', True):
        return ans.fetchall()
    try:
        # Use the next() builtin rather than the py2-only .next()
        # method, so this works on python 2.6+ and python 3
        return next(ans)[0]
    except (StopIteration, IndexError):
        # No rows, or an empty first row
        return None
def last_insert_rowid(self):
    '''Rowid generated by the most recent successful INSERT on the
    current database connection.'''
    conn = self.conn
    return conn.last_insert_rowid()
def custom_field_name(self, label=None, num=None):
if label is not None:
return self.field_metadata.custom_field_prefix + label
@ -800,17 +831,17 @@ class DB(object):
def set_custom_column_metadata(self, num, name=None, label=None, is_editable=None, display=None):
changed = False
if name is not None:
self.conn.execute('UPDATE custom_columns SET name=? WHERE id=?', (name, num))
self.execute('UPDATE custom_columns SET name=? WHERE id=?', (name, num))
changed = True
if label is not None:
self.conn.execute('UPDATE custom_columns SET label=? WHERE id=?', (label, num))
self.execute('UPDATE custom_columns SET label=? WHERE id=?', (label, num))
changed = True
if is_editable is not None:
self.conn.execute('UPDATE custom_columns SET editable=? WHERE id=?', (bool(is_editable), num))
self.execute('UPDATE custom_columns SET editable=? WHERE id=?', (bool(is_editable), num))
self.custom_column_num_map[num]['is_editable'] = bool(is_editable)
changed = True
if display is not None:
self.conn.execute('UPDATE custom_columns SET display=? WHERE id=?', (json.dumps(display), num))
self.execute('UPDATE custom_columns SET display=? WHERE id=?', (json.dumps(display), num))
changed = True
# Note: the caller is responsible for scheduling a metadata backup if necessary
return changed
@ -826,7 +857,7 @@ class DB(object):
normalized = datatype not in ('datetime', 'comments', 'int', 'bool',
'float', 'composite')
is_multiple = is_multiple and datatype in ('text', 'composite')
self.conn.execute(
self.execute(
('INSERT INTO '
'custom_columns(label,name,datatype,is_multiple,editable,display,normalized)'
'VALUES (?,?,?,?,?,?,?)'),
@ -968,22 +999,22 @@ class DB(object):
'''.format(table=table),
]
script = ' \n'.join(lines)
self.conn.execute(script)
self.execute(script)
self.prefs.set('update_all_last_mod_dates_on_start', True)
return num
# }}}
def delete_custom_column(self, label=None, num=None):
data = self.custom_field_metadata(label, num)
self.conn.execute('UPDATE custom_columns SET mark_for_delete=1 WHERE id=?', (data['num'],))
self.execute('UPDATE custom_columns SET mark_for_delete=1 WHERE id=?', (data['num'],))
def close(self):
def close(self, force=False):
if getattr(self, '_conn', None) is not None:
self._conn.close()
self._conn.close(force)
del self._conn
def reopen(self):
self.close()
def reopen(self, force=False):
self.close(force)
self._conn = None
self.conn
@ -1019,7 +1050,7 @@ class DB(object):
self.reopen()
def vacuum(self):
self.conn.execute('VACUUM')
self.execute('VACUUM')
@dynamic_property
def user_version(self):
@ -1029,7 +1060,7 @@ class DB(object):
return self.conn.get('pragma user_version;', all=False)
def fset(self, val):
self.conn.execute('pragma user_version=%d'%int(val))
self.execute('pragma user_version=%d'%int(val))
return property(doc=doc, fget=fget, fset=fset)
@ -1133,7 +1164,7 @@ class DB(object):
def fset(self, val):
self._library_id_ = unicode(val)
self.conn.execute('''
self.execute('''
DELETE FROM library_id;
INSERT INTO library_id (uuid) VALUES (?);
''', (self._library_id_,))
@ -1496,7 +1527,7 @@ class DB(object):
return f.read()
def remove_books(self, path_map, permanent=False):
self.conn.executemany(
self.executemany(
'DELETE FROM books WHERE id=?', [(x,) for x in path_map])
paths = {os.path.join(self.library_path, x) for x in path_map.itervalues() if x}
paths = {x for x in paths if os.path.exists(x) and self.is_deletable(x)}
@ -1513,8 +1544,8 @@ class DB(object):
def add_custom_data(self, name, val_map, delete_first):
if delete_first:
self.conn.execute('DELETE FROM books_plugin_data WHERE name=?', (name, ))
self.conn.executemany(
self.execute('DELETE FROM books_plugin_data WHERE name=?', (name, ))
self.executemany(
'INSERT OR REPLACE INTO books_plugin_data (book, name, val) VALUES (?, ?, ?)',
[(book_id, name, json.dumps(val, default=to_json))
for book_id, val in val_map.iteritems()])
@ -1530,11 +1561,11 @@ class DB(object):
if len(book_ids) == 1:
bid = next(iter(book_ids))
ans = {book_id:safe_load(val) for book_id, val in
self.conn.execute('SELECT book, val FROM books_plugin_data WHERE book=? AND name=?', (bid, name))}
self.execute('SELECT book, val FROM books_plugin_data WHERE book=? AND name=?', (bid, name))}
return ans or {bid:default}
ans = {}
for book_id, val in self.conn.execute(
for book_id, val in self.execute(
'SELECT book, val FROM books_plugin_data WHERE name=?', (name,)):
if not book_ids or book_id in book_ids:
val = safe_load(val)
@ -1543,13 +1574,13 @@ class DB(object):
def delete_custom_book_data(self, name, book_ids):
if book_ids:
self.conn.executemany('DELETE FROM books_plugin_data WHERE book=? AND name=?',
self.executemany('DELETE FROM books_plugin_data WHERE book=? AND name=?',
[(book_id, name) for book_id in book_ids])
else:
self.conn.execute('DELETE FROM books_plugin_data WHERE name=?', (name,))
self.execute('DELETE FROM books_plugin_data WHERE name=?', (name,))
def get_ids_for_custom_book_data(self, name):
return frozenset(r[0] for r in self.conn.execute('SELECT book FROM books_plugin_data WHERE name=?', (name,)))
return frozenset(r[0] for r in self.execute('SELECT book FROM books_plugin_data WHERE name=?', (name,)))
def conversion_options(self, book_id, fmt):
for (data,) in self.conn.get('SELECT data FROM conversion_options WHERE book=? AND format=?', (book_id, fmt.upper())):
@ -1558,20 +1589,20 @@ class DB(object):
def has_conversion_options(self, ids, fmt='PIPE'):
ids = frozenset(ids)
self.conn.execute('DROP TABLE IF EXISTS conversion_options_temp; CREATE TEMP TABLE conversion_options_temp (id INTEGER PRIMARY KEY);')
self.conn.executemany('INSERT INTO conversion_options_temp VALUES (?)', [(x,) for x in ids])
self.execute('DROP TABLE IF EXISTS conversion_options_temp; CREATE TEMP TABLE conversion_options_temp (id INTEGER PRIMARY KEY);')
self.executemany('INSERT INTO conversion_options_temp VALUES (?)', [(x,) for x in ids])
for (book_id,) in self.conn.get(
'SELECT book FROM conversion_options WHERE format=? AND book IN (SELECT id FROM conversion_options_temp)', (fmt.upper(),)):
return True
return False
def delete_conversion_options(self, book_ids, fmt):
self.conn.executemany('DELETE FROM conversion_options WHERE book=? AND format=?',
self.executemany('DELETE FROM conversion_options WHERE book=? AND format=?',
[(book_id, fmt.upper()) for book_id in book_ids])
def set_conversion_options(self, options, fmt):
options = [(book_id, fmt.upper(), buffer(cPickle.dumps(data, -1))) for book_id, data in options.iteritems()]
self.conn.executemany('INSERT OR REPLACE INTO conversion_options(book,format,data) VALUES (?,?,?)', options)
self.executemany('INSERT OR REPLACE INTO conversion_options(book,format,data) VALUES (?,?,?)', options)
def get_top_level_move_items(self, all_paths):
items = set(os.listdir(self.library_path))
@ -1627,9 +1658,9 @@ class DB(object):
pass
def restore_book(self, book_id, path, formats):
self.conn.execute('UPDATE books SET path=? WHERE id=?', (path.replace(os.sep, '/'), book_id))
self.execute('UPDATE books SET path=? WHERE id=?', (path.replace(os.sep, '/'), book_id))
vals = [(book_id, fmt, size, name) for fmt, size, name in formats]
self.conn.executemany('INSERT INTO data (book,format,uncompressed_size,name) VALUES (?,?,?,?)', vals)
self.executemany('INSERT INTO data (book,format,uncompressed_size,name) VALUES (?,?,?,?)', vals)
# }}}

View File

@ -160,7 +160,7 @@ class Cache(object):
self._search_api.change_locations(self.field_metadata.get_search_terms())
self.dirtied_cache = {x:i for i, (x,) in enumerate(
self.backend.conn.execute('SELECT book FROM metadata_dirtied'))}
self.backend.execute('SELECT book FROM metadata_dirtied'))}
if self.dirtied_cache:
self.dirtied_sequence = max(self.dirtied_cache.itervalues())+1
@ -923,7 +923,7 @@ class Cache(object):
self.dirtied_sequence = max(already_dirtied.itervalues()) + 1
self.dirtied_cache.update(already_dirtied)
if new_dirtied:
self.backend.conn.executemany('INSERT OR IGNORE INTO metadata_dirtied (book) VALUES (?)',
self.backend.executemany('INSERT OR IGNORE INTO metadata_dirtied (book) VALUES (?)',
((x,) for x in new_dirtied))
new_dirtied = {book_id:self.dirtied_sequence+i for i, book_id in enumerate(new_dirtied)}
self.dirtied_sequence = max(new_dirtied.itervalues()) + 1
@ -933,7 +933,7 @@ class Cache(object):
def commit_dirty_cache(self):
book_ids = [(x,) for x in self.dirtied_cache]
if book_ids:
self.backend.conn.executemany('INSERT OR IGNORE INTO metadata_dirtied (book) VALUES (?)', book_ids)
self.backend.executemany('INSERT OR IGNORE INTO metadata_dirtied (book) VALUES (?)', book_ids)
@write_api
def set_field(self, name, book_id_to_val_map, allow_case_change=True, do_path_update=True):
@ -1022,7 +1022,7 @@ class Cache(object):
'''
dc_sequence = self.dirtied_cache.get(book_id, None)
if dc_sequence is None or sequence is None or dc_sequence == sequence:
self.backend.conn.execute('DELETE FROM metadata_dirtied WHERE book=?',
self.backend.execute('DELETE FROM metadata_dirtied WHERE book=?',
(book_id,))
self.dirtied_cache.pop(book_id, None)
@ -1374,14 +1374,13 @@ class Cache(object):
aus = aus.decode(preferred_encoding, 'replace')
if isbytestring(mi.title):
mi.title = mi.title.decode(preferred_encoding, 'replace')
conn = self.backend.conn
if force_id is None:
conn.execute('INSERT INTO books(title, series_index, author_sort) VALUES (?, ?, ?)',
self.backend.execute('INSERT INTO books(title, series_index, author_sort) VALUES (?, ?, ?)',
(mi.title, series_index, aus))
else:
conn.execute('INSERT INTO books(id, title, series_index, author_sort) VALUES (?, ?, ?, ?)',
self.backend.execute('INSERT INTO books(id, title, series_index, author_sort) VALUES (?, ?, ?, ?)',
(force_id, mi.title, series_index, aus))
book_id = conn.last_insert_rowid()
book_id = self.backend.last_insert_rowid()
mi.timestamp = utcnow() if mi.timestamp is None else mi.timestamp
mi.pubdate = UNDEFINED_DATE if mi.pubdate is None else mi.pubdate
@ -1392,7 +1391,7 @@ class Cache(object):
self._set_field('uuid', {book_id:mi.uuid})
# Update the caches for fields from the books table
self.fields['size'].table.book_col_map[book_id] = 0
row = next(conn.execute('SELECT sort, series_index, author_sort, uuid, has_cover FROM books WHERE id=?', (book_id,)))
row = next(self.backend.execute('SELECT sort, series_index, author_sort, uuid, has_cover FROM books WHERE id=?', (book_id,)))
for field, val in zip(('sort', 'series_index', 'author_sort', 'uuid', 'cover'), row):
if field == 'cover':
val = bool(val)

View File

@ -14,35 +14,35 @@ from calibre.utils.date import isoformat, DEFAULT_DATE
class SchemaUpgrade(object):
def __init__(self, conn, library_path, field_metadata):
conn.execute('BEGIN EXCLUSIVE TRANSACTION')
self.conn = conn
def __init__(self, db, library_path, field_metadata):
db.execute('BEGIN EXCLUSIVE TRANSACTION')
self.db = db
self.library_path = library_path
self.field_metadata = field_metadata
# Upgrade database
try:
while True:
uv = self.conn.execute('pragma user_version').next()[0]
uv = self.db.execute('pragma user_version').next()[0]
meth = getattr(self, 'upgrade_version_%d'%uv, None)
if meth is None:
break
else:
prints('Upgrading database to version %d...'%(uv+1))
meth()
self.conn.execute('pragma user_version=%d'%(uv+1))
self.db.execute('pragma user_version=%d'%(uv+1))
except:
self.conn.execute('ROLLBACK')
self.db.execute('ROLLBACK')
raise
else:
self.conn.execute('COMMIT')
self.db.execute('COMMIT')
finally:
self.conn = self.field_metadata = None
self.db = self.field_metadata = None
def upgrade_version_1(self):
'''
Normalize indices.
'''
self.conn.execute('''\
self.db.execute('''\
DROP INDEX IF EXISTS authors_idx;
CREATE INDEX authors_idx ON books (author_sort COLLATE NOCASE, sort COLLATE NOCASE);
DROP INDEX IF EXISTS series_idx;
@ -65,14 +65,14 @@ class SchemaUpgrade(object):
END;
DELETE FROM %(table)s WHERE (SELECT COUNT(id) FROM books_%(ltable)s_link WHERE %(ltable_col)s=%(table)s.id) < 1;
'''
self.conn.execute(script%dict(ltable='authors', table='authors', ltable_col='author'))
self.conn.execute(script%dict(ltable='publishers', table='publishers', ltable_col='publisher'))
self.conn.execute(script%dict(ltable='tags', table='tags', ltable_col='tag'))
self.conn.execute(script%dict(ltable='series', table='series', ltable_col='series'))
self.db.execute(script%dict(ltable='authors', table='authors', ltable_col='author'))
self.db.execute(script%dict(ltable='publishers', table='publishers', ltable_col='publisher'))
self.db.execute(script%dict(ltable='tags', table='tags', ltable_col='tag'))
self.db.execute(script%dict(ltable='series', table='series', ltable_col='series'))
def upgrade_version_3(self):
' Add path to result cache '
self.conn.execute('''
self.db.execute('''
DROP VIEW IF EXISTS meta;
CREATE VIEW meta AS
SELECT id, title,
@ -95,7 +95,7 @@ class SchemaUpgrade(object):
def upgrade_version_4(self):
'Rationalize books table'
self.conn.execute('''
self.db.execute('''
CREATE TEMPORARY TABLE
books_backup(id,title,sort,timestamp,series_index,author_sort,isbn,path);
INSERT INTO books_backup SELECT id,title,sort,timestamp,series_index,author_sort,isbn,path FROM books;
@ -142,7 +142,7 @@ class SchemaUpgrade(object):
def upgrade_version_5(self):
'Update indexes/triggers for new books table'
self.conn.execute('''
self.db.execute('''
CREATE INDEX authors_idx ON books (author_sort COLLATE NOCASE);
CREATE INDEX books_idx ON books (sort COLLATE NOCASE);
CREATE TRIGGER books_delete_trg
@ -174,7 +174,7 @@ class SchemaUpgrade(object):
def upgrade_version_6(self):
'Show authors in order'
self.conn.execute('''
self.db.execute('''
DROP VIEW IF EXISTS meta;
CREATE VIEW meta AS
SELECT id, title,
@ -200,7 +200,7 @@ class SchemaUpgrade(object):
def upgrade_version_7(self):
'Add uuid column'
self.conn.execute('''
self.db.execute('''
ALTER TABLE books ADD COLUMN uuid TEXT;
DROP TRIGGER IF EXISTS books_insert_trg;
DROP TRIGGER IF EXISTS books_update_trg;
@ -243,7 +243,7 @@ class SchemaUpgrade(object):
def upgrade_version_8(self):
'Add Tag Browser views'
def create_tag_browser_view(table_name, column_name):
self.conn.execute('''
self.db.execute('''
DROP VIEW IF EXISTS tag_browser_{tn};
CREATE VIEW tag_browser_{tn} AS SELECT
id,
@ -260,7 +260,7 @@ class SchemaUpgrade(object):
def upgrade_version_9(self):
'Add custom columns'
self.conn.execute('''
self.db.execute('''
CREATE TABLE custom_columns (
id INTEGER PRIMARY KEY AUTOINCREMENT,
label TEXT NOT NULL,
@ -295,11 +295,11 @@ class SchemaUpgrade(object):
{cn}={tn}.id AND books_list_filter(book)) count
FROM {tn};
'''.format(tn=table_name, cn=column_name, vcn=view_column_name))
self.conn.execute(script)
self.db.execute(script)
for field in self.field_metadata.itervalues():
if field['is_category'] and not field['is_custom'] and 'link_column' in field:
table = self.conn.get(
table = self.db.get(
'SELECT name FROM sqlite_master WHERE type="table" AND name=?',
('books_%s_link'%field['table'],), all=False)
if table is not None:
@ -337,7 +337,7 @@ class SchemaUpgrade(object):
'''.format(tn=table_name, cn=column_name,
vcn=view_column_name, scn=sort_column_name))
self.conn.execute(script)
self.db.execute(script)
def create_cust_tag_browser_view(table_name, link_table_name):
script = '''
@ -371,18 +371,18 @@ class SchemaUpgrade(object):
value AS sort
FROM {table};
'''.format(lt=link_table_name, table=table_name)
self.conn.execute(script)
self.db.execute(script)
for field in self.field_metadata.itervalues():
if field['is_category'] and not field['is_custom'] and 'link_column' in field:
table = self.conn.get(
table = self.db.get(
'SELECT name FROM sqlite_master WHERE type="table" AND name=?',
('books_%s_link'%field['table'],), all=False)
if table is not None:
create_std_tag_browser_view(field['table'], field['link_column'],
field['column'], field['category_sort'])
db_tables = self.conn.get('''SELECT name FROM sqlite_master
db_tables = self.db.get('''SELECT name FROM sqlite_master
WHERE type='table'
ORDER BY name''')
tables = []
@ -393,7 +393,7 @@ class SchemaUpgrade(object):
if table.startswith('custom_column_') and link_table in tables:
create_cust_tag_browser_view(table, link_table)
self.conn.execute('UPDATE authors SET sort=author_to_author_sort(name)')
self.db.execute('UPDATE authors SET sort=author_to_author_sort(name)')
def upgrade_version_12(self):
'DB based preference store'
@ -404,7 +404,7 @@ class SchemaUpgrade(object):
val TEXT NON NULL,
UNIQUE(key));
'''
self.conn.execute(script)
self.db.execute(script)
def upgrade_version_13(self):
'Dirtied table for OPF metadata backups'
@ -415,12 +415,12 @@ class SchemaUpgrade(object):
UNIQUE(book));
INSERT INTO metadata_dirtied (book) SELECT id FROM books;
'''
self.conn.execute(script)
self.db.execute(script)
def upgrade_version_14(self):
'Cache has_cover'
self.conn.execute('ALTER TABLE books ADD COLUMN has_cover BOOL DEFAULT 0')
data = self.conn.get('SELECT id,path FROM books', all=True)
self.db.execute('ALTER TABLE books ADD COLUMN has_cover BOOL DEFAULT 0')
data = self.db.get('SELECT id,path FROM books', all=True)
def has_cover(path):
if path:
path = os.path.join(self.library_path, path.replace('/', os.sep),
@ -429,16 +429,16 @@ class SchemaUpgrade(object):
return False
ids = [(x[0],) for x in data if has_cover(x[1])]
self.conn.executemany('UPDATE books SET has_cover=1 WHERE id=?', ids)
self.db.executemany('UPDATE books SET has_cover=1 WHERE id=?', ids)
def upgrade_version_15(self):
'Remove commas from tags'
self.conn.execute("UPDATE OR IGNORE tags SET name=REPLACE(name, ',', ';')")
self.conn.execute("UPDATE OR IGNORE tags SET name=REPLACE(name, ',', ';;')")
self.conn.execute("UPDATE OR IGNORE tags SET name=REPLACE(name, ',', '')")
self.db.execute("UPDATE OR IGNORE tags SET name=REPLACE(name, ',', ';')")
self.db.execute("UPDATE OR IGNORE tags SET name=REPLACE(name, ',', ';;')")
self.db.execute("UPDATE OR IGNORE tags SET name=REPLACE(name, ',', '')")
def upgrade_version_16(self):
self.conn.execute('''
self.db.execute('''
DROP TRIGGER IF EXISTS books_update_trg;
CREATE TRIGGER books_update_trg
AFTER UPDATE ON books
@ -472,7 +472,7 @@ class SchemaUpgrade(object):
DELETE FROM books_plugin_data WHERE book=OLD.id;
END;
'''
self.conn.execute(script)
self.db.execute(script)
def upgrade_version_18(self):
'''
@ -584,10 +584,10 @@ class SchemaUpgrade(object):
'''%isoformat(DEFAULT_DATE, sep=' ')
# Sqlite does not support non constant default values in alter
# statements
self.conn.execute(script)
self.db.execute(script)
def upgrade_version_19(self):
recipes = self.conn.get('SELECT id,title,script FROM feeds')
recipes = self.db.get('SELECT id,title,script FROM feeds')
if recipes:
from calibre.web.feeds.recipes import (custom_recipes,
custom_recipe_filename)
@ -612,6 +612,6 @@ class SchemaUpgrade(object):
script = '''
ALTER TABLE authors ADD COLUMN link TEXT NOT NULL DEFAULT "";
'''
self.conn.execute(script)
self.db.execute(script)

View File

@ -99,7 +99,7 @@ class OneToOneTable(Table):
def read(self, db):
idcol = 'id' if self.metadata['table'] == 'books' else 'book'
query = db.conn.execute('SELECT {0}, {1} FROM {2}'.format(idcol,
query = db.execute('SELECT {0}, {1} FROM {2}'.format(idcol,
self.metadata['column'], self.metadata['table']))
if self.unserialize is None:
try:
@ -107,7 +107,7 @@ class OneToOneTable(Table):
except UnicodeDecodeError:
# The db is damaged, try to work around it by ignoring
# failures to decode utf-8
query = db.conn.execute('SELECT {0}, cast({1} as blob) FROM {2}'.format(idcol,
query = db.execute('SELECT {0}, cast({1} as blob) FROM {2}'.format(idcol,
self.metadata['column'], self.metadata['table']))
self.book_col_map = {k:bytes(val).decode('utf-8', 'replace') for k, val in query}
else:
@ -126,13 +126,13 @@ class PathTable(OneToOneTable):
def set_path(self, book_id, path, db):
self.book_col_map[book_id] = path
db.conn.execute('UPDATE books SET path=? WHERE id=?',
db.execute('UPDATE books SET path=? WHERE id=?',
(path, book_id))
class SizeTable(OneToOneTable):
def read(self, db):
query = db.conn.execute(
query = db.execute(
'SELECT books.id, (SELECT MAX(uncompressed_size) FROM data '
'WHERE data.book=books.id) FROM books')
self.book_col_map = dict(query)
@ -196,7 +196,7 @@ class ManyToOneTable(Table):
self.read_maps(db)
def read_id_maps(self, db):
query = db.conn.execute('SELECT id, {0} FROM {1}'.format(
query = db.execute('SELECT id, {0} FROM {1}'.format(
self.metadata['column'], self.metadata['table']))
if self.unserialize is None:
self.id_map = dict(query)
@ -207,7 +207,7 @@ class ManyToOneTable(Table):
def read_maps(self, db):
cbm = self.col_book_map
bcm = self.book_col_map
for book, item_id in db.conn.execute(
for book, item_id in db.execute(
'SELECT book, {0} FROM {1}'.format(
self.metadata['link_column'], self.link_table)):
cbm[item_id].add(book)
@ -221,7 +221,7 @@ class ManyToOneTable(Table):
book_ids = self.col_book_map.pop(item_id, ())
for book_id in book_ids:
self.book_col_map.pop(book_id, None)
db.conn.executemany('DELETE FROM {0} WHERE {1}=?'.format(
db.executemany('DELETE FROM {0} WHERE {1}=?'.format(
self.link_table, self.metadata['link_column']), tuple((x,) for x in extra_item_ids))
def fix_case_duplicates(self, db):
@ -238,10 +238,10 @@ class ManyToOneTable(Table):
books = self.col_book_map.pop(item_id, set())
for book_id in books:
self.book_col_map[book_id] = main_id
db.conn.executemany('UPDATE {0} SET {1}=? WHERE {1}=?'.format(
db.executemany('UPDATE {0} SET {1}=? WHERE {1}=?'.format(
self.link_table, self.metadata['link_column']),
tuple((main_id, x) for x in v))
db.conn.executemany('DELETE FROM {0} WHERE id=?'.format(self.metadata['table']),
db.executemany('DELETE FROM {0} WHERE id=?'.format(self.metadata['table']),
tuple((x,) for x in v))
def remove_books(self, book_ids, db):
@ -260,7 +260,7 @@ class ManyToOneTable(Table):
if self.id_map.pop(item_id, null) is not null:
clean.add(item_id)
if clean:
db.conn.executemany(
db.executemany(
'DELETE FROM {0} WHERE id=?'.format(self.metadata['table']),
[(x,) for x in clean])
return clean
@ -276,8 +276,8 @@ class ManyToOneTable(Table):
self.book_col_map.pop(book_id, None)
affected_books.update(book_ids)
item_ids = tuple((x,) for x in item_ids)
db.conn.executemany('DELETE FROM {0} WHERE {1}=?'.format(self.link_table, self.metadata['link_column']), item_ids)
db.conn.executemany('DELETE FROM {0} WHERE id=?'.format(self.metadata['table']), item_ids)
db.executemany('DELETE FROM {0} WHERE {1}=?'.format(self.link_table, self.metadata['link_column']), item_ids)
db.executemany('DELETE FROM {0} WHERE id=?'.format(self.metadata['table']), item_ids)
return affected_books
def rename_item(self, item_id, new_name, db):
@ -289,7 +289,7 @@ class ManyToOneTable(Table):
if existing_item is None or existing_item == item_id:
# A simple rename will do the trick
self.id_map[item_id] = new_name
db.conn.execute('UPDATE {0} SET {1}=? WHERE id=?'.format(table, col), (new_name, item_id))
db.execute('UPDATE {0} SET {1}=? WHERE id=?'.format(table, col), (new_name, item_id))
else:
# We have to replace
new_id = existing_item
@ -301,7 +301,7 @@ class ManyToOneTable(Table):
# For custom series this means that the series index can
# potentially have duplicates/be incorrect, but there is no way to
# handle that in this context.
db.conn.execute('UPDATE {0} SET {1}=? WHERE {1}=?; DELETE FROM {2} WHERE id=?'.format(
db.execute('UPDATE {0} SET {1}=? WHERE {1}=?; DELETE FROM {2} WHERE id=?'.format(
self.link_table, lcol, table), (existing_item, item_id, item_id))
return affected_books, new_id
@ -314,9 +314,9 @@ class RatingTable(ManyToOneTable):
bad_ids = {item_id for item_id, rating in self.id_map.iteritems() if rating == 0}
if bad_ids:
self.id_map = {item_id:rating for item_id, rating in self.id_map.iteritems() if rating != 0}
db.conn.executemany('DELETE FROM {0} WHERE {1}=?'.format(self.link_table, self.metadata['link_column']),
db.executemany('DELETE FROM {0} WHERE {1}=?'.format(self.link_table, self.metadata['link_column']),
tuple((x,) for x in bad_ids))
db.conn.execute('DELETE FROM {0} WHERE {1}=0'.format(
db.execute('DELETE FROM {0} WHERE {1}=0'.format(
self.metadata['table'], self.metadata['column']))
class ManyToManyTable(ManyToOneTable):
@ -334,7 +334,7 @@ class ManyToManyTable(ManyToOneTable):
def read_maps(self, db):
bcm = defaultdict(list)
cbm = self.col_book_map
for book, item_id in db.conn.execute(
for book, item_id in db.execute(
self.selectq.format(self.metadata['link_column'], self.link_table)):
cbm[item_id].add(book)
bcm[book].append(item_id)
@ -349,7 +349,7 @@ class ManyToManyTable(ManyToOneTable):
book_ids = self.col_book_map.pop(item_id, ())
for book_id in book_ids:
self.book_col_map[book_id] = tuple(iid for iid in self.book_col_map.pop(book_id, ()) if iid not in extra_item_ids)
db.conn.executemany('DELETE FROM {0} WHERE {1}=?'.format(
db.executemany('DELETE FROM {0} WHERE {1}=?'.format(
self.link_table, self.metadata['link_column']), tuple((x,) for x in extra_item_ids))
def remove_books(self, book_ids, db):
@ -368,7 +368,7 @@ class ManyToManyTable(ManyToOneTable):
if self.id_map.pop(item_id, null) is not null:
clean.add(item_id)
if clean and self.do_clean_on_remove:
db.conn.executemany(
db.executemany(
'DELETE FROM {0} WHERE id=?'.format(self.metadata['table']),
[(x,) for x in clean])
return clean
@ -384,8 +384,8 @@ class ManyToManyTable(ManyToOneTable):
self.book_col_map[book_id] = tuple(x for x in self.book_col_map.get(book_id, ()) if x != item_id)
affected_books.update(book_ids)
item_ids = tuple((x,) for x in item_ids)
db.conn.executemany('DELETE FROM {0} WHERE {1}=?'.format(self.link_table, self.metadata['link_column']), item_ids)
db.conn.executemany('DELETE FROM {0} WHERE id=?'.format(self.metadata['table']), item_ids)
db.executemany('DELETE FROM {0} WHERE {1}=?'.format(self.link_table, self.metadata['link_column']), item_ids)
db.executemany('DELETE FROM {0} WHERE id=?'.format(self.metadata['table']), item_ids)
return affected_books
def rename_item(self, item_id, new_name, db):
@ -397,7 +397,7 @@ class ManyToManyTable(ManyToOneTable):
if existing_item is None or existing_item == item_id:
# A simple rename will do the trick
self.id_map[item_id] = new_name
db.conn.execute('UPDATE {0} SET {1}=? WHERE id=?'.format(table, col), (new_name, item_id))
db.execute('UPDATE {0} SET {1}=? WHERE id=?'.format(table, col), (new_name, item_id))
else:
# We have to replace
new_id = existing_item
@ -409,9 +409,9 @@ class ManyToManyTable(ManyToOneTable):
for book_id in books:
self.book_col_map[book_id] = tuple((existing_item if x == item_id else x) for x in self.book_col_map.get(book_id, ()) if x != existing_item)
self.col_book_map[existing_item].update(books)
db.conn.executemany('DELETE FROM {0} WHERE book=? AND {1}=?'.format(self.link_table, lcol), [
db.executemany('DELETE FROM {0} WHERE book=? AND {1}=?'.format(self.link_table, lcol), [
(book_id, existing_item) for book_id in books])
db.conn.execute('UPDATE {0} SET {1}=? WHERE {1}=?; DELETE FROM {2} WHERE id=?'.format(
db.execute('UPDATE {0} SET {1}=? WHERE {1}=?; DELETE FROM {2} WHERE id=?'.format(
self.link_table, lcol, table), (existing_item, item_id, item_id))
return affected_books, new_id
@ -440,17 +440,17 @@ class ManyToManyTable(ManyToOneTable):
self.book_col_map[book_id] = vals
if len(orig) == len(vals):
# We have a simple replacement
db.conn.executemany(
db.executemany(
'UPDATE {0} SET {1}=? WHERE {1}=? AND book=?'.format(
self.link_table, self.metadata['link_column']),
tuple((main_id, x, book_id) for x in v))
else:
# duplicates
db.conn.execute('DELETE FROM {0} WHERE book=?'.format(self.link_table), (book_id,))
db.conn.executemany(
db.execute('DELETE FROM {0} WHERE book=?'.format(self.link_table), (book_id,))
db.executemany(
'INSERT INTO {0} (book,{1}) VALUES (?,?)'.format(self.link_table, self.metadata['link_column']),
tuple((book_id, x) for x in vals))
db.conn.executemany('DELETE FROM {0} WHERE id=?'.format(self.metadata['table']),
db.executemany('DELETE FROM {0} WHERE id=?'.format(self.metadata['table']),
tuple((x,) for x in v))
class AuthorsTable(ManyToManyTable):
@ -460,7 +460,7 @@ class AuthorsTable(ManyToManyTable):
self.asort_map = sm = {}
self.id_map = im = {}
us = self.unserialize
for aid, name, sort, link in db.conn.execute(
for aid, name, sort, link in db.execute(
'SELECT id, name, sort, link FROM authors'):
name = us(name)
im[aid] = name
@ -471,7 +471,7 @@ class AuthorsTable(ManyToManyTable):
aus_map = {aid:(a or '').strip() for aid, a in aus_map.iteritems()}
aus_map = {aid:a for aid, a in aus_map.iteritems() if a != self.asort_map.get(aid, None)}
self.asort_map.update(aus_map)
db.conn.executemany('UPDATE authors SET sort=? WHERE id=?',
db.executemany('UPDATE authors SET sort=? WHERE id=?',
[(v, k) for k, v in aus_map.iteritems()])
return aus_map
@ -479,7 +479,7 @@ class AuthorsTable(ManyToManyTable):
link_map = {aid:(l or '').strip() for aid, l in link_map.iteritems()}
link_map = {aid:l for aid, l in link_map.iteritems() if l != self.alink_map.get(aid, None)}
self.alink_map.update(link_map)
db.conn.executemany('UPDATE authors SET link=? WHERE id=?',
db.executemany('UPDATE authors SET link=? WHERE id=?',
[(v, k) for k, v in link_map.iteritems()])
return link_map
@ -520,7 +520,7 @@ class FormatsTable(ManyToManyTable):
self.col_book_map = cbm = defaultdict(set)
bcm = defaultdict(list)
for book, fmt, name, sz in db.conn.execute('SELECT book, format, name, uncompressed_size FROM data'):
for book, fmt, name, sz in db.execute('SELECT book, format, name, uncompressed_size FROM data'):
if fmt is not None:
fmt = fmt.upper()
cbm[fmt].add(book)
@ -539,7 +539,7 @@ class FormatsTable(ManyToManyTable):
def set_fname(self, book_id, fmt, fname, db):
self.fname_map[book_id][fmt] = fname
db.conn.execute('UPDATE data SET name=? WHERE book=? AND format=?',
db.execute('UPDATE data SET name=? WHERE book=? AND format=?',
(fname, book_id, fmt))
def remove_formats(self, formats_map, db):
@ -552,7 +552,7 @@ class FormatsTable(ManyToManyTable):
self.col_book_map[fmt].discard(book_id)
except KeyError:
pass
db.conn.executemany('DELETE FROM data WHERE book=? AND format=?',
db.executemany('DELETE FROM data WHERE book=? AND format=?',
[(book_id, fmt) for book_id, fmts in formats_map.iteritems() for fmt in fmts])
def zero_max(book_id):
try:
@ -584,7 +584,7 @@ class FormatsTable(ManyToManyTable):
self.fname_map[book_id][fmt] = fname
self.size_map[book_id][fmt] = size
db.conn.execute('INSERT OR REPLACE INTO data (book,format,uncompressed_size,name) VALUES (?,?,?,?)',
db.execute('INSERT OR REPLACE INTO data (book,format,uncompressed_size,name) VALUES (?,?,?,?)',
(book_id, fmt, size, fname))
return max(self.size_map[book_id].itervalues())
@ -599,7 +599,7 @@ class IdentifiersTable(ManyToManyTable):
def read_maps(self, db):
self.book_col_map = defaultdict(dict)
self.col_book_map = defaultdict(set)
for book, typ, val in db.conn.execute('SELECT book, type, val FROM identifiers'):
for book, typ, val in db.execute('SELECT book, type, val FROM identifiers'):
if typ is not None and val is not None:
self.col_book_map[typ].add(book)
self.book_col_map[book][typ] = val

View File

@ -174,7 +174,7 @@ def one_one_in_books(book_id_val_map, db, field, *args):
'Set a one-one field in the books table'
if book_id_val_map:
sequence = ((sqlite_datetime(v), k) for k, v in book_id_val_map.iteritems())
db.conn.executemany(
db.executemany(
'UPDATE books SET %s=? WHERE id=?'%field.metadata['column'], sequence)
field.table.book_col_map.update(book_id_val_map)
return set(book_id_val_map)
@ -194,13 +194,13 @@ def one_one_in_other(book_id_val_map, db, field, *args):
'Set a one-one field in the non-books table, like comments'
deleted = tuple((k,) for k, v in book_id_val_map.iteritems() if v is None)
if deleted:
db.conn.executemany('DELETE FROM %s WHERE book=?'%field.metadata['table'],
db.executemany('DELETE FROM %s WHERE book=?'%field.metadata['table'],
deleted)
for book_id in deleted:
field.table.book_col_map.pop(book_id[0], None)
updated = {k:v for k, v in book_id_val_map.iteritems() if v is not None}
if updated:
db.conn.executemany('INSERT OR REPLACE INTO %s(book,%s) VALUES (?,?)'%(
db.executemany('INSERT OR REPLACE INTO %s(book,%s) VALUES (?,?)'%(
field.metadata['table'], field.metadata['column']),
((k, sqlite_datetime(v)) for k, v in updated.iteritems()))
field.table.book_col_map.update(updated)
@ -217,7 +217,7 @@ def custom_series_index(book_id_val_map, db, field, *args):
sequence.append((sidx, book_id, ids[0]))
field.table.book_col_map[book_id] = sidx
if sequence:
db.conn.executemany('UPDATE %s SET %s=? WHERE book=? AND value=?'%(
db.executemany('UPDATE %s SET %s=? WHERE book=? AND value=?'%(
field.metadata['table'], field.metadata['column']), sequence)
return {s[1] for s in sequence}
# }}}
@ -239,12 +239,12 @@ def get_db_id(val, db, m, table, kmap, rid_map, allow_case_change,
if item_id is None:
if is_authors:
aus = author_to_author_sort(val)
db.conn.execute('INSERT INTO authors(name,sort) VALUES (?,?)',
db.execute('INSERT INTO authors(name,sort) VALUES (?,?)',
(val.replace(',', '|'), aus))
else:
db.conn.execute('INSERT INTO %s(%s) VALUES (?)'%(
db.execute('INSERT INTO %s(%s) VALUES (?)'%(
m['table'], m['column']), (val,))
item_id = rid_map[kval] = db.conn.last_insert_rowid()
item_id = rid_map[kval] = db.last_insert_rowid()
table.id_map[item_id] = val
table.col_book_map[item_id] = set()
if is_authors:
@ -260,7 +260,7 @@ def change_case(case_changes, dirtied, db, table, m, is_authors=False):
case_changes.iteritems())
else:
vals = ((val, item_id) for item_id, val in case_changes.iteritems())
db.conn.executemany(
db.executemany(
'UPDATE %s SET %s=? WHERE id=?'%(m['table'], m['column']), vals)
for item_id, val in case_changes.iteritems():
table.id_map[item_id] = val
@ -316,7 +316,7 @@ def many_one(book_id_val_map, db, field, allow_case_change, *args):
# Update the db link table
if deleted:
db.conn.executemany('DELETE FROM %s WHERE book=?'%table.link_table,
db.executemany('DELETE FROM %s WHERE book=?'%table.link_table,
((k,) for k in deleted))
if updated:
sql = (
@ -324,7 +324,7 @@ def many_one(book_id_val_map, db, field, allow_case_change, *args):
if is_custom_series else
'DELETE FROM {0} WHERE book=?; INSERT INTO {0}(book,{1}) VALUES(?, ?)'
)
db.conn.executemany(sql.format(table.link_table, m['link_column']),
db.executemany(sql.format(table.link_table, m['link_column']),
((book_id, book_id, item_id) for book_id, item_id in
updated.iteritems()))
@ -332,7 +332,7 @@ def many_one(book_id_val_map, db, field, allow_case_change, *args):
remove = {item_id for item_id in table.id_map if not
table.col_book_map.get(item_id, False)}
if remove:
db.conn.executemany('DELETE FROM %s WHERE id=?'%m['table'],
db.executemany('DELETE FROM %s WHERE id=?'%m['table'],
((item_id,) for item_id in remove))
for item_id in remove:
del table.id_map[item_id]
@ -413,16 +413,16 @@ def many_many(book_id_val_map, db, field, allow_case_change, *args):
# Update the db link table
if deleted:
db.conn.executemany('DELETE FROM %s WHERE book=?'%table.link_table,
db.executemany('DELETE FROM %s WHERE book=?'%table.link_table,
((k,) for k in deleted))
if updated:
vals = (
(book_id, val) for book_id, vals in updated.iteritems()
for val in vals
)
db.conn.executemany('DELETE FROM %s WHERE book=?'%table.link_table,
db.executemany('DELETE FROM %s WHERE book=?'%table.link_table,
((k,) for k in updated))
db.conn.executemany('INSERT INTO {0}(book,{1}) VALUES(?, ?)'.format(
db.executemany('INSERT INTO {0}(book,{1}) VALUES(?, ?)'.format(
table.link_table, m['link_column']), vals)
if is_authors:
aus_map = {book_id:field.author_sort_for_book(book_id) for book_id
@ -433,7 +433,7 @@ def many_many(book_id_val_map, db, field, allow_case_change, *args):
remove = {item_id for item_id in table.id_map if not
table.col_book_map.get(item_id, False)}
if remove:
db.conn.executemany('DELETE FROM %s WHERE id=?'%m['table'],
db.executemany('DELETE FROM %s WHERE id=?'%m['table'],
((item_id,) for item_id in remove))
for item_id in remove:
del table.id_map[item_id]
@ -463,10 +463,10 @@ def identifiers(book_id_val_map, db, field, *args): # {{{
table.col_book_map[key] = set()
table.col_book_map[key].add(book_id)
updates.add((book_id, key, val))
db.conn.executemany('DELETE FROM identifiers WHERE book=?',
db.executemany('DELETE FROM identifiers WHERE book=?',
((x,) for x in book_id_val_map))
if updates:
db.conn.executemany('INSERT OR REPLACE INTO identifiers (book, type, val) VALUES (?, ?, ?)',
db.executemany('INSERT OR REPLACE INTO identifiers (book, type, val) VALUES (?, ?, ?)',
tuple(updates))
return set(book_id_val_map)
# }}}