mirror of https://github.com/kovidgoyal/calibre.git
New db backend: Read custom column tables
This commit is contained in:
parent fb5ef977bf
commit 42b74a7634

@@ -22,7 +22,7 @@ from calibre.library.field_metadata import FieldMetadata
 from calibre.ebooks.metadata import title_sort, author_to_author_sort
 from calibre.utils.icu import strcmp
 from calibre.utils.config import to_json, from_json, prefs, tweaks
-from calibre.utils.date import utcfromtimestamp
+from calibre.utils.date import utcfromtimestamp, parse_date
 from calibre.db.tables import (OneToOneTable, ManyToOneTable, ManyToManyTable,
         SizeTable, FormatsTable, AuthorsTable, IdentifiersTable)
 # }}}
@@ -248,7 +248,179 @@ class DB(object, SchemaUpgrade):
         UPDATE authors SET sort=author_to_author_sort(name) WHERE sort IS NULL;
         ''')

-    def initialize_prefs(self, default_prefs):
+        self.initialize_custom_columns()
+        self.initialize_tables()
+
+    def initialize_custom_columns(self): # {{{
+        with self.conn:
+            # Delete previously marked custom columns
+            for record in self.conn.get(
+                    'SELECT id FROM custom_columns WHERE mark_for_delete=1'):
+                num = record[0]
+                table, lt = self.custom_table_names(num)
+                self.conn.execute('''\
+                        DROP INDEX IF EXISTS {table}_idx;
+                        DROP INDEX IF EXISTS {lt}_aidx;
+                        DROP INDEX IF EXISTS {lt}_bidx;
+                        DROP TRIGGER IF EXISTS fkc_update_{lt}_a;
+                        DROP TRIGGER IF EXISTS fkc_update_{lt}_b;
+                        DROP TRIGGER IF EXISTS fkc_insert_{lt};
+                        DROP TRIGGER IF EXISTS fkc_delete_{lt};
+                        DROP TRIGGER IF EXISTS fkc_insert_{table};
+                        DROP TRIGGER IF EXISTS fkc_delete_{table};
+                        DROP VIEW IF EXISTS tag_browser_{table};
+                        DROP VIEW IF EXISTS tag_browser_filtered_{table};
+                        DROP TABLE IF EXISTS {table};
+                        DROP TABLE IF EXISTS {lt};
+                        '''.format(table=table, lt=lt)
+                )
+            self.conn.execute('DELETE FROM custom_columns WHERE mark_for_delete=1')
+
+        # Load metadata for custom columns
+        self.custom_column_label_map, self.custom_column_num_map = {}, {}
+        triggers = []
+        remove = []
+        custom_tables = self.custom_tables
+        for record in self.conn.get(
+                'SELECT label,name,datatype,editable,display,normalized,id,is_multiple FROM custom_columns'):
+            data = {
+                    'label':record[0],
+                    'name':record[1],
+                    'datatype':record[2],
+                    'editable':bool(record[3]),
+                    'display':json.loads(record[4]),
+                    'normalized':bool(record[5]),
+                    'num':record[6],
+                    'is_multiple':bool(record[7]),
+                    }
+            if data['display'] is None:
+                data['display'] = {}
+            # set up the is_multiple separator dict
+            if data['is_multiple']:
+                if data['display'].get('is_names', False):
+                    seps = {'cache_to_list': '|', 'ui_to_list': '&', 'list_to_ui': ' & '}
+                elif data['datatype'] == 'composite':
+                    seps = {'cache_to_list': ',', 'ui_to_list': ',', 'list_to_ui': ', '}
+                else:
+                    seps = {'cache_to_list': '|', 'ui_to_list': ',', 'list_to_ui': ', '}
+            else:
+                seps = {}
+            data['multiple_seps'] = seps
+
+            table, lt = self.custom_table_names(data['num'])
+            if table not in custom_tables or (data['normalized'] and lt not in
+                    custom_tables):
+                remove.append(data)
+                continue
+
+            self.custom_column_label_map[data['label']] = data['num']
+            self.custom_column_num_map[data['num']] = \
+                self.custom_column_label_map[data['label']] = data
+
+            # Create Foreign Key triggers
+            if data['normalized']:
+                trigger = 'DELETE FROM %s WHERE book=OLD.id;'%lt
+            else:
+                trigger = 'DELETE FROM %s WHERE book=OLD.id;'%table
+            triggers.append(trigger)
+
+        if remove:
+            with self.conn:
+                for data in remove:
+                    prints('WARNING: Custom column %r not found, removing.' %
+                            data['label'])
+                    self.conn.execute('DELETE FROM custom_columns WHERE id=?',
+                            (data['num'],))
+
+        if triggers:
+            with self.conn:
+                self.conn.execute('''\
+                    CREATE TEMP TRIGGER custom_books_delete_trg
+                        AFTER DELETE ON books
+                        BEGIN
+                        %s
+                    END;
+                    '''%(' \n'.join(triggers)))
+
+        # Setup data adapters
+        def adapt_text(x, d):
+            if d['is_multiple']:
+                if x is None:
+                    return []
+                if isinstance(x, (str, unicode, bytes)):
+                    x = x.split(d['multiple_seps']['ui_to_list'])
+                x = [y.strip() for y in x if y.strip()]
+                x = [y.decode(preferred_encoding, 'replace') if not isinstance(y,
+                    unicode) else y for y in x]
+                return [u' '.join(y.split()) for y in x]
+            else:
+                return x if x is None or isinstance(x, unicode) else \
+                        x.decode(preferred_encoding, 'replace')
+
+        def adapt_datetime(x, d):
+            if isinstance(x, (str, unicode, bytes)):
+                x = parse_date(x, assume_utc=False, as_utc=False)
+            return x
+
+        def adapt_bool(x, d):
+            if isinstance(x, (str, unicode, bytes)):
+                x = x.lower()
+                if x == 'true':
+                    x = True
+                elif x == 'false':
+                    x = False
+                elif x == 'none':
+                    x = None
+                else:
+                    x = bool(int(x))
+            return x
+
+        def adapt_enum(x, d):
+            v = adapt_text(x, d)
+            if not v:
+                v = None
+            return v
+
+        def adapt_number(x, d):
+            if x is None:
+                return None
+            if isinstance(x, (str, unicode, bytes)):
+                if x.lower() == 'none':
+                    return None
+            if d['datatype'] == 'int':
+                return int(x)
+            return float(x)
+
+        self.custom_data_adapters = {
+                'float': adapt_number,
+                'int': adapt_number,
+                'rating':lambda x,d : x if x is None else min(10., max(0., float(x))),
+                'bool': adapt_bool,
+                'comments': lambda x,d: adapt_text(x, {'is_multiple':False}),
+                'datetime' : adapt_datetime,
+                'text':adapt_text,
+                'series':adapt_text,
+                'enumeration': adapt_enum
+                }
+
+        # Create Tag Browser categories for custom columns
+        for k in sorted(self.custom_column_label_map.iterkeys()):
+            v = self.custom_column_label_map[k]
+            if v['normalized']:
+                is_category = True
+            else:
+                is_category = False
+            is_m = v['multiple_seps']
+            tn = 'custom_column_{0}'.format(v['num'])
+            self.field_metadata.add_custom_field(label=v['label'],
+                    table=tn, column='value', datatype=v['datatype'],
+                    colnum=v['num'], name=v['name'], display=v['display'],
+                    is_multiple=is_m, is_category=is_category,
+                    is_editable=v['editable'], is_csp=False)
+
+    # }}}
+
+    def initialize_prefs(self, default_prefs): # {{{
         self.prefs = DBPrefs(self)

         if default_prefs is not None and not self._exists:
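
A note on the separators and adapters introduced above: each custom column's raw SQLite value is coerced through the adapter registered for its datatype in custom_data_adapters, and multi-valued columns are split and re-joined with the three per-column separators. The following is a standalone sketch, not calibre code; the sample column values are made up and only the 'ui_to_list' / 'list_to_ui' behaviour of adapt_text() is mirrored.

# Standalone sketch (not part of the commit): how a raw value for a
# hypothetical multi-valued text column is split, cleaned and re-joined
# for display, mirroring adapt_text() and the seps dict built above.
seps = {'cache_to_list': '|', 'ui_to_list': ',', 'list_to_ui': ', '}

def adapt_text_sketch(raw, seps):
    if raw is None:
        return []
    parts = [p.strip() for p in raw.split(seps['ui_to_list']) if p.strip()]
    return [' '.join(p.split()) for p in parts]  # collapse internal whitespace

values = adapt_text_sketch('fantasy,  science  fiction , ', seps)
print(values)                           # ['fantasy', 'science fiction']
print(seps['list_to_ui'].join(values))  # fantasy, science fiction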
@@ -339,6 +511,53 @@ class DB(object, SchemaUpgrade):
                        cats_changed = True
            if cats_changed:
                self.prefs.set('user_categories', user_cats)
+    # }}}
+
+    def initialize_tables(self): # {{{
+        tables = self.tables = {}
+        for col in ('title', 'sort', 'author_sort', 'series_index', 'comments',
+                'timestamp', 'published', 'uuid', 'path', 'cover',
+                'last_modified'):
+            metadata = self.field_metadata[col].copy()
+            if metadata['table'] is None:
+                metadata['table'], metadata['column'] == 'books', ('has_cover'
+                        if col == 'cover' else col)
+            tables[col] = OneToOneTable(col, metadata)
+
+        for col in ('series', 'publisher', 'rating'):
+            tables[col] = ManyToOneTable(col, self.field_metadata[col].copy())
+
+        for col in ('authors', 'tags', 'formats', 'identifiers'):
+            cls = {
+                    'authors':AuthorsTable,
+                    'formats':FormatsTable,
+                    'identifiers':IdentifiersTable,
+                    }.get(col, ManyToManyTable)
+            tables[col] = cls(col, self.field_metadata[col].copy())
+
+        tables['size'] = SizeTable('size', self.field_metadata['size'].copy())
+
+        for label, data in self.custom_column_label_map.iteritems():
+            metadata = self.field_metadata[label].copy()
+            link_table = self.custom_table_names(data['num'])[1]
+
+            if data['normalized']:
+                if metadata['is_multiple']:
+                    tables[label] = ManyToManyTable(label, metadata,
+                            link_table=link_table)
+                else:
+                    tables[label] = ManyToOneTable(label, metadata,
+                            link_table=link_table)
+                if metadata['datatype'] == 'series':
+                    # Create series index table
+                    label += '_index'
+                    metadata = self.field_metadata[label].copy()
+                    metadata['column'] = 'extra'
+                    metadata['table'] = link_table
+                    tables[label] = OneToOneTable(label, metadata)
+            else:
+                tables[label] = OneToOneTable(label, metadata)
+    # }}}
+
     @property
     def conn(self):
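
The dispatch for custom columns in initialize_tables() reduces to a small rule: non-normalized columns get a one-to-one table keyed by book, normalized single-valued columns a many-to-one link table, and normalized multi-valued columns a many-to-many link table (plus an extra '_index' entry for series columns). A toy restatement of that rule, illustrative only and not calibre code:

# Illustrative only: which Table class the loop above picks for a custom
# column, given the 'normalized' and 'is_multiple' flags from its metadata.
def table_class_for(normalized, is_multiple):
    if not normalized:
        return 'OneToOneTable'
    return 'ManyToManyTable' if is_multiple else 'ManyToOneTable'

print(table_class_for(False, False))  # OneToOneTable   (e.g. a comments-like column)
print(table_class_for(True, False))   # ManyToOneTable  (e.g. a series-like column)
print(table_class_for(True, True))    # ManyToManyTable (e.g. a tags-like column)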
@@ -372,6 +591,15 @@ class DB(object, SchemaUpgrade):

     # Database layer API {{{

+    def custom_table_names(self, num):
+        return 'custom_column_%d'%num, 'books_custom_column_%d_link'%num
+
+    @property
+    def custom_tables(self):
+        return set([x[0] for x in self.conn.get(
+            'SELECT name FROM sqlite_master WHERE type="table" AND '
+            '(name GLOB "custom_column_*" OR name GLOB "books_custom_column_*")')])
+
     @classmethod
     def exists_at(cls, path):
         return path and os.path.exists(os.path.join(path, 'metadata.db'))
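
custom_table_names() encodes the naming convention used throughout this commit: custom column number N stores its values in custom_column_N and links them to books through books_custom_column_N_link, which is also what the GLOB patterns in the custom_tables property match. Shown standalone with an example column number:

# Same naming rule as custom_table_names() above, shown standalone.
def custom_table_names(num):
    return 'custom_column_%d' % num, 'books_custom_column_%d_link' % num

print(custom_table_names(2))
# ('custom_column_2', 'books_custom_column_2_link')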
@@ -405,39 +633,18 @@ class DB(object, SchemaUpgrade):
         return utcfromtimestamp(os.stat(self.dbpath).st_mtime)

     def read_tables(self):
-        tables = {}
-        for col in ('title', 'sort', 'author_sort', 'series_index', 'comments',
-                'timestamp', 'published', 'uuid', 'path', 'cover',
-                'last_modified'):
-            metadata = self.field_metadata[col].copy()
-            if metadata['table'] is None:
-                metadata['table'], metadata['column'] == 'books', ('has_cover'
-                        if col == 'cover' else col)
-            tables[col] = OneToOneTable(col, metadata)
-
-        for col in ('series', 'publisher', 'rating'):
-            tables[col] = ManyToOneTable(col, self.field_metadata[col].copy())
-
-        for col in ('authors', 'tags', 'formats', 'identifiers'):
-            cls = {
-                    'authors':AuthorsTable,
-                    'formats':FormatsTable,
-                    'identifiers':IdentifiersTable,
-                    }.get(col, ManyToManyTable)
-            tables[col] = cls(col, self.field_metadata[col].copy())
-
-        tables['size'] = SizeTable('size', self.field_metadata['size'].copy())
-
+        '''
+        Read all data from the db into the python in-memory tables
+        '''
+
         with self.conn: # Use a single transaction, to ensure nothing modifies
             # the db while we are reading
-            for table in tables.itervalues():
+            for table in self.tables.itervalues():
                 try:
                     table.read()
                 except:
                     prints('Failed to read table:', table.name)
                     raise

-        return tables
-
     # }}}
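
read_tables() no longer builds the table objects itself; initialize_tables() has already populated self.tables, so this method only asks each table to load its in-memory maps inside a single transaction and names the offending table before re-raising on failure. A toy sketch of that loop, not calibre code; ToyTable and the rows are made up:

# Toy sketch of the read loop above: each table fills its in-memory map,
# and a failure is reported with the table name before re-raising.
class ToyTable(object):
    def __init__(self, name):
        self.name = name
        self.book_col_map = {}

    def read(self, rows):
        for book_id, value in rows:
            self.book_col_map[book_id] = value

tables = {'title': ToyTable('title')}
for table in tables.values():
    try:
        table.read([(1, 'A Book'), (2, 'Another Book')])
    except Exception:
        print('Failed to read table:', table.name)
        raise
print(tables['title'].book_col_map)  # {1: 'A Book', 2: 'Another Book'}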
@@ -32,7 +32,7 @@ def _c_convert_timestamp(val):

 class Table(object):

-    def __init__(self, name, metadata):
+    def __init__(self, name, metadata, link_table=None):
         self.name, self.metadata = name, metadata

         # self.adapt() maps values from the db to python objects
@@ -46,6 +46,9 @@ class Table(object):
             # Legacy
             self.adapt = lambda x: x.replace('|', ',') if x else None

+        self.link_table = (link_table if link_table else
+                'books_%s_link'%self.metadata['table'])
+
 class OneToOneTable(Table):

     '''
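
Table now remembers which link table joins it to books: built-in many-to-one and many-to-many columns keep the old books_<table>_link default, while custom columns pass in the books_custom_column_N_link name computed by the backend. A minimal restatement of the fallback, shown standalone:

# Standalone restatement of the link_table default added above.
def resolve_link_table(table_name, link_table=None):
    return link_table if link_table else 'books_%s_link' % table_name

print(resolve_link_table('tags'))                   # books_tags_link (built-in column)
print(resolve_link_table('custom_column_2',
        'books_custom_column_2_link'))              # explicit name for a custom column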
@@ -95,8 +98,8 @@ class ManyToOneTable(Table):

     def read_maps(self, db):
         for row in db.conn.execute(
-                'SELECT book, {0} FROM books_{1}_link'.format(
-                    self.metadata['link_column'], self.metadata['table'])):
+                'SELECT book, {0} FROM {1}'.format(
+                    self.metadata['link_column'], self.link_table)):
             if row[1] not in self.col_book_map:
                 self.col_book_map[row[1]] = []
             self.col_book_map.append(row[0])
@@ -112,8 +115,8 @@ class ManyToManyTable(ManyToOneTable):

     def read_maps(self, db):
         for row in db.conn.execute(
-                'SELECT book, {0} FROM books_{1}_link'.format(
-                    self.metadata['link_column'], self.metadata['table'])):
+                'SELECT book, {0} FROM {1}'.format(
+                    self.metadata['link_column'], self.link_table)):
             if row[1] not in self.col_book_map:
                 self.col_book_map[row[1]] = []
             self.col_book_map.append(row[0])
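
With the two read_maps() changes, the SELECT is built from self.link_table instead of hard-coding books_<table>_link, so the same code path reads both built-in and custom link tables. Illustrative queries only; the column and table names for the custom case assume the naming conventions described above:

# Illustrative only: the queries read_maps() would issue after this change.
query = 'SELECT book, {0} FROM {1}'
print(query.format('tag', 'books_tags_link'))               # built-in tags column
print(query.format('value', 'books_custom_column_2_link'))  # a custom column's link table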