mirror of https://github.com/kovidgoyal/calibre.git
New db backend now reads all metadata from the db correctly

commit 6481066d73, parent 5b805ffc2f
@@ -31,6 +31,7 @@ from calibre.db.tables import (OneToOneTable, ManyToOneTable, ManyToManyTable,
 Differences in semantics from pysqlite:

     1. execute/executemany/executescript operate in autocommit mode
+    2. There is no fetchone() method on cursor objects, instead use next()

 '''

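The two semantic differences noted in this docstring are what the rest of the commit adapts to. A minimal standalone sketch (not calibre code), assuming the third-party apsw module is installed:

import apsw

conn = apsw.Connection(':memory:')
cursor = conn.cursor()

# 1. execute() runs in autocommit mode, so no explicit commit() is needed.
cursor.execute('CREATE TABLE books (id INTEGER PRIMARY KEY, title TEXT)')
cursor.execute("INSERT INTO books (title) VALUES ('Example')")

# 2. apsw cursors have no fetchone(); they are iterators, so advance them with
# next() (spelled cursor.next() in the Python 2 era code of this commit).
title = next(cursor.execute('SELECT title FROM books'))[0]
print(title)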
@@ -128,32 +129,31 @@ class Connection(apsw.Connection): # {{{

         self.setbusytimeout(self.BUSY_TIMEOUT)
         self.execute('pragma cache_size=5000')
-        self.conn.execute('pragma temp_store=2')
+        self.execute('pragma temp_store=2')

-        encoding = self.execute('pragma encoding').fetchone()[0]
-        self.conn.create_collation('PYNOCASE', partial(pynocase,
+        encoding = self.execute('pragma encoding').next()[0]
+        self.createcollation('PYNOCASE', partial(pynocase,
             encoding=encoding))

-        self.conn.create_function('title_sort', 1, title_sort)
-        self.conn.create_function('author_to_author_sort', 1,
-                _author_to_author_sort)
-        self.conn.create_function('uuid4', 0, lambda : str(uuid.uuid4()))
+        self.createscalarfunction('title_sort', title_sort, 1)
+        self.createscalarfunction('author_to_author_sort',
+                _author_to_author_sort, 1)
+        self.createscalarfunction('uuid4', lambda : str(uuid.uuid4()),
+                0)

         # Dummy functions for dynamically created filters
-        self.conn.create_function('books_list_filter', 1, lambda x: 1)
-        self.conn.create_collation('icucollate', icu_collator)
+        self.createscalarfunction('books_list_filter', lambda x: 1, 1)
+        self.createcollation('icucollate', icu_collator)

     def create_dynamic_filter(self, name):
         f = DynamicFilter(name)
-        self.conn.create_function(name, 1, f)
+        self.createscalarfunction(name, f, 1)

     def get(self, *args, **kw):
         ans = self.cursor().execute(*args)
         if kw.get('all', True):
             return ans.fetchall()
-        for row in ans:
-            return ans[0]
+        return ans.next()[0]

     def execute(self, sql, bindings=None):
         cursor = self.cursor()
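The Connection changes above swap the pysqlite-style self.conn.create_function / create_collation calls for apsw's own methods, where the argument count follows the callable instead of preceding it. A minimal sketch (not calibre code) of that registration pattern, assuming apsw is installed; the camelCase method names match the apsw releases of this era, and newer apsw also offers snake_case spellings:

import apsw

conn = apsw.Connection(':memory:')

# apsw: createscalarfunction(name, callable, number_of_args)
# pysqlite: create_function(name, number_of_args, callable)
conn.createscalarfunction('lower_demo', lambda s: s.lower(), 1)

# Collation callback returns negative, zero or positive, like cmp().
conn.createcollation('nocase_demo',
        lambda a, b: (a.lower() > b.lower()) - (a.lower() < b.lower()))

cursor = conn.cursor()
cursor.execute('CREATE TABLE t (x TEXT)')
cursor.executemany('INSERT INTO t VALUES (?)', [('B',), ('a',)])
for (val,) in cursor.execute(
        'SELECT lower_demo(x) FROM t ORDER BY x COLLATE nocase_demo'):
    print(val)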
@@ -169,7 +169,7 @@ class Connection(apsw.Connection): # {{{
         return self.cursor().execute(sql)
 # }}}

-class DB(object, SchemaUpgrade):
+class DB(SchemaUpgrade):

     PATH_LIMIT = 40 if iswindows else 100
     WINDOWS_LIBRARY_PATH_LIMIT = 75
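A plausible reason for the base-class change above: once SchemaUpgrade is itself a new-style class, listing object before it produces an inconsistent MRO. A minimal sketch (hypothetical stand-in for SchemaUpgrade) of that failure:

class SchemaUpgrade(object):
    pass

try:
    # Equivalent to "class DB(object, SchemaUpgrade)": object may not precede
    # another new-style base in the bases tuple.
    DB = type('DB', (object, SchemaUpgrade), {})
except TypeError as err:
    print(err)  # Cannot create a consistent method resolution order (MRO) ...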
@@ -516,12 +516,14 @@ class DB(object, SchemaUpgrade):
     def initialize_tables(self): # {{{
         tables = self.tables = {}
         for col in ('title', 'sort', 'author_sort', 'series_index', 'comments',
-                'timestamp', 'published', 'uuid', 'path', 'cover',
+                'timestamp', 'pubdate', 'uuid', 'path', 'cover',
                 'last_modified'):
             metadata = self.field_metadata[col].copy()
-            if metadata['table'] is None:
-                metadata['table'], metadata['column'] == 'books', ('has_cover'
+            if not metadata['table']:
+                metadata['table'], metadata['column'] = 'books', ('has_cover'
                         if col == 'cover' else col)
+            if not metadata['column']:
+                metadata['column'] = col
             tables[col] = OneToOneTable(col, metadata)

         for col in ('series', 'publisher', 'rating'):
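The initialize_tables hunk above fixes an == that should have been = and normalises the field metadata so that fields without their own table are read from the books table, with 'cover' mapping to the has_cover column. A standalone sketch (hypothetical helper, not calibre code) of that normalisation:

def normalise(col, metadata):
    # Fields with no dedicated table live in the books table itself.
    if not metadata.get('table'):
        metadata['table'], metadata['column'] = 'books', (
                'has_cover' if col == 'cover' else col)
    # Fields with a table but no explicit column default to the field name.
    if not metadata.get('column'):
        metadata['column'] = col
    return metadata

print(normalise('cover', {'table': None, 'column': None}))
print(normalise('timestamp', {'table': None, 'column': None}))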
@@ -538,6 +540,7 @@ class DB(object, SchemaUpgrade):
         tables['size'] = SizeTable('size', self.field_metadata['size'].copy())

         for label, data in self.custom_column_label_map.iteritems():
+            label = '#' + label
             metadata = self.field_metadata[label].copy()
             link_table = self.custom_table_names(data['num'])[1]

@@ -562,11 +565,11 @@ class DB(object, SchemaUpgrade):
     @property
     def conn(self):
         if self._conn is None:
-            self._conn = apsw.Connection(self.dbpath)
+            self._conn = Connection(self.dbpath)
             if self._exists and self.user_version == 0:
                 self._conn.close()
                 os.remove(self.dbpath)
-                self._conn = apsw.Connection(self.dbpath)
+                self._conn = Connection(self.dbpath)
         return self._conn

     @dynamic_property
@@ -641,9 +644,11 @@ class DB(object, SchemaUpgrade):
         # the db while we are reading
         for table in self.tables.itervalues():
             try:
-                table.read()
+                table.read(self)
             except:
                 prints('Failed to read table:', table.name)
+                import pprint
+                pprint.pprint(table.metadata)
                 raise

     # }}}
@@ -92,7 +92,7 @@ class ManyToOneTable(Table):

     def read_id_maps(self, db):
         for row in db.conn.execute('SELECT id, {0} FROM {1}'.format(
-            self.metadata['name'], self.metadata['table'])):
+            self.metadata['column'], self.metadata['table'])):
             if row[1]:
                 self.id_map[row[0]] = self.unserialize(row[1])

@@ -102,7 +102,7 @@ class ManyToOneTable(Table):
             self.metadata['link_column'], self.link_table)):
             if row[1] not in self.col_book_map:
                 self.col_book_map[row[1]] = []
-            self.col_book_map.append(row[0])
+            self.col_book_map[row[1]].append(row[0])
             self.book_col_map[row[0]] = row[1]

 class ManyToManyTable(ManyToOneTable):
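The one-line fix above, repeated in the hunks that follow, replaces a call to .append() on the col_book_map dict, which would raise AttributeError, with an append to the per-item list. A standalone sketch (made-up data) of the two maps the corrected code builds for a many-to-one field:

rows = [(1, 10), (2, 10), (3, 11)]  # (book_id, item_id) pairs from a link table
col_book_map, book_col_map = {}, {}
for book_id, item_id in rows:
    if item_id not in col_book_map:
        col_book_map[item_id] = []
    col_book_map[item_id].append(book_id)  # the fix: append to the item's list
    book_col_map[book_id] = item_id
print(col_book_map)  # {10: [1, 2], 11: [3]}
print(book_col_map)  # {1: 10, 2: 10, 3: 11}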
@@ -119,7 +119,7 @@ class ManyToManyTable(ManyToOneTable):
             self.metadata['link_column'], self.link_table)):
             if row[1] not in self.col_book_map:
                 self.col_book_map[row[1]] = []
-            self.col_book_map.append(row[0])
+            self.col_book_map[row[1]].append(row[0])
             if row[0] not in self.book_col_map:
                 self.book_col_map[row[0]] = []
             self.book_col_map[row[0]].append(row[1])
@@ -145,7 +145,7 @@ class FormatsTable(ManyToManyTable):
             if row[1] is not None:
                 if row[1] not in self.col_book_map:
                     self.col_book_map[row[1]] = []
-                self.col_book_map.append(row[0])
+                self.col_book_map[row[1]].append(row[0])
                 if row[0] not in self.book_col_map:
                     self.book_col_map[row[0]] = []
                 self.book_col_map[row[0]].append((row[1], row[2]))
@@ -160,7 +160,7 @@ class IdentifiersTable(ManyToManyTable):
             if row[1] is not None and row[2] is not None:
                 if row[1] not in self.col_book_map:
                     self.col_book_map[row[1]] = []
-                self.col_book_map.append(row[0])
+                self.col_book_map[row[1]].append(row[0])
                 if row[0] not in self.book_col_map:
                     self.book_col_map[row[0]] = []
                 self.book_col_map[row[0]].append((row[1], row[2]))
@@ -220,8 +220,8 @@ class FieldMetadata(dict):
                 'is_custom':False,
                 'is_category':False,
                 'is_csp': False}),
-        ('comments', {'table':None,
-                'column':None,
+        ('comments', {'table':'comments',
+                'column':'text',
                 'datatype':'text',
                 'is_multiple':{},
                 'kind':'field',