Add ability to create a composite column containing the virtual libraries that the book is a member of.

Intermediate commit -- still testing
This commit is contained in:
Charles Haley 2013-07-12 17:22:45 +02:00
parent 7ad78fc447
commit 8a3cb9b977
7 changed files with 119 additions and 48 deletions

View File

@ -332,6 +332,7 @@ class SearchRestrictionMixin(object):
virt_libs = db.prefs.get('virtual_libraries', {})
virt_libs[name] = search
db.prefs.set('virtual_libraries', virt_libs)
db.data.invalidate_virtual_libraries_caches(db)
def do_create_edit(self, name=None):
db = self.library_view.model().db
@ -341,8 +342,11 @@ class SearchRestrictionMixin(object):
if name:
self._remove_vl(name, reapply=False)
self.add_virtual_library(db, cd.library_name, cd.library_search)
db.data.invalidate_virtual_libraries_caches(db)
if not name or name == db.data.get_base_restriction_name():
self.apply_virtual_library(cd.library_name)
else:
self.tags_view.recount()
def virtual_library_clicked(self):
m = self.virtual_library_menu
@ -462,6 +466,9 @@ class SearchRestrictionMixin(object):
default_yes=False):
return
self._remove_vl(name, reapply=True)
db = self.library_view.model().db
db.data.invalidate_virtual_libraries_caches(db)
self.tags_view.recount()
def _remove_vl(self, name, reapply=True):
db = self.library_view.model().db

View File

@ -2,7 +2,7 @@
# vim:fileencoding=UTF-8:ts=4:sw=4:sta:et:sts=4:ai
from __future__ import with_statement
__license__ = 'GPL v3'
__license__ = 'GPL v3'
__copyright__ = '2010, Kovid Goyal <kovid@kovidgoyal.net>'
__docformat__ = 'restructuredtext en'
@ -144,7 +144,8 @@ def force_to_bool(val):
class CacheRow(list): # {{{
def __init__(self, db, composites, val, series_col, series_sort_col):
def __init__(self, db, composites, val, series_col, series_sort_col,
virtual_library_col):
self.db = db
self._composites = composites
list.__init__(self, val)
@ -152,6 +153,8 @@ class CacheRow(list): # {{{
self._series_col = series_col
self._series_sort_col = series_sort_col
self._series_sort = None
self._virt_lib_col = virtual_library_col
self._virt_libs = None
def __getitem__(self, col):
if self._must_do:
@ -171,7 +174,7 @@ class CacheRow(list): # {{{
mi = self.db.get_metadata(id_, index_is_id=True,
get_user_categories=False)
for c in self._composites:
self[c] = mi.get(self._composites[c])
self[c] = mi.get(self._composites[c])
if col == self._series_sort_col and self._series_sort is None:
if self[self._series_col]:
self._series_sort = title_sort(self[self._series_col])
@ -179,6 +182,26 @@ class CacheRow(list): # {{{
else:
self._series_sort = ''
self[self._series_sort_col] = ''
if col == self._virt_lib_col and self._virt_libs is None:
try:
if not getattr(self.db.data, '_virt_libs_computed', False):
self.db.data._ids_in_virt_libs = {}
for v,s in self.db.prefs.get('virtual_libraries', {}).iteritems():
self.db.data._ids_in_virt_libs[v] = self.db.data.search_raw(s)
self.db.data._virt_libs_computed = True
r = []
for v in self.db.prefs.get('virtual_libraries', {}).keys():
# optimize the lookup of the ID -- it is always zero
if self[0] in self.db.data._ids_in_virt_libs[v]:
r.append(v)
from calibre.utils.icu import sort_key
self._virt_libs = ", ".join(sorted(r, key=sort_key))
self[self._virt_lib_col] = self._virt_libs
except:
print len(self)
traceback.print_exc()
return list.__getitem__(self, col)
def __getslice__(self, i, j):
@ -186,7 +209,7 @@ class CacheRow(list): # {{{
def refresh_composites(self):
    """Mark every composite column in this row as stale.

    Clearing the cached values and raising the _must_do flag forces
    __getitem__ to lazily recompute the composites on next access.
    """
    for rec_index in self._composites:
        self[rec_index] = None
    self._must_do = True
# }}}
@ -206,6 +229,7 @@ class ResultCache(SearchQueryParser): # {{{
self.composites[field_metadata[key]['rec_index']] = key
self.series_col = field_metadata['series']['rec_index']
self.series_sort_col = field_metadata['series_sort']['rec_index']
self.virtual_libraries_col = field_metadata['virtual_libraries']['rec_index']
self._data = []
self._map = self._map_filtered = []
self.first_sort = True
@ -312,12 +336,12 @@ class ResultCache(SearchQueryParser): # {{{
'<=':[2, relop_le]
}
local_today = ('_today', icu_lower(_('today')))
local_yesterday = ('_yesterday', icu_lower(_('yesterday')))
local_thismonth = ('_thismonth', icu_lower(_('thismonth')))
local_daysago = icu_lower(_('daysago'))
local_daysago_len = len(local_daysago)
untrans_daysago = '_daysago'
local_today = ('_today', icu_lower(_('today')))
local_yesterday = ('_yesterday', icu_lower(_('yesterday')))
local_thismonth = ('_thismonth', icu_lower(_('thismonth')))
local_daysago = icu_lower(_('daysago'))
local_daysago_len = len(local_daysago)
untrans_daysago = '_daysago'
untrans_daysago_len = len('_daysago')
def get_dates_matches(self, location, query, candidates):
@ -413,21 +437,21 @@ class ResultCache(SearchQueryParser): # {{{
if val_func is None:
loc = self.field_metadata[location]['rec_index']
val_func = lambda item, loc=loc: item[loc]
val_func = lambda item, loc = loc: item[loc]
q = ''
cast = adjust = lambda x: x
dt = self.field_metadata[location]['datatype']
if query == 'false':
if dt == 'rating' or location == 'cover':
relop = lambda x,y: not bool(x)
relop = lambda x, y: not bool(x)
else:
relop = lambda x,y: x is None
relop = lambda x, y: x is None
elif query == 'true':
if dt == 'rating' or location == 'cover':
relop = lambda x,y: bool(x)
relop = lambda x, y: bool(x)
else:
relop = lambda x,y: x is not None
relop = lambda x, y: x is not None
else:
relop = None
for k in self.numeric_search_relops.keys():
@ -441,7 +465,7 @@ class ResultCache(SearchQueryParser): # {{{
cast = lambda x: int(x)
elif dt == 'rating':
cast = lambda x: 0 if x is None else int(x)
adjust = lambda x: x/2
adjust = lambda x: x / 2
elif dt in ('float', 'composite'):
cast = lambda x : float(x)
else: # count operation
@ -449,7 +473,7 @@ class ResultCache(SearchQueryParser): # {{{
if len(query) > 1:
mult = query[-1:].lower()
mult = {'k':1024.,'m': 1024.**2, 'g': 1024.**3}.get(mult, 1.0)
mult = {'k':1024., 'm': 1024.**2, 'g': 1024.**3}.get(mult, 1.0)
if mult != 1.0:
query = query[:-1]
else:
@ -568,12 +592,12 @@ class ResultCache(SearchQueryParser): # {{{
query = icu_lower(query)
return matchkind, query
local_no = icu_lower(_('no'))
local_yes = icu_lower(_('yes'))
local_no = icu_lower(_('no'))
local_yes = icu_lower(_('yes'))
local_unchecked = icu_lower(_('unchecked'))
local_checked = icu_lower(_('checked'))
local_empty = icu_lower(_('empty'))
local_blank = icu_lower(_('blank'))
local_checked = icu_lower(_('checked'))
local_empty = icu_lower(_('empty'))
local_blank = icu_lower(_('blank'))
local_bool_values = (
local_no, local_unchecked, '_no', 'false',
local_yes, local_checked, '_yes', 'true',
@ -696,8 +720,8 @@ class ResultCache(SearchQueryParser): # {{{
if fm['is_multiple'] and \
len(query) > 1 and query.startswith('#') and \
query[1:1] in '=<>!':
vf = lambda item, loc=fm['rec_index'], \
ms=fm['is_multiple']['cache_to_list']:\
vf = lambda item, loc = fm['rec_index'], \
ms = fm['is_multiple']['cache_to_list']:\
len(item[loc].split(ms)) if item[loc] is not None else 0
return self.get_numeric_matches(location, query[1:],
candidates, val_func=vf)
@ -707,7 +731,7 @@ class ResultCache(SearchQueryParser): # {{{
if fm.get('is_csp', False):
if location == 'identifiers' and original_location == 'isbn':
return self.get_keypair_matches('identifiers',
'=isbn:'+query, candidates)
'=isbn:' + query, candidates)
return self.get_keypair_matches(location, query, candidates)
# check for user categories
@ -759,7 +783,7 @@ class ResultCache(SearchQueryParser): # {{{
q = canonicalize_lang(query)
if q is None:
lm = lang_map()
rm = {v.lower():k for k,v in lm.iteritems()}
rm = {v.lower():k for k, v in lm.iteritems()}
q = rm.get(query, query)
else:
q = query
@ -772,7 +796,7 @@ class ResultCache(SearchQueryParser): # {{{
if not item[loc]:
if q == 'false' and matchkind == CONTAINS_MATCH:
matches.add(item[0])
continue # item is empty. No possible matches below
continue # item is empty. No possible matches below
if q == 'false'and matchkind == CONTAINS_MATCH:
# Field has something in it, so a false query does not match
continue
@ -816,6 +840,13 @@ class ResultCache(SearchQueryParser): # {{{
current_candidates -= matches
return matches
def invalidate_virtual_libraries_caches(self, db):
    """Discard any cached virtual-library membership data.

    NOTE(review): implemented as a full cache refresh from *db*, which
    rebuilds every row -- presumably heavier than a targeted invalidation
    of just the virtual-library columns; confirm this is intended.
    """
    self.refresh(db)
def search_raw(self, query):
    """Parse *query* and return the raw match set, bypassing any
    search restriction handling."""
    return self.parse(query)
def search(self, query, return_matches=False):
ans = self.search_getting_ids(query, self.search_restriction,
set_restriction_count=True)
@ -973,10 +1004,11 @@ class ResultCache(SearchQueryParser): # {{{
try:
self._data[id] = CacheRow(db, self.composites,
db.conn.get('SELECT * from meta2 WHERE id=?', (id,))[0],
self.series_col, self.series_sort_col)
self.series_col, self.series_sort_col,
self.virtual_libraries_col)
self._data[id].append(db.book_on_device_string(id))
self._data[id].append(self.marked_ids_dict.get(id, None))
self._data[id].append(None)
self._data[id].extend((self.marked_ids_dict.get(id, None), None, None))
self._virt_libs_computed = False
self._uuid_map[self._data[id][self._uuid_column_index]] = id
except IndexError:
return None
@ -989,14 +1021,15 @@ class ResultCache(SearchQueryParser): # {{{
def books_added(self, ids, db):
if not ids:
return
self._data.extend(repeat(None, max(ids)-len(self._data)+2))
self._data.extend(repeat(None, max(ids) - len(self._data) + 2))
for id in ids:
self._data[id] = CacheRow(db, self.composites,
db.conn.get('SELECT * from meta2 WHERE id=?', (id,))[0],
self.series_col, self.series_sort_col)
self.series_col, self.series_sort_col,
self.virtual_libraries_col)
self._data[id].append(db.book_on_device_string(id))
self._data[id].append(self.marked_ids_dict.get(id, None))
self._data[id].append(None) # Series sort column
self._data[id].extend((self.marked_ids_dict.get(id, None), None, None))
self._virt_libs_computed = False
self._uuid_map[self._data[id][self._uuid_column_index]] = id
self._map[0:0] = ids
self._map_filtered[0:0] = ids
@ -1020,20 +1053,22 @@ class ResultCache(SearchQueryParser): # {{{
db.initialize_template_cache()
temp = db.conn.get('SELECT * FROM meta2')
self._data = list(itertools.repeat(None, temp[-1][0]+2)) if temp else []
self._data = list(itertools.repeat(None, temp[-1][0] + 2)) if temp else []
for r in temp:
self._data[r[0]] = CacheRow(db, self.composites, r,
self.series_col, self.series_sort_col)
self.series_col, self.series_sort_col,
self.virtual_libraries_col)
self._uuid_map[self._data[r[0]][self._uuid_column_index]] = r[0]
for item in self._data:
if item is not None:
item.append(db.book_on_device_string(item[0]))
# Temp mark and series_sort columns
item.extend((None, None))
# Temp mark, series_sort, virtual_library columns
item.extend((None, None, None))
self._virt_libs_computed = False
marked_col = self.FIELD_MAP['marked']
for id_,val in self.marked_ids_dict.iteritems():
for id_, val in self.marked_ids_dict.iteritems():
try:
self._data[id_][marked_col] = val
except:
@ -1134,7 +1169,7 @@ class SortKeyGenerator(object):
for i, candidate in enumerate(
('B', 'KB', 'MB', 'GB', 'TB', 'PB', 'EB')):
if val.endswith(candidate):
p = 1024**(i)
p = 1024 ** (i)
val = val[:-len(candidate)].strip()
break
val = locale.atof(val) * p

View File

@ -575,7 +575,7 @@ def command_set_metadata(args, dbpath):
for key in sorted(db.field_metadata.all_field_keys()):
m = db.field_metadata[key]
if (key not in {'formats', 'series_sort', 'ondevice', 'path',
'last_modified'} and m['is_editable'] and m['name']):
'virtual_libraries', 'last_modified'} and m['is_editable'] and m['name']):
yield key, m
if m['datatype'] == 'series':
si = m.copy()

View File

@ -449,6 +449,8 @@ class LibraryDatabase2(LibraryDatabase, SchemaUpgrade, CustomColumns):
self.field_metadata.set_field_record_index('marked', base, prefer_custom=False)
self.FIELD_MAP['series_sort'] = base = base+1
self.field_metadata.set_field_record_index('series_sort', base, prefer_custom=False)
self.FIELD_MAP['virtual_libraries'] = base = base+1
self.field_metadata.set_field_record_index('virtual_libraries', base, prefer_custom=False)
script = '''
DROP VIEW IF EXISTS meta2;
@ -992,6 +994,10 @@ class LibraryDatabase2(LibraryDatabase, SchemaUpgrade, CustomColumns):
mi.book_size = row[fm['size']]
mi.ondevice_col= row[fm['ondevice']]
mi.last_modified = row[fm['last_modified']]
mi._base_db_row = row # So the formatter functions can see the underlying data
mi._virt_lib_column = fm['virtual_libraries']
formats = row[fm['formats']]
mi.format_metadata = {}
if not formats:

View File

@ -387,6 +387,16 @@ class FieldMetadata(dict):
'is_custom':False,
'is_category':False,
'is_csp': False}),
('virtual_libraries', {'table':None,
'column':None,
'datatype':'text',
'is_multiple':{},
'kind':'field',
'name':_('Virtual Libraries'),
'search_terms':['virtual_libraries'],
'is_custom':False,
'is_category':False,
'is_csp': False}),
]
# }}}

View File

@ -1209,9 +1209,19 @@ class BuiltinFinishFormatting(BuiltinFormatterFunction):
return val
return prefix + formatter._do_format(val, fmt) + suffix
class BuiltinBookInVirtualLibraries(BuiltinFormatterFunction):
    # Template function exposing which virtual libraries contain the
    # current book. Takes no template arguments.
    name = 'book_in_virtual_libraries'
    arg_count = 0
    category = 'Get values from metadata'
    __doc__ = doc = _('book_in_virtual_libraries() -- returns a list of '
            'virtual libraries that this book is in.')

    def evaluate(self, formatter, kwargs, mi, locals_):
        # _base_db_row and _virt_lib_column are attached to mi by the
        # database layer (get_metadata in LibraryDatabase2); the row holds
        # the precomputed, comma-separated virtual-library list.
        # (Fixed: stray space inside the subscript in the original.)
        return mi._base_db_row[mi._virt_lib_column]
_formatter_builtins = [
BuiltinAdd(), BuiltinAnd(), BuiltinApproximateFormats(),
BuiltinAssign(), BuiltinBooksize(),
BuiltinAssign(), BuiltinBookInVirtualLibraries(), BuiltinBooksize(),
BuiltinCapitalize(), BuiltinCmp(), BuiltinContains(), BuiltinCount(),
BuiltinCurrentLibraryName(), BuiltinCurrentLibraryPath(),
BuiltinDaysBetween(), BuiltinDivide(), BuiltinEval(), BuiltinFirstNonEmpty(),

View File

@ -294,6 +294,7 @@ class SearchQueryParser(object):
def __init__(self, locations, test=False, optimize=False):
    # Delegate the real setup (grammar/locations wiring) to the shared
    # initializer so subclasses can re-run it via sqp_change_locations.
    self.sqp_initialize(locations, test=test, optimize=optimize)
    # Cache of parse trees keyed by the raw query string; consulted by
    # _parse() to skip re-parsing queries seen before.
    self.sqp_parsed_search_cache = {}
    self.parser = Parser()
def sqp_change_locations(self, locations):
@ -308,8 +309,7 @@ class SearchQueryParser(object):
# empty the list of searches used for recursion testing
self.recurse_level = 0
self.searches_seen = set([])
candidates = self.universal_set()
return self._parse(query, candidates)
return self._parse(query)
# this parse is used internally because it doesn't clear the
# recursive search test list. However, we permit seeing the
@ -317,10 +317,13 @@ class SearchQueryParser(object):
# another search.
def _parse(self, query, candidates=None):
self.recurse_level += 1
try:
res = self.parser.parse(query, self.locations)
except RuntimeError:
raise ParseException(_('Failed to parse query, recursion limit reached: %s')%repr(query))
res = self.sqp_parsed_search_cache.get(query, None)
if res is None:
try:
res = self.parser.parse(query, self.locations)
self.sqp_parsed_search_cache[query] = res
except RuntimeError:
raise ParseException(_('Failed to parse query, recursion limit reached: %s')%repr(query))
if candidates is None:
candidates = self.universal_set()
t = self.evaluate(res, candidates)