Don't use the search cache if the search involves dates.

Charles Haley 2020-10-04 12:15:14 +01:00
parent 1c3c302e1a
commit df3c64eb92
2 changed files with 89 additions and 41 deletions
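Background, not part of the commit: the result of a date-relative search depends on when it is evaluated, so a result set cached under the query string can go stale while the key stays the same. A minimal illustration of that hazard (the field name and query syntax here are only indicative, not calibre's exact grammar):

from datetime import datetime, timedelta

# id -> last_modified timestamp of a pretend library
books = {1: datetime(2020, 10, 1), 2: datetime(2020, 10, 3)}

def modified_within(days, now):
    # stand-in for evaluating a query such as last_modified:>3daysago
    cutoff = now - timedelta(days=days)
    return {book_id for book_id, ts in books.items() if ts >= cutoff}

cache = {}
query = 'last_modified:>3daysago'
cache[query] = modified_within(3, now=datetime(2020, 10, 4))   # {1, 2}

# One day later the correct answer has shrunk, but the cache key is unchanged,
# so a cache hit would silently return the stale set {1, 2}.
assert modified_within(3, now=datetime(2020, 10, 5)) == {2}
assert cache[query] == {1, 2}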


@@ -926,6 +926,20 @@ class Search(object):
         finally:
             sqp.dbcache = sqp.lookup_saved_search = None
 
+    def _use_cache(self, sqp, dbcache, query):
+        if query:
+            for name, value in sqp.get_queried_fields(query):
+                if name == 'template' and '#@#:d:' in value:
+                    return False
+                elif name in dbcache.field_metadata.all_field_keys():
+                    fm = dbcache.field_metadata[name]
+                    if fm['datatype'] == 'datetime':
+                        return False
+                    if fm['datatype'] == 'composite':
+                        if fm.get('display', {}).get('composite_sort', '') == 'date':
+                            return False
+        return True
+
     def _do_search(self, sqp, query, search_restriction, dbcache, book_ids=None):
         ''' Do the search, caching the results. Results are cached only if the
         search is on the full library and no virtual field is searched on '''
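For illustration, a small standalone sketch, not part of the commit, of the decision _use_cache() makes. FIELD_METADATA and cacheable() are hypothetical stand-ins for dbcache.field_metadata and the real method; the (field, value) pairs are the kind of output get_queried_fields() yields.

FIELD_METADATA = {
    'title':   {'datatype': 'text'},
    'pubdate': {'datatype': 'datetime'},
    '#mydate': {'datatype': 'composite', 'display': {'composite_sort': 'date'}},
}

def cacheable(queried_fields):
    # Mirrors _use_cache(): any date-valued field disables the cache.
    for name, value in queried_fields:
        if name == 'template' and '#@#:d:' in value:
            return False  # template search whose comparison type is date
        fm = FIELD_METADATA.get(name)
        if fm is None:
            continue
        if fm['datatype'] == 'datetime':
            return False  # real date column, e.g. pubdate
        if (fm['datatype'] == 'composite'
                and fm.get('display', {}).get('composite_sort', '') == 'date'):
            return False  # composite column that sorts as a date
    return True

print(cacheable([('title', 'dune')]))                        # True  -> cache usable
print(cacheable([('title', 'dune'), ('pubdate', '>2019')]))  # False -> dates involved
print(cacheable([('#mydate', 'today')]))                     # False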
@@ -935,30 +949,36 @@ class Search(object):
             query = query.decode('utf-8')
         query = query.strip()
-        if book_ids is None and query and not search_restriction:
+        use_cache = self._use_cache(sqp, dbcache, query)
+        if use_cache and book_ids is None and query and not search_restriction:
             cached = self.cache.get(query)
             if cached is not None:
                 return cached
 
         restricted_ids = all_book_ids = dbcache._all_book_ids(type=set)
         if search_restriction and search_restriction.strip():
-            cached = self.cache.get(search_restriction.strip())
-            if cached is None:
-                sqp.all_book_ids = all_book_ids if book_ids is None else book_ids
-                restricted_ids = sqp.parse(search_restriction)
-                if not sqp.virtual_field_used and sqp.all_book_ids is all_book_ids:
-                    self.cache.add(search_restriction.strip(), restricted_ids)
-            else:
-                restricted_ids = cached
-                if book_ids is not None:
-                    restricted_ids = book_ids.intersection(restricted_ids)
+            sr = search_restriction.strip()
+            sqp.all_book_ids = all_book_ids if book_ids is None else book_ids
+            if self._use_cache(sqp, dbcache, sr):
+                cached = self.cache.get(sr)
+                if cached is None:
+                    restricted_ids = sqp.parse(sr)
+                    if not sqp.virtual_field_used and sqp.all_book_ids is all_book_ids:
+                        self.cache.add(sr, restricted_ids)
+                else:
+                    restricted_ids = cached
+                    if book_ids is not None:
+                        restricted_ids = book_ids.intersection(restricted_ids)
+            else:
+                restricted_ids = sqp.parse(sr)
         elif book_ids is not None:
             restricted_ids = book_ids
 
         if not query:
             return restricted_ids
 
-        if restricted_ids is all_book_ids:
+        if use_cache and restricted_ids is all_book_ids:
             cached = self.cache.get(query)
             if cached is not None:
                 return cached


@@ -207,7 +207,6 @@ class Parser(object):
         prog = self.or_expression()
         if not self.is_eof():
             raise ParseException(_('Extra characters at end of search'))
-        # prints(self.tokens, '\n', prog)
         return prog
 
     def or_expression(self):
@@ -334,6 +333,26 @@ class SearchQueryParser(object):
         self._tests_failed = False
         self.optimize = optimize
 
+    def get_queried_fields(self, query):
+        # empty the list of searches used for recursion testing
+        self.recurse_level = 0
+        self.searches_seen = set()
+        tree = self._get_tree(query)
+        yield from self._walk_expr(tree)
+
+    def _walk_expr(self, tree):
+        if tree[0] in ('or', 'and'):
+            yield from self._walk_expr(tree[1])
+            yield from self._walk_expr(tree[2])
+        elif tree[0] == 'not':
+            yield from self._walk_expr(tree[1])
+        else:
+            if tree[1] == 'search':
+                yield from self._walk_expr(self._get_tree(
+                                self._get_saved_search_text(tree[2])))
+            else:
+                yield (tree[1], tree[2])
+
     def parse(self, query, candidates=None):
         # empty the list of searches used for recursion testing
         self.recurse_level = 0
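To see what the traversal reports, here is a small standalone sketch, not part of the commit. It assumes the parser's list-shaped nodes (['and'|'or', left, right], ['not', node], ['token', location, expression]) and omits the saved-search expansion branch that _walk_expr() handles.

def walk(tree):
    # Simplified copy of _walk_expr(): yield (location, expression) for every leaf.
    if tree[0] in ('or', 'and'):
        yield from walk(tree[1])
        yield from walk(tree[2])
    elif tree[0] == 'not':
        yield from walk(tree[1])
    else:
        yield (tree[1], tree[2])

tree = ['and',
        ['token', 'title', 'dune'],
        ['not', ['token', 'pubdate', '>2019']]]
print(list(walk(tree)))   # [('title', 'dune'), ('pubdate', '>2019')]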
@@ -341,26 +360,32 @@ class SearchQueryParser(object):
             candidates = self.universal_set()
         return self._parse(query, candidates=candidates)
 
-    # this parse is used internally because it doesn't clear the
-    # recursive search test list. However, we permit seeing the
-    # same search a few times because the search might appear within
-    # another search.
-    def _parse(self, query, candidates=None):
+    def _get_tree(self, query):
         self.recurse_level += 1
         try:
             res = self.sqp_parse_cache.get(query, None)
         except AttributeError:
             res = None
-        if res is None:
-            try:
-                res = self.parser.parse(query, self.locations)
-            except RuntimeError:
-                raise ParseException(_('Failed to parse query, recursion limit reached: %s')%repr(query))
-            if self.sqp_parse_cache is not None:
-                self.sqp_parse_cache[query] = res
+        if res is not None:
+            return res
+        try:
+            res = self.parser.parse(query, self.locations)
+        except RuntimeError:
+            raise ParseException(_('Failed to parse query, recursion limit reached: %s')%repr(query))
+        if self.sqp_parse_cache is not None:
+            self.sqp_parse_cache[query] = res
+        return res
+
+    # this parse is used internally because it doesn't clear the
+    # recursive search test list. However, we permit seeing the
+    # same search a few times because the search might appear within
+    # another search.
+    def _parse(self, query, candidates=None):
+        self.recurse_level += 1
+        tree = self._get_tree(query)
         if candidates is None:
             candidates = self.universal_set()
-        t = self.evaluate(res, candidates)
+        t = self.evaluate(tree, candidates)
         self.recurse_level -= 1
         return t
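The point of this restructuring is that building the parse tree (memoised in sqp_parse_cache) is now separate from evaluating it, which is what lets get_queried_fields() inspect a query without running the search. A rough sketch of that split, using made-up names rather than calibre's:

_tree_cache = {}

def get_tree(query, parse):
    # parse once, hand the same tree to every consumer (roughly _get_tree())
    tree = _tree_cache.get(query)
    if tree is None:
        tree = _tree_cache[query] = parse(query)
    return tree

def run_search(query, parse, evaluate):
    return evaluate(get_tree(query, parse))       # roughly _parse()

def fields_used(query, parse, walk):
    return list(walk(get_tree(query, parse)))     # roughly get_queried_fields()

parse_calls = []
def fake_parse(q):
    parse_calls.append(q)
    return ('token', 'title', 'dune')

run_search('title:dune', fake_parse, evaluate=lambda tree: {1, 2})
fields_used('title:dune', fake_parse, walk=lambda tree: [tree[1:]])
assert parse_calls == ['title:dune']   # both consumers reused one parse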
@@ -393,10 +418,7 @@ class SearchQueryParser(object):
 #     def evaluate_parenthesis(self, argument, candidates):
 #         return self.evaluate(argument[0], candidates)
 
-    def evaluate_token(self, argument, candidates):
-        location = argument[0]
-        query = argument[1]
-        if location.lower() == 'search':
+    def _get_saved_search_text(self, query):
         if query.startswith('='):
             query = query[1:]
         try:
@@ -407,13 +429,19 @@ class SearchQueryParser(object):
             ss = self.lookup_saved_search(query)
             if ss is None:
                 raise ParseException(_('Unknown saved search: {}').format(query))
-            return self._parse(ss, candidates)
+            return ss
         except ParseException as e:
             raise e
         except: # convert all exceptions (e.g., missing key) to a parse error
             import traceback
             traceback.print_exc()
             raise ParseException(_('Unknown error in saved search: {0}').format(query))
 
+    def evaluate_token(self, argument, candidates):
+        location = argument[0]
+        query = argument[1]
+        if location.lower() == 'search':
+            return self._parse(self._get_saved_search_text(query), candidates)
         return self._get_matches(location, query, candidates)
 
     def _get_matches(self, location, query, candidates):