Mirror of https://github.com/kovidgoyal/calibre.git (synced 2025-07-09 03:04:10 -04:00)
Use a cache for search query parsing

This commit is contained in:
parent c7f6bc0100
commit d0b1001ee7
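This change threads a shared LRU cache of parsed search expressions from the Search object down into SearchQueryParser, so a repeated query reuses its already-built parse tree instead of re-running the grammar. The hunks below cover the database-side Parser, LRUCache and Search classes, followed by the generic SearchQueryParser; as part of the plumbing, parse() also gains an optional candidates argument so callers can pass in the book set to evaluate against instead of always using the universal set.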
@@ -455,7 +455,7 @@ class Parser(SearchQueryParser):
     def __init__(self, dbcache, all_book_ids, gst, date_search, num_search,
                  bool_search, keypair_search, limit_search_columns, limit_search_columns_to,
-                 locations, virtual_fields, lookup_saved_search):
+                 locations, virtual_fields, lookup_saved_search, parse_cache):
         self.dbcache, self.all_book_ids = dbcache, all_book_ids
         self.all_search_locations = frozenset(locations)
         self.grouped_search_terms = gst
@@ -466,7 +466,7 @@ class Parser(SearchQueryParser):
         self.virtual_fields = virtual_fields or {}
         if 'marked' not in self.virtual_fields:
             self.virtual_fields['marked'] = self
-        super(Parser, self).__init__(locations, optimize=True, lookup_saved_search=lookup_saved_search)
+        SearchQueryParser.__init__(self, locations, optimize=True, lookup_saved_search=lookup_saved_search, parse_cache=parse_cache)

     @property
     def field_metadata(self):
@@ -736,6 +736,7 @@ class LRUCache(object):

         self.item_map[key] = val
         self.age_map.append(key)
+    __setitem__ = add

     def get(self, key, default=None):
         ans = self.item_map.get(key, default)
@@ -753,6 +754,9 @@ class LRUCache(object):
     def __len__(self):
         return len(self.age_map)

+    def __getitem__(self, key):
+        return self.get(key)
+
 class Search(object):

     def __init__(self, db, opt_name, all_search_locations=()):
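The two one-line additions to LRUCache (__setitem__ = add and __getitem__) exist so the cache can be driven with dict-style syntax: the parser code further down stores entries with self.sqp_parse_cache[query] = res, and __getitem__ makes reads symmetrical. Below is a minimal sketch of that pattern, not calibre's actual class; the eviction details are assumptions added only to make the example self-contained.

from collections import deque

class SimpleLRUCache(object):
    # Illustrative stand-in for calibre's LRUCache; eviction policy is an assumption.

    def __init__(self, limit=100):
        self.item_map = {}
        self.age_map = deque()
        self.limit = limit

    def add(self, key, val):
        # Re-adding a key refreshes its age; a full cache drops its oldest entry.
        if key in self.item_map:
            self.age_map.remove(key)
        elif len(self.age_map) >= self.limit:
            oldest = self.age_map.popleft()
            del self.item_map[oldest]
        self.item_map[key] = val
        self.age_map.append(key)
    __setitem__ = add           # allows cache[key] = val

    def get(self, key, default=None):
        return self.item_map.get(key, default)

    def __getitem__(self, key):
        return self.get(key)    # allows cache[key]

    def __len__(self):
        return len(self.age_map)


cache = SimpleLRUCache(limit=2)
cache['tags:fiction'] = 'parse tree 1'    # routed through add() by the alias
cache['title:dune'] = 'parse tree 2'
cache['author:herbert'] = 'parse tree 3'  # evicts the oldest entry
print(cache['tags:fiction'], cache['author:herbert'])   # None parse tree 3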
@@ -763,6 +767,7 @@ class Search(object):
         self.keypair_search = KeyPairSearch()
         self.saved_searches = SavedSearchQueries(db, opt_name)
         self.cache = LRUCache()
+        self.parse_cache = LRUCache(limit=100)

     def get_saved_searches(self):
         return self.saved_searches
@@ -770,6 +775,7 @@ class Search(object):
     def change_locations(self, newlocs):
         if frozenset(newlocs) != frozenset(self.all_search_locations):
             self.clear_caches()
+            self.parse_cache.clear()
             self.all_search_locations = newlocs

     def clear_caches(self):
@@ -788,7 +794,7 @@ class Search(object):
                 self.keypair_search,
                 prefs['limit_search_columns'],
                 prefs['limit_search_columns_to'], self.all_search_locations,
-                virtual_fields, self.saved_searches.lookup)
+                virtual_fields, self.saved_searches.lookup, self.parse_cache)
         try:
             return self._do_search(sqp, query, search_restriction, dbcache, book_ids=book_ids)
         finally:
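Taken together, the Search hunks establish the ownership model: Search keeps one long-lived parse_cache (bounded to the 100 most recent queries and separate from the existing result cache, self.cache) and hands it to every Parser it constructs, so cache hits survive the per-search creation of parser objects. A stripped-down sketch of that wiring follows; the class and method names here are placeholders, not calibre's API.

class QueryParser(object):
    # Stand-in for SearchQueryParser: parsing is the expensive step being cached.
    def __init__(self, parse_cache=None):
        self.parse_cache = parse_cache

    def parse(self, query):
        res = None if self.parse_cache is None else self.parse_cache.get(query)
        if res is None:
            res = ('parsed', query)          # pretend this is the slow grammar run
            if self.parse_cache is not None:
                self.parse_cache[query] = res
        return res


class SearchService(object):
    # Stand-in for Search: owns the cache so it outlives individual parsers.
    def __init__(self):
        self.parse_cache = {}

    def search(self, query):
        # A fresh parser is built for every search, but all of them share
        # the same cache, so a repeated query skips re-parsing.
        return QueryParser(parse_cache=self.parse_cache).parse(query)


svc = SearchService()
svc.search('tags:fiction')    # parses and stores the result
svc.search('tags:fiction')    # second call is a cache hit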
@@ -300,25 +300,28 @@ class SearchQueryParser(object):
                 failed.append(test[0])
         return failed

-    def __init__(self, locations, test=False, optimize=False, lookup_saved_search=None):
+    def __init__(self, locations, test=False, optimize=False, lookup_saved_search=None, parse_cache=None):
         self.sqp_initialize(locations, test=test, optimize=optimize)
         self.parser = Parser()
         self.lookup_saved_search = global_lookup_saved_search if lookup_saved_search is None else lookup_saved_search
+        self.sqp_parse_cache = parse_cache

     def sqp_change_locations(self, locations):
         self.sqp_initialize(locations, optimize=self.optimize)
+        if self.sqp_parse_cache is not None:
+            self.sqp_parse_cache.clear()

     def sqp_initialize(self, locations, test=False, optimize=False):
         self.locations = locations
         self._tests_failed = False
         self.optimize = optimize

-    def parse(self, query):
+    def parse(self, query, candidates=None):
         # empty the list of searches used for recursion testing
         self.recurse_level = 0
         self.searches_seen = set([])
-        candidates = self.universal_set()
-        return self._parse(query, candidates)
+        return self._parse(query, candidates=candidates)

     # this parse is used internally because it doesn't clear the
     # recursive search test list. However, we permit seeing the
@@ -326,10 +329,17 @@ class SearchQueryParser(object):
     # another search.
     def _parse(self, query, candidates=None):
         self.recurse_level += 1
         try:
-            res = self.parser.parse(query, self.locations)
-        except RuntimeError:
-            raise ParseException(_('Failed to parse query, recursion limit reached: %s')%repr(query))
+            res = self.sqp_parse_cache.get(query, None)
+        except AttributeError:
+            res = None
+        if res is None:
+            try:
+                res = self.parser.parse(query, self.locations)
+            except RuntimeError:
+                raise ParseException(_('Failed to parse query, recursion limit reached: %s')%repr(query))
+            if self.sqp_parse_cache is not None:
+                self.sqp_parse_cache[query] = res
         if candidates is None:
             candidates = self.universal_set()
         t = self.evaluate(res, candidates)
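Two details worth noting in the SearchQueryParser hunks: the cache lookup is wrapped in try/except AttributeError because parse_cache defaults to None, so self.sqp_parse_cache.get(...) simply fails and the code falls back to a normal parse when no cache was supplied, while writes are guarded by an explicit is-not-None check; and both sqp_change_locations() and Search.change_locations() clear the cache, since a stored parse tree is only valid for the set of search locations it was built against.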