Starting from Kovid's multisort:

1) Change _map_filtered to an ordered dict to make 'in' operations much faster.
2) Add a method to field_metadata to return a dict of database fields.
3) Fix a couple of places where field_metadata needed to be used.
4) Make changes so gui2.library.models.resort uses multisort.
commit bcd0430791
parent b99f36a615
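The main point of the change set is the first item: _map_filtered is consulted with `id in self._map_filtered` in several hot paths (search, sort, books_added), and as a plain list each such test is a linear scan. Keyed as an ordered dict it becomes a hash lookup while still iterating in insertion order, which is what preserves the current sort order of the filtered view. A minimal sketch of the difference, using the stdlib collections.OrderedDict in place of calibre's calibre.utils.ordered_dict backport, with made-up ids:

    # Sketch only: list membership vs ordered-dict membership on fake book ids.
    from collections import OrderedDict
    import timeit

    ids = list(range(20000))                  # stand-in for book ids in sort order
    as_list = list(ids)
    as_dict = OrderedDict.fromkeys(ids, True)

    filter_with_list = lambda: [i for i in ids if i in as_list]   # O(n) per test
    filter_with_dict = lambda: [i for i in ids if i in as_dict]   # O(1) per test

    print('list: %.3fs' % timeit.timeit(filter_with_list, number=1))
    print('dict: %.3fs' % timeit.timeit(filter_with_dict, number=1))
    print(list(as_dict)[:3] == ids[:3])       # iteration order is preserved -> True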
@@ -247,7 +247,7 @@ class BooksModel(QAbstractTableModel): # {{{
             # the search and count records for restrictions
             self.searched.emit(True)
 
-    def sort(self, col, order, reset=True, update_history=True):
+    def sort(self, col, order, reset=True):
         if not self.db:
             return
         self.about_to_be_sorted.emit(self.db.id)
@@ -258,8 +258,7 @@ class BooksModel(QAbstractTableModel): # {{{
         self.clear_caches()
         self.reset()
         self.sorted_on = (label, order)
-        if update_history:
-            self.sort_history.insert(0, self.sorted_on)
+        self.sort_history.insert(0, self.sorted_on)
         self.sorting_done.emit(self.db.index)
 
     def refresh(self, reset=True):
@@ -267,12 +266,9 @@ class BooksModel(QAbstractTableModel): # {{{
         self.resort(reset=reset)
 
     def resort(self, reset=True):
-        for col,ord in reversed(self.sort_history[:tweaks['maximum_resort_levels']]):
-            try:
-                col = self.column_map.index(col)
-            except ValueError:
-                col = 0
-            self.sort(col, ord, reset=False, update_history=False)
+        if not self.db:
+            return
+        self.db.multisort(self.sort_history[:tweaks['maximum_resort_levels']])
         if reset:
             self.reset()
 
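The old resort replayed the sort history one column at a time, counting on Python's stable sort so that applying keys least-significant-first produced a multi-column order; the new one hands the same bounded (field, ascending) history, most significant first, to a single db.multisort call. A rough sketch of the two shapes (illustrative only, not calibre code; direction handling for the one-call version lives in the SortKey changes further down):

    # Sketch: replaying history with stable sorts vs. one multisort call.
    def resort_old(rows, history, keyfunc):
        # history is [(field, ascending), ...] with the primary sort first,
        # so it has to be applied in reverse to exploit sort stability.
        for field, ascending in reversed(history):
            rows.sort(key=lambda r: keyfunc(r, field), reverse=not ascending)

    def resort_new(db, history, max_levels=3):   # max_levels mimics the tweak
        if db is None:
            return
        db.multisort(history[:max_levels])       # one call, primary field first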
@@ -20,6 +20,7 @@ from calibre.utils.search_query_parser import SearchQueryParser
 from calibre.utils.pyparsing import ParseException
 from calibre.ebooks.metadata import title_sort
 from calibre import fit_image
+from calibre.utils.ordered_dict import OrderedDict
 
 class CoverCache(Thread):
 
@@ -112,7 +113,8 @@ class ResultCache(SearchQueryParser):
     '''
     def __init__(self, FIELD_MAP, field_metadata):
         self.FIELD_MAP = FIELD_MAP
-        self._map = self._map_filtered = self._data = []
+        self._map = self._data = []
+        self._map_filtered = OrderedDict()
         self.first_sort = True
         self.search_restriction = ''
         self.field_metadata = field_metadata
@@ -122,14 +124,14 @@ class ResultCache(SearchQueryParser):
         self.build_numeric_relop_dict()
 
     def __getitem__(self, row):
-        return self._data[self._map_filtered[row]]
+        return self._data[self._map_filtered.keys()[row]]
 
     def __len__(self):
         return len(self._map_filtered)
 
     def __iter__(self):
         for id in self._map_filtered:
-            yield self._data[id]
+            yield id
 
     def iterall(self):
         for x in self._data:
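Indexing with self._map_filtered.keys()[row] leans on Python 2, where dict.keys() returns a real list; that is how a GUI row number is still translated into a book id once the filtered map is a dict. A small sketch with invented ids (written with an explicit list() so it also runs on Python 3):

    # Sketch: row number -> book id through the ordered filtered map.
    from collections import OrderedDict

    _map_filtered = OrderedDict.fromkeys([42, 7, 19], True)  # ids in display order
    keys = list(_map_filtered.keys())         # .keys() is already a list on Python 2
    print(keys[0])                            # -> 42, the id shown in row 0
    print(len(_map_filtered))                 # -> 3 visible rows
    print([book_id for book_id in _map_filtered])  # iteration now yields ids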
@@ -468,7 +470,7 @@ class ResultCache(SearchQueryParser):
         ans = self.search_getting_ids(query, self.search_restriction)
         if return_matches:
             return ans
-        self._map_filtered = ans
+        self._map_filtered = OrderedDict.fromkeys(ans, True)
 
     def search_getting_ids(self, query, search_restriction):
         q = ''
@@ -480,7 +482,7 @@ class ResultCache(SearchQueryParser):
             q = u'%s (%s)' % (search_restriction, query)
         if not q:
             return list(self._map)
-        matches = sorted(self.parse(q))
+        matches = self.parse(q)
         return [id for id in self._map if id in matches]
 
     def set_search_restriction(self, s):
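Dropping the sorted() call is safe because matches is only used for membership in the comprehension below it; the order of the result comes from walking self._map, which is already in sort order. Assuming parse() returns a set of matching ids (which the membership test suggests), a sketch:

    # Sketch: result order is taken from the master map, not from the match set.
    _map = [5, 3, 9, 1]        # ids in the current sort order
    matches = {9, 5}           # whatever the query matched; its order is irrelevant
    print([book_id for book_id in _map if book_id in matches])   # -> [5, 9]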
@@ -493,18 +495,18 @@ class ResultCache(SearchQueryParser):
         if id in self._map:
             self._map.remove(id)
         if id in self._map_filtered:
-            self._map_filtered.remove(id)
+            del self._map_filtered[id]
 
     def set(self, row, col, val, row_is_id=False):
-        id = row if row_is_id else self._map_filtered[row]
+        id = row if row_is_id else self._map_filtered.keys()[row]
         self._data[id][col] = val
 
     def get(self, row, col, row_is_id=False):
-        id = row if row_is_id else self._map_filtered[row]
+        id = row if row_is_id else self._map_filtered.keys()[row]
         return self._data[id][col]
 
     def index(self, id, cache=False):
-        x = self._map if cache else self._map_filtered
+        x = self._map if cache else self._map_filtered.keys()
         return x.index(id)
 
     def row(self, id):
@@ -544,13 +546,18 @@ class ResultCache(SearchQueryParser):
             self._data[id].append(db.has_cover(id, index_is_id=True))
             self._data[id].append(db.book_on_device_string(id))
         self._map[0:0] = ids
-        self._map_filtered[0:0] = ids
+        mf = OrderedDict()
+        for id in ids:
+            mf[id] = True
+        for id in self._map_filtered:
+            mf[id] = True
+        self._map_filtered = mf
 
     def books_deleted(self, ids):
         for id in ids:
             self._data[id] = None
             if id in self._map: self._map.remove(id)
-            if id in self._map_filtered: self._map_filtered.remove(id)
+            if id in self._map_filtered: del self._map_filtered[id]
 
     def count(self):
         return len(self._map)
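books_added can no longer splice new ids in with slice assignment, since an ordered dict has no [0:0]; instead it rebuilds the mapping with the fresh ids first and the existing filtered view after them, preserving both relative orders. The same prepend-merge in isolation, with hypothetical ids:

    # Sketch: prepend new ids to an ordered dict without losing existing order.
    from collections import OrderedDict

    existing = OrderedDict.fromkeys([10, 11, 12], True)   # current filtered view
    new_ids = [99, 98]                                    # freshly added books

    mf = OrderedDict()
    for book_id in new_ids:      # new books go to the front...
        mf[book_id] = True
    for book_id in existing:     # ...followed by the old view, order intact
        mf[book_id] = True
    print(list(mf))              # -> [99, 98, 10, 11, 12]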
@@ -573,7 +580,7 @@ class ResultCache(SearchQueryParser):
         self._map = [i[0] for i in self._data if i is not None]
         if field is not None:
             self.sort(field, ascending)
-        self._map_filtered = list(self._map)
+        self._map_filtered = OrderedDict.fromkeys(self._map, True)
         if self.search_restriction:
             self.search('', return_matches=False)
 
@@ -644,10 +651,14 @@ class ResultCache(SearchQueryParser):
                             self.FIELD_MAP['series_index'],
                             library_order=tweaks['title_series_sorting'] == 'library_order')
         else:
-            fcmp = functools.partial(self.cmp, self.FIELD_MAP[field],
+            fcmp = functools.partial(self.cmp, self.field_metadata[field]['rec_index'],
                                      subsort=subsort, asstr=as_string)
         self._map.sort(cmp=fcmp, reverse=not ascending)
-        self._map_filtered = [id for id in self._map if id in self._map_filtered]
+        mf = OrderedDict()
+        for id in self._map:
+            if id in self._map_filtered:
+                mf[id] = True
+        self._map_filtered = mf
 
     def multisort(self, fields=[], subsort=False):
         fields = [(self.sanitize_field_name(x), bool(y)) for x, y in fields]
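The loop that replaces the old list comprehension does the same job: once _map has been re-sorted, the filtered view is rebuilt by walking _map and keeping only ids that were already visible, so the search results follow the new order. Condensed to a one-expression sketch (for reference only, not a proposed change):

    # Sketch: re-ordering the filtered view to follow a freshly sorted master map.
    from collections import OrderedDict

    _map = [3, 1, 2]                                    # master list after sorting
    _map_filtered = OrderedDict.fromkeys([2, 3], True)  # ids matching the search

    _map_filtered = OrderedDict(
        (book_id, True) for book_id in _map if book_id in _map_filtered)
    print(list(_map_filtered))                          # -> [3, 2]: same ids, new order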
@@ -655,7 +666,7 @@ class ResultCache(SearchQueryParser):
             fields += [('sort', True)]
         if not fields:
             fields = [('timestamp', False)]
-        keys = self.field_metadata.keys()
+        keys = self.field_metadata.field_keys()
         for f, order in fields:
             if f not in keys:
                 raise ValueError(f + ' not an existing field name')
@@ -665,7 +676,11 @@ class ResultCache(SearchQueryParser):
             self._map.sort(key=keyg, reverse=not fields[0][1])
         else:
             self._map.sort(key=keyg)
-        self._map_filtered = [id for id in self._map if id in self._map_filtered]
+        mf = OrderedDict()
+        for id in self._map:
+            if id in self._map_filtered:
+                mf[id] = id
+        self._map_filtered = mf
 
 
 class SortKey(object):
@@ -677,16 +692,14 @@ class SortKey(object):
         for i, ascending in enumerate(self.orders):
             ans = cmp(self.values[i], other.values[i])
             if ans != 0:
-                if not ascending:
-                    ans *= -1
-                return ans
+                return ans * ascending
         return 0
 
 class SortKeyGenerator(object):
 
     def __init__(self, fields, field_metadata, data):
         self.field_metadata = field_metadata
-        self.orders = [x[1] for x in fields]
+        self.orders = [-1 if x[1] else 1 for x in fields]
         self.entries = [(x[0], field_metadata[x[0]]) for x in fields]
         self.library_order = tweaks['title_series_sorting'] == 'library_order'
         self.data = data
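The comparator now folds sort direction into a precomputed ±1 factor: orders carries one multiplier per field, and multiplying the cmp result by it flips reversed fields without the old branch-and-negate. The same technique in a standalone sketch (the +1/-1 convention here is the sketch's own; cmp() is emulated so this also runs on Python 3):

    # Sketch: multi-key comparison with a per-field +/-1 direction multiplier.
    def _cmp(a, b):                      # portable cmp(); a builtin on Python 2
        return (a > b) - (a < b)

    class Key(object):
        def __init__(self, orders, values):
            self.orders = orders         # one +1/-1 multiplier per sort field
            self.values = values

        def compare(self, other):
            for mult, mine, theirs in zip(self.orders, self.values, other.values):
                ans = _cmp(mine, theirs)
                if ans != 0:
                    return ans * mult    # flip the answer for reversed fields
            return 0                     # equal on every key

    a = Key([1, -1], ('Austen', 2005))   # author ascending, year descending
    b = Key([1, -1], ('Austen', 1999))
    print(a.compare(b))                  # -> -1: 2005 sorts before 1999 here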
@@ -735,7 +748,7 @@ if __name__ == '__main__':
 
     db.refresh()
 
-    fields = db.field_metadata.keys()
+    fields = db.field_metadata.field_keys()
 
     print fields
 
@@ -765,7 +778,7 @@ if __name__ == '__main__':
     print 'Running single sort differentials'
     for field in fields:
         if field in ('search', 'id', 'news', 'flags'): continue
-        print '\t', field
+        print '\t', field, db.field_metadata[field]['datatype']
         old, new = test_single_sort(field)
         if old[1] != new[1] or old[2] != new[2]:
             print '\t\t', 'Sort failure!'
@@ -797,7 +810,7 @@ if __name__ == '__main__':
             [('size', True), ('tags', True), ('author', False)],
             [('series', False), ('title', True)],
             [('size', True), ('tags', True), ('author', False), ('pubdate',
-                True), ('tags', False), ('formats', False), ('uuid', True)],
+                True), ('series', False), ('formats', False), ('uuid', True)],
 
             ]:
         print '\t', ms
@@ -311,6 +311,7 @@ class LibraryDatabase2(LibraryDatabase, SchemaUpgrade, CustomColumns):
         self.search_getting_ids = self.data.search_getting_ids
         self.refresh = functools.partial(self.data.refresh, self)
         self.sort = self.data.sort
+        self.multisort = self.data.multisort
         self.index = self.data.index
         self.refresh_ids = functools.partial(self.data.refresh_ids, self)
         self.row = self.data.row
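Exposing multisort on LibraryDatabase2 is what lets the GUI model's resort above make its single call. A hypothetical call site, with the database stubbed out so the snippet is self-contained; the argument shape, a list of (field, ascending) pairs with the primary sort first, matches the test cases in this diff:

    # Hypothetical usage: 'db' stands in for an open LibraryDatabase2.
    class _StubDB(object):
        def multisort(self, fields):
            print('multisort(%r)' % (fields,))

    db = _StubDB()
    sort_history = [('author', False), ('title', True), ('size', True)]
    db.multisort(sort_history[:3])   # primary field first, at most three levels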
@@ -335,6 +335,9 @@ class FieldMetadata(dict):
     def keys(self):
         return self._tb_cats.keys()
 
+    def field_keys(self):
+        return [k for k in self._tb_cats.keys() if self._tb_cats[k]['kind']=='field']
+
     def iterkeys(self):
         for key in self._tb_cats:
             yield key
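field_keys() narrows the metadata catalogue to entries whose kind is 'field', which is what multisort now validates its field names against, so entries that are not real database fields are excluded. A toy version of the same filter over an invented _tb_cats mapping:

    # Sketch: filtering a metadata catalogue down to sortable database fields.
    # Keys and 'kind' values below are invented for illustration.
    _tb_cats = {
        'title':   {'kind': 'field', 'datatype': 'text'},
        'authors': {'kind': 'field', 'datatype': 'text'},
        'search':  {'kind': 'search', 'datatype': None},
    }

    def field_keys(cats):
        return [k for k in cats if cats[k]['kind'] == 'field']

    print(sorted(field_keys(_tb_cats)))   # -> ['authors', 'title']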