calibre (mirror of https://github.com/kovidgoyal/calibre.git)
Commit 0a72140cac (parent 6a724d931f)

Various library wide properties
@@ -360,6 +360,30 @@ class Cache(object):
         '''
         return frozenset(iter(self.fields[name]))
 
+    @read_api
+    def all_field_names(self, field):
+        ''' Frozen set of all fields names (should only be used for many-one and many-many fields) '''
+        try:
+            return frozenset(self.fields[field].table.id_map.itervalues())
+        except AttributeError:
+            raise ValueError('%s is not a many-one or many-many field' % field)
+
+    @read_api
+    def get_usage_count_by_id(self, field):
+        try:
+            return {k:len(v) for k, v in self.fields[field].table.col_book_map.iteritems()}
+        except AttributeError:
+            raise ValueError('%s is not a many-one or many-many field' % field)
+
+    @read_api
+    def get_id_map(self, field):
+        try:
+            return self.fields[field].table.id_map.copy()
+        except AttributeError:
+            if field == 'title':
+                return self.fields[field].table.book_col_map.copy()
+            raise ValueError('%s is not a many-one or many-many field' % field)
+
     @read_api
     def author_data(self, author_id):
         '''
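The three new @read_api methods expose read-only, whole-field views of the in-memory tables: all_field_names() returns every value of a many-one/many-many field, get_usage_count_by_id() maps each item id to how many books use it, and get_id_map() maps item ids to their values (with a special case for 'title', which is keyed by book id). A minimal usage sketch, assuming `cache` is an already-initialised Cache instance; the variable names and example fields are illustrative only, not part of the commit:

# Illustrative only: 'cache' stands for an initialised calibre Cache instance.
tag_names = cache.all_field_names('tags')         # frozenset of every tag string in the library
tag_counts = cache.get_usage_count_by_id('tags')  # {tag_id: number of books tagged with it}
tag_map = cache.get_id_map('tags')                # {tag_id: tag string}

# Fields without an id_map (one-one fields such as 'timestamp') raise ValueError.
try:
    cache.all_field_names('timestamp')
except ValueError as err:
    print err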
@@ -65,6 +65,14 @@ class LibraryDatabase(object):
         for meth in ('get_next_series_num_for', 'has_book', 'author_sort_from_authors'):
             setattr(self, meth, getattr(self.new_api, meth))
 
+        for field in ('authors', 'tags', 'publisher', 'series'):
+            name = field[:-1] if field in {'authors', 'tags'} else field
+            setattr(self, 'all_%s_names' % name, partial(self.new_api.all_field_names, field))
+
+        for func, field in {'all_authors':'authors', 'all_titles':'title', 'all_tags2':'tags', 'all_series':'series', 'all_publishers':'publisher'}.iteritems():
+            setattr(self, func, partial(self.field_id_map, field))
+        self.all_tags = lambda : list(self.all_tag_names())
+
         self.last_update_check = self.last_modified()
 
     def close(self):
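These loops attach the old-API names to the LibraryDatabase instance by currying the new methods with functools.partial: all_author_names/all_tag_names/all_publisher_names/all_series_names call Cache.all_field_names, while all_authors/all_titles/all_tags2/all_series/all_publishers go through the list-returning field_id_map wrapper, and all_tags is rebuilt from all_tag_names. A standalone sketch of the same binding pattern; FakeAPI and its data are made up solely to keep the example self-contained:

from functools import partial

class FakeAPI(object):
    # Stand-in for self.new_api, only to show what the setattr loop produces.
    def all_field_names(self, field):
        data = {'authors': ['Author One'], 'tags': ['tag one', 'tag two']}
        return frozenset(data.get(field, []))

api = FakeAPI()
shims = {}
for field in ('authors', 'tags', 'publisher', 'series'):
    name = field[:-1] if field in {'authors', 'tags'} else field
    shims['all_%s_names' % name] = partial(api.all_field_names, field)

print sorted(shims)             # ['all_author_names', 'all_publisher_names', 'all_series_names', 'all_tag_names']
print shims['all_tag_names']()  # frozenset(['tag one', 'tag two'])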
@@ -129,6 +137,12 @@ class LibraryDatabase(object):
         for book_id in self.data.cache.all_book_ids():
             yield book_id
 
+    def get_usage_count_by_id(self, field):
+        return [[k, v] for k, v in self.new_api.get_usage_count_by_id(field).iteritems()]
+
+    def field_id_map(self, field):
+        return [(k, v) for k, v in self.new_api.get_id_map(field).iteritems()]
+
     def refresh(self, field=None, ascending=True):
         self.data.cache.refresh()
         self.data.refresh(field=field, ascending=ascending)
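The two wrappers only change shape: the new API returns dicts, while the old API returned a list of [id, count] pairs and a list of (id, value) tuples respectively. A small sketch of that conversion with made-up data:

# Made-up data; only the shape conversion matters here.
usage = {1: 3, 2: 1}                           # new_api.get_usage_count_by_id() style
print [[k, v] for k, v in usage.iteritems()]   # legacy style, e.g. [[1, 3], [2, 1]]

id_map = {1: 'Author One', 2: 'Author Two'}    # new_api.get_id_map() style
print [(k, v) for k, v in id_map.iteritems()]  # legacy style, e.g. [(1, 'Author One'), (2, 'Author Two')]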
@@ -139,9 +139,25 @@ class LegacyTest(BaseTest):
             'get_next_series_num_for': [('A Series One',)],
             'author_sort_from_authors': [(['Author One', 'Author Two', 'Unknown'],)],
             'has_book':[(Metadata('title one'),), (Metadata('xxxx1111'),)],
+            'all_author_names':[()],
+            'all_tag_names':[()],
+            'all_series_names':[()],
+            'all_publisher_names':[()],
+            'all_authors':[()],
+            'all_tags2':[()],
+            'all_tags':[()],
+            'all_publishers':[()],
+            'all_titles':[()],
+            'all_series':[()],
+            'get_usage_count_by_id':[('authors',), ('tags',), ('series',), ('publisher',), ('#tags',), ('languages',)],
         }.iteritems():
             for a in args:
-                self.assertEqual(getattr(db, meth)(*a), getattr(ndb, meth)(*a),
+                fmt = lambda x: x
+                if meth in {'get_usage_count_by_id', 'all_series', 'all_authors', 'all_tags2', 'all_publishers', 'all_titles'}:
+                    fmt = dict
+                elif meth in {'all_tags'}:
+                    fmt = frozenset
+                self.assertEqual(fmt(getattr(db, meth)(*a)), fmt(getattr(ndb, meth)(*a)),
                     'The method: %s() returned different results for argument %s' % (meth, a))
         db.close()
         # }}}
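The fmt() normalisation is needed because the old and new code paths agree on content but not on container type or ordering: the list-returning methods are compared as dicts and all_tags as a frozenset, so ordering differences do not fail the test. A tiny sketch of the idea with made-up values:

# Same content, different order/containers: compare via a canonical type.
legacy_result = [[2, 'tag two'], [1, 'tag one']]    # list of pairs, arbitrary order
new_result = [(1, 'tag one'), (2, 'tag two')]

assert legacy_result != new_result                  # a direct comparison would fail
assert dict(legacy_result) == dict(new_result)      # the normalised comparison passes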
@@ -220,7 +236,7 @@ class LegacyTest(BaseTest):
             'get_feeds', 'get_feed', 'update_feed', 'remove_feeds', 'add_feed', 'set_feeds',
         }
         SKIP_ARGSPEC = {
-            '__init__', 'get_next_series_num_for', 'has_book', 'author_sort_from_authors',
+            '__init__', 'get_next_series_num_for', 'has_book', 'author_sort_from_authors', 'all_tags',
         }
 
         missing = []
@@ -238,10 +254,11 @@ class LegacyTest(BaseTest):
                 if attr not in SKIP_ARGSPEC:
                     try:
                         argspec = inspect.getargspec(obj)
+                        nargspec = inspect.getargspec(nobj)
                     except TypeError:
                         pass
                     else:
-                        compare_argspecs(argspec, inspect.getargspec(nobj), attr)
+                        compare_argspecs(argspec, nargspec, attr)
         finally:
             for db in (ndb, db):
                 db.close()
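Capturing nargspec inside the same try block matters because inspect.getargspec() only understands plain Python functions; the functools.partial shims added above raise TypeError on the new-API side as well, so the comparison is skipped instead of erroring out. A minimal sketch of that behaviour; the helper name is illustrative, not from the commit:

import inspect
from functools import partial

def argspec_or_none(obj):
    # getargspec() raises TypeError for anything that is not a plain Python
    # function, e.g. functools.partial objects like the all_*_names shims.
    try:
        return inspect.getargspec(obj)
    except TypeError:
        return None

print argspec_or_none(lambda field: field)  # ArgSpec(args=['field'], varargs=None, keywords=None, defaults=None)
print argspec_or_none(partial(sorted))      # None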