Mirror of https://github.com/kovidgoyal/calibre.git

commit 51018ff76f (parent e8a912267d)

    tags_older_than()
@@ -107,12 +107,10 @@ Various things that require other things before they can be migrated:

     1. From initialize_dynamic(): set_saved_searches,
        load_user_template_functions. Also add custom
        columns/categories/searches info into
-       self.field_metadata. Finally, implement metadata dirtied
-       functionality.
+       self.field_metadata.

     2. Catching DatabaseException and sqlite.Error when creating new
        libraries/switching/on calibre startup.

-    3. From refresh in the legacy interface: Rember to flush the composite
-       column template cache.
+    3. Port library/restore.py

     4. Replace the metadatabackup thread with the new implementation when using the new backend.

     5. In the new API refresh() does not re-read from disk. That might break a
        few things, for example content server reloading on db change as well as
@@ -1280,6 +1280,51 @@ class Cache(object):

     def refresh_ondevice(self):
         self.fields['ondevice'].clear_caches()

+    @read_api
+    def tags_older_than(self, tag, delta=None, must_have_tag=None, must_have_authors=None):
+        '''
+        Return the ids of all books having the tag ``tag`` that are older
+        than the specified time. Tag comparison is case insensitive.
+
+        :param delta: A timedelta object or None. If None, then all ids with
+            the tag are returned.
+        :param must_have_tag: If not None the list of matches will be
+            restricted to books that have this tag
+        :param must_have_authors: A list of authors. If not None the list of
+            matches will be restricted to books that have these authors (case
+            insensitive).
+        '''
+        tag_map = {icu_lower(v):k for k, v in self._get_id_map('tags').iteritems()}
+        tag = icu_lower(tag.strip())
+        mht = icu_lower(must_have_tag.strip()) if must_have_tag else None
+        tag_id, mht_id = tag_map.get(tag, None), tag_map.get(mht, None)
+        ans = set()
+        if mht_id is None and mht:
+            return ans
+        if tag_id is not None:
+            tagged_books = self._books_for_field('tags', tag_id)
+            if mht_id is not None and tagged_books:
+                tagged_books = tagged_books.intersection(self._books_for_field('tags', mht_id))
+            if tagged_books:
+                if must_have_authors is not None:
+                    amap = {icu_lower(v):k for k, v in self._get_id_map('authors').iteritems()}
+                    books = None
+                    for author in must_have_authors:
+                        abooks = self._books_for_field('authors', amap.get(icu_lower(author), None))
+                        books = abooks if books is None else books.intersection(abooks)
+                        if not books:
+                            break
+                    tagged_books = tagged_books.intersection(books or set())
+                if delta is None:
+                    ans = tagged_books
+                else:
+                    now = nowf()
+                    for book_id in tagged_books:
+                        ts = self._field_for('timestamp', book_id)
+                        if (now - ts) > delta:
+                            ans.add(book_id)
+        return ans
+
     # }}}

 class SortKey(object): # {{{
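For orientation, here is a minimal usage sketch of the new read-API method added above. It is not part of the commit: cache is assumed to be an already-open new-backend Cache instance, and the tag, author and day values are made up.

    from datetime import timedelta

    # 'cache' is an assumed, already-open calibre new-backend Cache instance.
    # Books tagged 'News' whose timestamp is more than 7 days in the past:
    stale_news = cache.tags_older_than('News', delta=timedelta(days=7))

    # With delta=None all matching ids are returned; the extra filters
    # (illustrative names only) narrow the result by another tag and by author:
    filtered = cache.tags_older_than('News', delta=None,
                                     must_have_tag='periodical',
                                     must_have_authors=['Some Author'])

    print(sorted(stale_news), sorted(filtered))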
@@ -472,6 +472,10 @@ class LibraryDatabase(object):

     def refresh_ondevice(self):
         self.new_api.refresh_ondevice()

+    def tags_older_than(self, tag, delta, must_have_tag=None, must_have_authors=None):
+        for book_id in sorted(self.new_api.tags_older_than(tag, delta=delta, must_have_tag=must_have_tag, must_have_authors=must_have_authors)):
+            yield book_id
+
     # Private interface {{{
     def __iter__(self):
         for row in self.data.iterall():
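The legacy wrapper above delegates to the new API but is a generator: it yields book ids one at a time, in sorted order, instead of returning a set. A minimal sketch, assuming db is a legacy LibraryDatabase opened on an existing library (not part of this commit):

    from datetime import timedelta

    # 'db' is an assumed legacy LibraryDatabase instance.
    # Iterate over books tagged 'News' that are older than 30 days:
    for book_id in db.tags_older_than('News', timedelta(days=30)):
        print(book_id)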
@@ -153,13 +153,19 @@ class LegacyTest(BaseTest):
     # }}}

     def test_legacy_direct(self): # {{{
-        'Test methods that are directly equivalent in the old and new interface'
+        'Test read-only methods that are directly equivalent in the old and new interface'
         from calibre.ebooks.metadata.book.base import Metadata
+        from datetime import timedelta
         ndb = self.init_legacy(self.cloned_library)
         db = self.init_old()

         for meth, args in {
             'get_next_series_num_for': [('A Series One',)],
+            '@tags_older_than': [
+                ('News', None), ('Tag One', None), ('xxxx', None), ('Tag One', None, 'News'), ('News', None, 'xxxx'),
+                ('News', None, None, ['xxxxxxx']), ('News', None, 'Tag One', ['Author Two', 'Author One']),
+                ('News', timedelta(0), None, None), ('News', timedelta(100000)),
+            ],
             'format':[(1, 'FMT1', True), (2, 'FMT1', True), (0, 'xxxxxx')],
             'has_format':[(1, 'FMT1', True), (2, 'FMT1', True), (0, 'xxxxxx')],
             '@format_files':[(0,),(1,),(2,)],
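Each tuple in the '@tags_older_than' list above is unpacked as positional arguments when the test invokes the method on both the old and the new interface (see the getattr(db, meth)(*a) call in the next hunk). For example:

    # ('News', None, 'Tag One', ['Author Two', 'Author One']) becomes:
    db.tags_older_than('News', None, 'Tag One', ['Author Two', 'Author One'])
    # i.e. tag='News', delta=None, must_have_tag='Tag One',
    #      must_have_authors=['Author Two', 'Author One']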
@@ -208,13 +214,13 @@ class LegacyTest(BaseTest):
             'books_in_series_of':[(0,), (1,), (2,)],
             'books_with_same_title':[(Metadata(db.title(0)),), (Metadata(db.title(1)),), (Metadata('1234'),)],
         }.iteritems():
+            fmt = lambda x: x
+            if meth[0] in {'!', '@'}:
+                fmt = {'!':dict, '@':frozenset}[meth[0]]
+                meth = meth[1:]
+            elif meth == 'get_authors_with_ids':
+                fmt = lambda val:{x[0]:tuple(x[1:]) for x in val}
             for a in args:
-                fmt = lambda x: x
-                if meth[0] in {'!', '@'}:
-                    fmt = {'!':dict, '@':frozenset}[meth[0]]
-                    meth = meth[1:]
-                elif meth == 'get_authors_with_ids':
-                    fmt = lambda val:{x[0]:tuple(x[1:]) for x in val}
                 self.assertEqual(fmt(getattr(db, meth)(*a)), fmt(getattr(ndb, meth)(*a)),
                         'The method: %s() returned different results for argument %s' % (meth, a))
         db.close()
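For illustration only, a standalone sketch (hypothetical helper, not calibre code) of the prefix convention used in the test table and of why the fmt setup was hoisted out of the per-argument loop: a leading '@' means both results are normalized with frozenset before comparison, so the legacy generator and the new API's set compare equal, and stripping the prefix with meth = meth[1:] must happen once per method, not once per argument.

    def normalize(meth, value):
        # Hypothetical helper mirroring the test's convention: '@' means
        # "compare as frozenset", '!' means "compare as dict".
        fmt = lambda x: x
        if meth[0] in {'!', '@'}:
            fmt = {'!': dict, '@': frozenset}[meth[0]]
            meth = meth[1:]
        return meth, fmt(value)

    # A generator of ids and a set of the same ids compare equal after
    # normalization:
    assert normalize('@tags_older_than', iter([3, 1, 2])) == \
        ('tags_older_than', frozenset({1, 2, 3}))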