Changes as discussed. Tests improved to check that the cache is cleared and that ProxyMetadata returns the link map

This commit is contained in:
Charles Haley 2023-03-27 12:53:26 +01:00
parent 26d88d1d65
commit a4e49d85c1
4 changed files with 47 additions and 10 deletions

View File

@ -291,7 +291,14 @@ class Cache:
self.format_metadata_cache.clear() self.format_metadata_cache.clear()
if search_cache: if search_cache:
self._clear_search_caches(book_ids) self._clear_search_caches(book_ids)
self.link_maps_cache = {} self.clear_link_map_cache(book_ids)
def clear_link_map_cache(self, book_ids=None):
    '''
    Drop cached link maps.

    :param book_ids: iterable of book ids whose cached entries should be
        discarded, or None to discard the entire cache.
    '''
    if book_ids is not None:
        # Evict only the named books; missing ids are ignored.
        for book_id in book_ids:
            self.link_maps_cache.pop(book_id, None)
    else:
        # No ids given: throw the whole cache away.
        self.link_maps_cache = {}
@write_api @write_api
def reload_from_db(self, clear_caches=True): def reload_from_db(self, clear_caches=True):
@ -1487,6 +1494,7 @@ class Cache:
if update_path and do_path_update: if update_path and do_path_update:
self._update_path(dirtied, mark_as_dirtied=False) self._update_path(dirtied, mark_as_dirtied=False)
self._mark_as_dirty(dirtied) self._mark_as_dirty(dirtied)
self.clear_link_map_cache(dirtied)
self.event_dispatcher(EventType.metadata_changed, name, dirtied) self.event_dispatcher(EventType.metadata_changed, name, dirtied)
return dirtied return dirtied
@ -1502,6 +1510,7 @@ class Cache:
self.format_metadata_cache.pop(book_id, None) self.format_metadata_cache.pop(book_id, None)
if mark_as_dirtied: if mark_as_dirtied:
self._mark_as_dirty(book_ids) self._mark_as_dirty(book_ids)
self.clear_link_map_cache(book_ids)
@read_api @read_api
def get_a_dirtied_book(self): def get_a_dirtied_book(self):
@ -2161,6 +2170,7 @@ class Cache:
for book_id in moved_books: for book_id in moved_books:
self._set_field(f.index_field.name, {book_id:self._get_next_series_num_for(self._fast_field_for(f, book_id), field=field)}) self._set_field(f.index_field.name, {book_id:self._get_next_series_num_for(self._fast_field_for(f, book_id), field=field)})
self._mark_as_dirty(affected_books) self._mark_as_dirty(affected_books)
self.clear_link_map_cache(affected_books)
self.event_dispatcher(EventType.items_renamed, field, affected_books, id_map) self.event_dispatcher(EventType.items_renamed, field, affected_books, id_map)
return affected_books, id_map return affected_books, id_map
@ -2180,6 +2190,7 @@ class Cache:
self._set_field(field.index_field.name, {bid:1.0 for bid in affected_books}) self._set_field(field.index_field.name, {bid:1.0 for bid in affected_books})
else: else:
self._mark_as_dirty(affected_books) self._mark_as_dirty(affected_books)
self.clear_link_map_cache(affected_books)
self.event_dispatcher(EventType.items_removed, field, affected_books, item_ids) self.event_dispatcher(EventType.items_removed, field, affected_books, item_ids)
return affected_books return affected_books
@ -2314,6 +2325,7 @@ class Cache:
self._set_field('author_sort', val_map) self._set_field('author_sort', val_map)
if changed_books: if changed_books:
self._mark_as_dirty(changed_books) self._mark_as_dirty(changed_books)
self.clear_link_map_cache(changed_books)
return changed_books return changed_books
@write_api @write_api
@ -2324,6 +2336,7 @@ class Cache:
changed_books |= self._books_for_field('authors', author_id) changed_books |= self._books_for_field('authors', author_id)
if changed_books: if changed_books:
self._mark_as_dirty(changed_books) self._mark_as_dirty(changed_books)
self.clear_link_map_cache(changed_books)
return changed_books return changed_books
@read_api @read_api
@ -2416,6 +2429,7 @@ class Cache:
changed_books |= self._books_for_field(field, id_) changed_books |= self._books_for_field(field, id_)
if changed_books: if changed_books:
self._mark_as_dirty(changed_books) self._mark_as_dirty(changed_books)
self.clear_link_map_cache(changed_books)
return changed_books return changed_books
@read_api @read_api

View File

@ -330,9 +330,6 @@ class ProxyMetadata(Metadata):
sa(self, '_user_metadata', db.field_metadata) sa(self, '_user_metadata', db.field_metadata)
def __getattribute__(self, field): def __getattribute__(self, field):
if field == 'link_maps':
db = ga(self, '_db')()
return db.get_all_link_maps_for_book(ga(self, '_book_id'))
getter = getters.get(field, None) getter = getters.get(field, None)
if getter is not None: if getter is not None:
return getter(ga(self, '_db'), ga(self, '_book_id'), ga(self, '_cache')) return getter(ga(self, '_db'), ga(self, '_book_id'), ga(self, '_cache'))
@ -354,6 +351,10 @@ class ProxyMetadata(Metadata):
return custom_getter(field, ga(self, '_db'), ga(self, '_book_id'), ga(self, '_cache')) return custom_getter(field, ga(self, '_db'), ga(self, '_book_id'), ga(self, '_cache'))
return composite_getter(self, field, ga(self, '_db'), ga(self, '_book_id'), ga(self, '_cache'), ga(self, 'formatter'), ga(self, 'template_cache')) return composite_getter(self, field, ga(self, '_db'), ga(self, '_book_id'), ga(self, '_cache'), ga(self, 'formatter'), ga(self, 'template_cache'))
if field == 'link_maps':
db = ga(self, '_db')()
return db.get_all_link_maps_for_book(ga(self, '_book_id'))
try: try:
return ga(self, '_cache')[field] return ga(self, '_cache')[field]
except KeyError: except KeyError:

View File

@ -795,6 +795,7 @@ CREATE TRIGGER fkc_annot_update
self.db.reindex_annotations() self.db.reindex_annotations()
def upgrade_version_25(self): def upgrade_version_25(self):
alters = []
for record in self.db.execute( for record in self.db.execute(
'SELECT label,name,datatype,editable,display,normalized,id,is_multiple FROM custom_columns'): 'SELECT label,name,datatype,editable,display,normalized,id,is_multiple FROM custom_columns'):
data = { data = {
@ -809,12 +810,14 @@ CREATE TRIGGER fkc_annot_update
} }
if data['normalized']: if data['normalized']:
tn = 'custom_column_{}'.format(data['num']) tn = 'custom_column_{}'.format(data['num'])
self.db.execute(f'ALTER TABLE {tn} ADD COLUMN link TEXT NOT NULL DEFAULT "";') alters.append(f'ALTER TABLE {tn} ADD COLUMN link TEXT NOT NULL DEFAULT "";')
self.db.execute('ALTER TABLE publishers ADD COLUMN link TEXT NOT NULL DEFAULT "";')
self.db.execute('ALTER TABLE series ADD COLUMN link TEXT NOT NULL DEFAULT "";') alters.append('ALTER TABLE publishers ADD COLUMN link TEXT NOT NULL DEFAULT "";')
self.db.execute('ALTER TABLE tags ADD COLUMN link TEXT NOT NULL DEFAULT "";') alters.append('ALTER TABLE series ADD COLUMN link TEXT NOT NULL DEFAULT "";')
alters.append('ALTER TABLE tags ADD COLUMN link TEXT NOT NULL DEFAULT "";')
# These aren't necessary in that there is no UI to set links, but having them # These aren't necessary in that there is no UI to set links, but having them
# makes the code uniform # makes the code uniform
self.db.execute('ALTER TABLE languages ADD COLUMN link TEXT NOT NULL DEFAULT "";') alters.append('ALTER TABLE languages ADD COLUMN link TEXT NOT NULL DEFAULT "";')
self.db.execute('ALTER TABLE ratings ADD COLUMN link TEXT NOT NULL DEFAULT "";') alters.append('ALTER TABLE ratings ADD COLUMN link TEXT NOT NULL DEFAULT "";')
self.db.execute('\n'.join(alters))

View File

@ -946,6 +946,25 @@ class WritingTest(BaseTest):
self.assertEqual('url', links['tags']['foo'], 'link for tag foo is wrong') self.assertEqual('url', links['tags']['foo'], 'link for tag foo is wrong')
self.assertEqual('url2', links['publisher']['random'], 'link for publisher random is wrong') self.assertEqual('url2', links['publisher']['random'], 'link for publisher random is wrong')
# Check that renaming a tag keeps the link and clears the link map cache for the book
self.assertTrue(1 in cache.link_maps_cache, "book not in link_map_cache")
tag_id = cache.get_item_id('tags', 'foo')
cache.rename_items('tags', {tag_id: 'foobar'})
self.assertTrue(1 not in cache.link_maps_cache, "book still in link_map_cache")
links = cache.get_link_map('tags')
self.assertTrue('foobar' in links, "rename foo lost the link")
self.assertEqual(links['foobar'], 'url', "The link changed contents")
links = cache.get_all_link_maps_for_book(1)
self.assertTrue(1 in cache.link_maps_cache, "book not put back into link_map_cache")
self.assertDictEqual({'publisher': {'random': 'url2'}, 'tags': {'foobar': 'url'}},
links, "book links incorrect after tag rename")
# Check ProxyMetadata
mi = cache.get_proxy_metadata(1)
self.assertDictEqual({'publisher': {'random': 'url2'}, 'tags': {'foobar': 'url'}},
mi.link_maps, "ProxyMetadata didn't return the right link map")
# Now test deleting the links. # Now test deleting the links.
links = cache.get_link_map('tags') links = cache.get_link_map('tags')
to_del = {l:'' for l in links.keys()} to_del = {l:'' for l in links.keys()}