A framework for listening to change events from the database

Fixes #1937898 [[Enhancement request] pyqt signals in calibre source](https://bugs.launchpad.net/calibre/+bug/1937898)
This commit is contained in:
Kovid Goyal 2021-08-10 11:09:38 +05:30
parent c91be6ed11
commit 559d467556
No known key found for this signature in database
GPG Key ID: 06BC317B515ACE7C
2 changed files with 110 additions and 1 deletions

View File

@@ -30,6 +30,7 @@ from calibre.db.categories import get_categories
from calibre.db.errors import NoSuchBook, NoSuchFormat
from calibre.db.fields import IDENTITY, InvalidLinkTable, create_field
from calibre.db.lazy import FormatMetadata, FormatsList, ProxyMetadata
from calibre.db.listeners import EventDispatcher, EventType
from calibre.db.locking import (
    DowngradeLockError, LockingError, SafeReadLock, create_locks, try_lock
)
@@ -139,6 +140,7 @@ class Cache(object):

    def __init__(self, backend):
        self.backend = backend
        self.event_dispatcher = EventDispatcher()
        self.fields = {}
        self.composites = {}
        self.read_lock, self.write_lock = create_locks()
@@ -421,6 +423,20 @@ class Cache(object):

    # Cache Layer API {{{
@write_api
def add_listener(self, event_callback_function):
'''
Register a callback function that will be called after certain actions are
taken on this database. The function must take two arguments, the first of
which is the event type (:class:`EventType`) and the second is a tuple
containing event type specific data.
'''
self.event_dispatcher.add_listener(event_callback_function)
@write_api
def remove_listener(self, event_callback_function):
self.event_dispatcher.remove_listener(event_callback_function)
    @read_api
    def field_for(self, name, book_id, default_value=None):
        '''
@@ -1231,7 +1247,7 @@ class Cache(object):
            self._update_path(dirtied, mark_as_dirtied=False)
        self._mark_as_dirty(dirtied)
        self.event_dispatcher(EventType.metadata_changed, name, dirtied)
        return dirtied

    @write_api
@@ -1568,6 +1584,7 @@ class Cache(object):
            max_size = self.fields['formats'].table.update_fmt(book_id, fmt, fname, size, self.backend)
            self.fields['size'].table.update_sizes({book_id: max_size})
            self._update_last_modified((book_id,))
            self.event_dispatcher(EventType.format_added, book_id, fmt)

        if run_hooks:
            # Run post import plugins, the write lock is released so the plugin
@@ -1612,6 +1629,7 @@ class Cache(object):
        size_map = table.remove_formats(formats_map, self.backend)
        self.fields['size'].table.update_sizes(size_map)
        self._update_last_modified(tuple(formats_map))
        self.event_dispatcher(EventType.formats_removed, formats_map)

    @read_api
    def get_next_series_num_for(self, series, field='series', current_indices=False):
@@ -1713,6 +1731,7 @@ class Cache(object):
            self.backend.execute('INSERT INTO books(id, title, series_index, author_sort) VALUES (?, ?, ?, ?)',
                                 (force_id, mi.title, series_index, aus))
        book_id = self.backend.last_insert_rowid()
        self.event_dispatcher(EventType.book_created, book_id)

        mi.timestamp = utcnow() if mi.timestamp is None else mi.timestamp
        mi.pubdate = UNDEFINED_DATE if mi.pubdate is None else mi.pubdate
@@ -1787,6 +1806,7 @@ class Cache(object):
        self._clear_caches(book_ids=book_ids, template_cache=False, search_cache=False)
        for cc in self.cover_caches:
            cc.invalidate(book_ids)
        self.event_dispatcher(EventType.books_removed, book_ids)

    @read_api
    def author_sort_strings_for_books(self, book_ids):
@@ -1866,6 +1886,7 @@ class Cache(object):
                ab, idm = self._rename_items(field, default_process_map, change_index=change_index)
                affected_books.update(ab)
                id_map.update(idm)
                self.event_dispatcher(EventType.items_renamed, field, affected_books, id_map)
                return affected_books, id_map

        try:
@@ -1895,6 +1916,7 @@ class Cache(object):
            for book_id in moved_books:
                self._set_field(f.index_field.name, {book_id:self._get_next_series_num_for(self._fast_field_for(f, book_id), field=field)})
        self._mark_as_dirty(affected_books)
        self.event_dispatcher(EventType.items_renamed, field, affected_books, id_map)
        return affected_books, id_map

    @write_api
@@ -1913,6 +1935,7 @@ class Cache(object):
            self._set_field(field.index_field.name, {bid:1.0 for bid in affected_books})
        else:
            self._mark_as_dirty(affected_books)
        self.event_dispatcher(EventType.items_removed, field, affected_books, item_ids)
        return affected_books

    @write_api
@@ -2216,6 +2239,7 @@ class Cache(object):

    @write_api
    def close(self):
        self.event_dispatcher.close()
        from calibre.customize.ui import available_library_closed_plugins
        for plugin in available_library_closed_plugins():
            try:

View File

@ -0,0 +1,85 @@
#!/usr/bin/env python
# vim:fileencoding=utf-8
# License: GPL v3 Copyright: 2021, Kovid Goyal <kovid at kovidgoyal.net>
import weakref
from contextlib import suppress
from queue import Queue
from threading import Thread
from enum import Enum, auto
class EventType(Enum):
    '''
    The kinds of change events dispatched by the database. Each event is
    delivered to listeners as ``(event_type, data_tuple)``; the contents of
    the data tuple are documented per member below.
    '''

    #: Metadata changed for some books.
    #: Arguments: (name of changed field, set of affected book ids)
    metadata_changed = auto()

    #: A format was added to a book.
    #: Arguments: (book_id, format)
    format_added = auto()

    #: Formats were removed from books.
    #: Arguments: (mapping of book id to set of formats removed from the book)
    formats_removed = auto()

    #: A new book record was created in the database.
    #: Argument: the book id
    book_created = auto()

    #: Books were removed from the database.
    #: Argument: the ids of the removed books
    books_removed = auto()

    #: Items such as tags or authors were renamed in some or all books.
    #: Arguments: (field_name, affected book ids, map of old item id to new item id)
    items_renamed = auto()

    #: Items such as tags or authors were removed from some books.
    #: Arguments: (field_name, affected book ids, ids of removed items)
    items_removed = auto()
class EventDispatcher(Thread):
    '''
    Dispatches database change events to registered listener callbacks on a
    dedicated daemon thread, so listeners never run while database locks are
    held. Listeners are held via weakrefs so registering one does not keep
    it alive.
    '''
    # BUG FIX: the class must inherit from Thread. As originally written it
    # had no base class, yet __init__ chained to Thread.__init__ and
    # add_listener()/close() call self.start()/self.join(), which would fail
    # with AttributeError on a plain object.

    def __init__(self):
        super().__init__(name='DBListener', daemon=True)
        self.refs = []          # weakrefs to registered callbacks
        self.queue = Queue()    # pending (event_name, args) tuples; None is the stop sentinel
        self.activated = False  # becomes True once the worker thread is started

    def add_listener(self, callback):
        # note that we intentionally leak dead weakrefs. To not do so would
        # require using a lock to serialize access to self.refs. Given that
        # currently the use case for listeners is register one and leave it
        # forever, this is a worthwhile tradeoff
        #
        # NOTE(review): weakref.ref() of a bound method dies immediately;
        # callers should pass a plain function or keep a strong reference —
        # confirm against actual callers.
        self.remove_listener(callback)
        ref = weakref.ref(callback)
        self.refs.append(ref)
        if not self.activated:
            # Start the worker thread lazily, on first registration only;
            # a Thread may not be started twice.
            self.activated = True
            self.start()

    def remove_listener(self, callback):
        # Equal weakrefs compare equal while the referent is alive, so a
        # fresh ref is enough to find and remove the stored one.
        ref = weakref.ref(callback)
        with suppress(ValueError):
            self.refs.remove(ref)

    def __call__(self, event_name, *args):
        # Events are silently dropped until the first listener is registered.
        if self.activated:
            self.queue.put((event_name, args))

    def close(self):
        # Stop the worker thread (if it was ever started) and drop all
        # listener references. All queued events are delivered first.
        if self.activated:
            self.queue.put(None)
            self.join()
            self.refs = []

    def run(self):
        # Worker loop: deliver queued events to live listeners until the
        # None sentinel arrives.
        while True:
            val = self.queue.get()
            if val is None:
                break
            for ref in self.refs:
                listener = ref()
                if listener is not None:
                    listener(*val)