Fix write_lock being held on db when postimport plugins are run

Kovid Goyal 2013-08-05 00:00:34 +05:30
parent d03bfd7f3d
commit 27b36ebefc
2 changed files with 56 additions and 54 deletions
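The change below boils down to one locking rule: do the database mutation while holding the write lock, release the lock, and only then run the post-import hooks, so a hook can call back into the API without tripping over a non-reentrant lock. A minimal sketch of that pattern (hypothetical MiniCache and run_postimport_hooks names, using a plain threading.Lock rather than calibre's own RWLock):

import threading

class MiniCache(object):

    def __init__(self):
        # A plain, non-reentrant Lock is enough to show why holding it across
        # the hooks would self-deadlock; calibre's Cache uses its own RWLock.
        self.write_lock = threading.Lock()
        self.formats = {}

    def add_format(self, book_id, fmt, data, run_hooks=True):
        with self.write_lock:
            # The mutation happens under the write lock
            self.formats.setdefault(book_id, {})[fmt] = data
        # The lock is released here, before any hooks run
        if run_hooks:
            self.run_postimport_hooks(book_id, fmt)

    def run_postimport_hooks(self, book_id, fmt):
        # A post-import plugin may call the public API, which re-acquires the
        # write lock; that works only because add_format already released it.
        with self.write_lock:
            self.formats[book_id][fmt + '_POSTPROCESSED'] = True

c = MiniCache()
c.add_format(1, 'EPUB', b'...')  # completes; no deadlock when the hook re-enters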


@@ -106,26 +106,27 @@ def add_catalog(cache, path, title):
     from calibre.utils.date import utcnow
 
     fmt = os.path.splitext(path)[1][1:].lower()
-    with lopen(path, 'rb') as stream, cache.write_lock:
-        matches = cache._search('title:="%s" and tags:="%s"' % (title.replace('"', '\\"'), _('Catalog')), None)
-        db_id = None
-        if matches:
-            db_id = list(matches)[0]
-        try:
-            mi = get_metadata(stream, fmt)
-            mi.authors = ['calibre']
-        except:
-            mi = Metadata(title, ['calibre'])
-        mi.title, mi.authors = title, ['calibre']
-        mi.tags = [_('Catalog')]
-        mi.pubdate = mi.timestamp = utcnow()
-        if fmt == 'mobi':
-            mi.cover, mi.cover_data = None, (None, None)
-        if db_id is None:
-            db_id = cache._create_book_entry(mi, apply_import_tags=False)
-        else:
-            cache._set_metadata(db_id, mi)
-        cache._add_format(db_id, fmt, stream)
+    with lopen(path, 'rb') as stream:
+        with cache.write_lock:
+            matches = cache._search('title:="%s" and tags:="%s"' % (title.replace('"', '\\"'), _('Catalog')), None)
+            db_id = None
+            if matches:
+                db_id = list(matches)[0]
+            try:
+                mi = get_metadata(stream, fmt)
+                mi.authors = ['calibre']
+            except:
+                mi = Metadata(title, ['calibre'])
+            mi.title, mi.authors = title, ['calibre']
+            mi.tags = [_('Catalog')]
+            mi.pubdate = mi.timestamp = utcnow()
+            if fmt == 'mobi':
+                mi.cover, mi.cover_data = None, (None, None)
+            if db_id is None:
+                db_id = cache._create_book_entry(mi, apply_import_tags=False)
+            else:
+                cache._set_metadata(db_id, mi)
+        cache.add_format(db_id, fmt, stream)  # Cant keep write lock since post-import hooks might run
 
     return db_id
 
@@ -156,7 +157,7 @@ def add_news(cache, path, arg):
 
         mi.timestamp = utcnow()
         db_id = cache._create_book_entry(mi, apply_import_tags=False)
-        cache._add_format(db_id, fmt, stream)
+    cache.add_format(db_id, fmt, stream)  # Cant keep write lock since post-import hooks might run
 
     if not hasattr(path, 'read'):
         stream.close()


@@ -679,11 +679,10 @@ class Cache(object):
         fmtfile = self.format(book_id, original_fmt, as_file=True)
         if fmtfile is not None:
             fmt = original_fmt.partition('_')[2]
-            with self.write_lock:
-                with fmtfile:
-                    self._add_format(book_id, fmt, fmtfile, run_hooks=False)
-                    self._remove_formats({book_id:(original_fmt,)})
-                    return True
+            with fmtfile:
+                self.add_format(book_id, fmt, fmtfile, run_hooks=False)
+                self.remove_formats({book_id:(original_fmt,)})
+                return True
         return False
 
     @read_api
@@ -1150,38 +1149,40 @@ class Cache(object):
             self._reload_from_db()
             raise
 
-    @write_api
+    @api
     def add_format(self, book_id, fmt, stream_or_path, replace=True, run_hooks=True, dbapi=None):
-        if run_hooks:
-            # Run import plugins
-            npath = run_import_plugins(stream_or_path, fmt)
-            fmt = os.path.splitext(npath)[-1].lower().replace('.', '').upper()
-            stream_or_path = lopen(npath, 'rb')
-            fmt = check_ebook_format(stream_or_path, fmt)
+        with self.write_lock:
+            if run_hooks:
+                # Run import plugins
+                npath = run_import_plugins(stream_or_path, fmt)
+                fmt = os.path.splitext(npath)[-1].lower().replace('.', '').upper()
+                stream_or_path = lopen(npath, 'rb')
+                fmt = check_ebook_format(stream_or_path, fmt)
 
-        fmt = (fmt or '').upper()
-        self.format_metadata_cache[book_id].pop(fmt, None)
-        try:
-            name = self.fields['formats'].format_fname(book_id, fmt)
-        except:
-            name = None
+            fmt = (fmt or '').upper()
+            self.format_metadata_cache[book_id].pop(fmt, None)
+            try:
+                name = self.fields['formats'].format_fname(book_id, fmt)
+            except:
+                name = None
 
-        if name and not replace:
-            return False
+            if name and not replace:
+                return False
 
-        path = self._field_for('path', book_id).replace('/', os.sep)
-        title = self._field_for('title', book_id, default_value=_('Unknown'))
-        author = self._field_for('authors', book_id, default_value=(_('Unknown'),))[0]
-        stream = stream_or_path if hasattr(stream_or_path, 'read') else lopen(stream_or_path, 'rb')
-        size, fname = self.backend.add_format(book_id, fmt, stream, title, author, path)
-        del stream
+            path = self._field_for('path', book_id).replace('/', os.sep)
+            title = self._field_for('title', book_id, default_value=_('Unknown'))
+            author = self._field_for('authors', book_id, default_value=(_('Unknown'),))[0]
+            stream = stream_or_path if hasattr(stream_or_path, 'read') else lopen(stream_or_path, 'rb')
+            size, fname = self.backend.add_format(book_id, fmt, stream, title, author, path)
+            del stream
 
-        max_size = self.fields['formats'].table.update_fmt(book_id, fmt, fname, size, self.backend)
-        self.fields['size'].table.update_sizes({book_id: max_size})
-        self._update_last_modified((book_id,))
+            max_size = self.fields['formats'].table.update_fmt(book_id, fmt, fname, size, self.backend)
+            self.fields['size'].table.update_sizes({book_id: max_size})
+            self._update_last_modified((book_id,))
 
         if run_hooks:
-            # Run post import plugins
+            # Run post import plugins, the write lock is released so the plugin
+            # can call api without a locking violation.
            run_plugins_on_postimport(dbapi or self, book_id, fmt)
             stream_or_path.close()
 
@@ -1305,17 +1306,17 @@ class Cache(object):
         return book_id
 
-    @write_api
+    @api
     def add_books(self, books, add_duplicates=True, apply_import_tags=True, preserve_uuid=False, run_hooks=True, dbapi=None):
         duplicates, ids = [], []
         for mi, format_map in books:
-            book_id = self._create_book_entry(mi, add_duplicates=add_duplicates, apply_import_tags=apply_import_tags, preserve_uuid=preserve_uuid)
+            book_id = self.create_book_entry(mi, add_duplicates=add_duplicates, apply_import_tags=apply_import_tags, preserve_uuid=preserve_uuid)
             if book_id is None:
                 duplicates.append((mi, format_map))
             else:
                 ids.append(book_id)
                 for fmt, stream_or_path in format_map.iteritems():
-                    self._add_format(book_id, fmt, stream_or_path, dbapi=dbapi, run_hooks=run_hooks)
+                    self.add_format(book_id, fmt, stream_or_path, dbapi=dbapi, run_hooks=run_hooks)
         return ids, duplicates
 
     @write_api