Replace old add code

Kovid Goyal 2014-11-12 16:37:47 +05:30
parent ba2daf3e02
commit b1fb8596b2
3 changed files with 409 additions and 905 deletions

src/calibre/gui2/actions/add.py (View File)

@@ -161,7 +161,7 @@ class AddAction(InterfaceAction):
self.do_add_recursive(root, single)
def do_add_recursive(self, root, single, list_of_archives=False):
-from calibre.gui2.add2 import Adder
+from calibre.gui2.add import Adder
Adder(root, single_book_per_directory=single, db=self.gui.current_db, list_of_archives=list_of_archives,
callback=self._files_added, parent=self.gui, pool=self.gui.spare_pool())
@@ -365,7 +365,7 @@ class AddAction(InterfaceAction):
'cardb' if self.gui.stack.currentIndex() == 3 else None
if not paths:
return
-from calibre.gui2.add2 import Adder
+from calibre.gui2.add import Adder
Adder(paths, db=None if to_device else self.gui.current_db,
parent=self.gui, callback=partial(self._files_added, on_card=on_card), pool=self.gui.spare_pool())
@@ -485,6 +485,6 @@ class AddAction(InterfaceAction):
show=True)
if ok_paths:
-from calibre.gui2.add2 import Adder
+from calibre.gui2.add import Adder
callback = partial(self._add_from_device_adder, on_card=None, model=view.model())
Adder(ok_paths, db=None, parent=self.gui, callback=callback, pool=self.gui.spare_pool())

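The three hunks above are the only caller-side changes in this commit: the import moves from calibre.gui2.add2 to calibre.gui2.add, while the keyword-style call signature stays the same. A minimal sketch of that call pattern, assuming a gui object with the current_db and spare_pool() attributes used above (the callback receives the Adder itself when the run completes):

    from calibre.gui2.add import Adder

    def files_added(adder):
        # The new Adder reports completion by handing itself to the
        # callback; callers read attributes such as number_of_books_added.
        print('%d books added' % adder.number_of_books_added)

    Adder('/some/folder/of/books', single_book_per_directory=True,
          db=gui.current_db, parent=gui, callback=files_added,
          pool=gui.spare_pool())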
src/calibre/gui2/add.py (View File)

@@ -1,496 +1,437 @@
-'''
-UI for adding books to the database and saving books to disk
-'''
-import os, shutil, time
-from Queue import Queue, Empty
-from functools import partial
+#!/usr/bin/env python
+# vim:fileencoding=utf-8
+from __future__ import (unicode_literals, division, absolute_import,
+print_function)
-from PyQt5.Qt import QThread, QObject, Qt, QProgressDialog, pyqtSignal, QTimer
+__license__ = 'GPL v3'
+__copyright__ = '2014, Kovid Goyal <kovid at kovidgoyal.net>'
+import shutil, os, weakref, traceback, tempfile, time
+from threading import Thread
+from collections import OrderedDict
+from Queue import Empty
+from io import BytesIO
+from PyQt5.Qt import QObject, Qt, pyqtSignal
+from calibre import prints
+from calibre.constants import DEBUG
+from calibre.customize.ui import run_plugins_on_postimport
-from calibre.ptempfile import PersistentTemporaryDirectory
-from calibre.gui2.dialogs.progress import ProgressDialog
-from calibre.gui2 import (error_dialog, info_dialog, gprefs,
-warning_dialog, available_width)
+from calibre.db.adding import find_books_in_directory
+from calibre.db.utils import find_identical_books
+from calibre.ebooks.metadata.book.base import Metadata
+from calibre.ebooks.metadata.opf2 import OPF
-from calibre.ebooks.metadata import MetaInformation
-from calibre.constants import preferred_encoding, filesystem_encoding, DEBUG
+from calibre.gui2 import error_dialog, warning_dialog, gprefs
+from calibre.gui2.dialogs.duplicates import DuplicatesQuestion
+from calibre.gui2.dialogs.progress import ProgressDialog
+from calibre.ptempfile import PersistentTemporaryDirectory
+from calibre.utils import join_with_timeout
+from calibre.utils.config import prefs
-from calibre import prints, force_unicode, as_unicode
+from calibre.utils.ipc.pool import Pool, Failure
-single_shot = partial(QTimer.singleShot, 75)
def validate_source(source, parent=None): # {{{
if isinstance(source, basestring):
if not os.path.exists(source):
error_dialog(parent, _('Cannot add books'), _(
'The path %s does not exist') % source, show=True)
return False
if not os.access(source, os.X_OK|os.R_OK):
error_dialog(parent, _('Cannot add books'), _(
'You do not have permission to read %s') % source, show=True)
return False
else:
ok = False
for path in source:
if os.access(path, os.R_OK):
ok = True
break
if not ok:
error_dialog(parent, _('Cannot add books'), _(
'You do not have permission to read any of the selected files'),
det_msg='\n'.join(source), show=True)
return False
return True
# }}}
class DuplicatesAdder(QObject): # {{{
class Adder(QObject):
added = pyqtSignal(object)
adding_done = pyqtSignal()
do_one_signal = pyqtSignal()
def __init__(self, parent, db, duplicates, db_adder):
QObject.__init__(self, parent)
self.db, self.db_adder = db, db_adder
self.duplicates = list(duplicates)
self.count = 0
single_shot(self.add_one)
def add_one(self):
if not self.duplicates:
self.adding_done.emit()
def __init__(self, source, single_book_per_directory=True, db=None, parent=None, callback=None, pool=None, list_of_archives=False):
if not validate_source(source, parent):
return
mi, cover, formats = self.duplicates.pop()
formats = [f for f in formats if not f.lower().endswith('.opf')]
id = self.db.create_book_entry(mi, cover=cover,
add_duplicates=True)
# here we add all the formats for dupe book record created above
self.db_adder.add_formats(id, formats)
self.db_adder.number_of_books_added += 1
self.db_adder.auto_convert_books.add(id)
self.count += 1
self.added.emit(self.count)
single_shot(self.add_one)
# }}}
class RecursiveFind(QThread): # {{{
update = pyqtSignal(object)
found = pyqtSignal(object)
def __init__(self, parent, db, root, single, tdir=None):
QThread.__init__(self, parent)
self.db = db
self.path = root
self.tdir = tdir
self.single_book_per_directory = single
self.canceled = False
def walk(self, root):
self.books = []
for dirpath in os.walk(root):
if self.canceled:
return
self.update.emit(
_('Searching in')+' '+force_unicode(dirpath[0],
filesystem_encoding))
self.books += list(self.db.find_books_in_directory(dirpath[0],
self.single_book_per_directory))
def extract(self):
if self.path.lower().endswith('.zip'):
from calibre.utils.zipfile import ZipFile
try:
with ZipFile(self.path) as zf:
zf.extractall(self.tdir)
except Exception:
prints('Corrupt ZIP file, trying to use local headers')
from calibre.utils.localunzip import extractall
extractall(self.path, self.tdir)
elif self.path.lower().endswith('.rar'):
from calibre.utils.unrar import extract
extract(self.path, self.tdir)
else:
raise ValueError('Can only process ZIP or RAR archives')
def run(self):
if self.tdir is not None:
try:
self.extract()
except Exception as err:
import traceback
traceback.print_exc()
msg = as_unicode(err)
self.found.emit(msg)
return
self.path = self.tdir
root = os.path.abspath(self.path)
try:
self.walk(root)
except:
try:
if isinstance(root, unicode):
root = root.encode(filesystem_encoding)
self.walk(root)
except Exception as err:
import traceback
traceback.print_exc()
msg = as_unicode(err)
self.found.emit(msg)
return
self.books = [formats for formats in self.books if formats]
if not self.canceled:
self.found.emit(self.books)
# }}}
class DBAdder(QObject): # {{{
def __init__(self, parent, db, ids, nmap):
QObject.__init__(self, parent)
self.db, self.ids, self.nmap = db, dict(**ids), dict(**nmap)
self.critical = {}
self.number_of_books_added = 0
self.single_book_per_directory = single_book_per_directory
self.list_of_archives = list_of_archives
self.callback = callback
self.add_formats_to_existing = prefs['add_formats_to_existing']
self.do_one_signal.connect(self.tick, type=Qt.QueuedConnection)
self.tdir = PersistentTemporaryDirectory('_add_books')
self.pool = pool
self.pd = ProgressDialog(_('Adding books...'), _('Scanning for files...'), min=0, max=0, parent=parent, icon='add_book.png')
self.db = getattr(db, 'new_api', None)
if self.db is not None:
self.dbref = weakref.ref(db)
self.source = source
self.tdir = PersistentTemporaryDirectory('_add_books')
self.scan_error = None
self.file_groups = OrderedDict()
self.abort_scan = False
self.duplicates = []
self.names, self.paths, self.infos = [], [], []
self.input_queue = Queue()
self.output_queue = Queue()
self.merged_books = set([])
self.auto_convert_books = set()
self.report = []
self.items = []
self.added_book_ids = set()
self.merged_books = set()
self.added_duplicate_info = set()
self.pd.show()
def end(self):
if (gprefs['manual_add_auto_convert'] and
self.auto_convert_books):
from calibre.gui2.ui import get_gui
gui = get_gui()
gui.iactions['Convert Books'].auto_convert_auto_add(
self.auto_convert_books)
self.scan_thread = Thread(target=self.scan, name='ScanBooks')
self.scan_thread.daemon = True
self.scan_thread.start()
self.do_one = self.monitor_scan
self.do_one_signal.emit()
if DEBUG:
self.start_time = time.time()
self.input_queue.put((None, None, None))
def break_cycles(self):
self.abort_scan = True
self.pd.close()
self.pd.deleteLater()
shutil.rmtree(self.tdir, ignore_errors=True)
if self.pool is not None:
self.pool.shutdown()
if not self.items:
shutil.rmtree(self.tdir, ignore_errors=True)
self.setParent(None)
self.find_identical_books_data = self.merged_books = self.added_duplicate_info = self.pool = self.items = self.duplicates = self.pd = self.db = self.dbref = self.tdir = self.file_groups = self.scan_thread = None # noqa
self.deleteLater()
def tick(self):
if self.pd.canceled:
try:
if callable(self.callback):
self.callback(self)
finally:
self.break_cycles()
return
self.do_one()
# Filesystem scan {{{
def scan(self):
def find_files(root):
for dirpath, dirnames, filenames in os.walk(root):
for files in find_books_in_directory(dirpath, self.single_book_per_directory):
if self.abort_scan:
return
if files:
self.file_groups[len(self.file_groups)] = files
def extract(source):
tdir = tempfile.mkdtemp(suffix='_archive', dir=self.tdir)
if source.lower().endswith('.zip'):
from calibre.utils.zipfile import ZipFile
try:
with ZipFile(source) as zf:
zf.extractall(tdir)
except Exception:
prints('Corrupt ZIP file, trying to use local headers')
from calibre.utils.localunzip import extractall
extractall(source, tdir)
elif source.lower().endswith('.rar'):
from calibre.utils.unrar import extract
extract(source, tdir)
return tdir
def start(self):
try:
id, opf, cover = self.input_queue.get_nowait()
if isinstance(self.source, basestring):
find_files(self.source)
else:
unreadable_files = []
for path in self.source:
if self.abort_scan:
return
if os.access(path, os.R_OK):
if self.list_of_archives:
find_files(extract(path))
else:
self.file_groups[len(self.file_groups)] = [path]
else:
unreadable_files.append(path)
if unreadable_files:
if not self.file_groups:
self.scan_error = _('You do not have permission to read the selected file(s).') + '\n'
self.scan_error += '\n'.join(unreadable_files)
else:
a = self.report.append
for f in unreadable_files:
a(_('Could not add %s as you do not have permission to read the file') % f)
a('')
except Exception:
self.scan_error = traceback.format_exc()
def monitor_scan(self):
self.scan_thread.join(0.05)
if self.scan_thread.is_alive():
self.do_one_signal.emit()
return
if self.scan_error is not None:
error_dialog(self.pd, _('Cannot add books'), _(
'Failed to add any books, click "Show details" for more information.'),
det_msg=self.scan_error, show=True)
self.break_cycles()
return
if not self.file_groups:
error_dialog(self.pd, _('Could not add'), _(
'No ebook files were found in %s') % self.source, show=True)
self.break_cycles()
return
self.pd.max = len(self.file_groups)
self.pd.title = _('Reading metadata and adding to library (%d books)...') % self.pd.max
self.pd.msg = ''
self.pd.value = 0
self.pool = Pool(name='AddBooks') if self.pool is None else self.pool
if self.db is not None:
if self.add_formats_to_existing:
self.find_identical_books_data = self.db.data_for_find_identical_books()
else:
try:
self.pool.set_common_data(self.db.data_for_has_book())
except Failure as err:
error_dialog(self.pd, _('Cannot add books'), _(
'Failed to add any books, click "Show details" for more information.'),
det_msg=unicode(err.failure_message) + '\n' + unicode(err.details), show=True)
self.pd.canceled = True
self.groups_to_add = iter(self.file_groups)
self.do_one = self.do_one_group
self.do_one_signal.emit()
# }}}
def do_one_group(self):
try:
group_id = next(self.groups_to_add)
except StopIteration:
self.do_one = self.monitor_pool
self.do_one_signal.emit()
return
try:
self.pool(group_id, 'calibre.ebooks.metadata.worker', 'read_metadata',
self.file_groups[group_id], group_id, self.tdir)
except Failure as err:
error_dialog(self.pd, _('Cannot add books'), _(
'Failed to add any books, click "Show details" for more information.'),
det_msg=unicode(err.failure_message) + '\n' + unicode(err.details), show=True)
self.pd.canceled = True
self.do_one_signal.emit()
def monitor_pool(self):
try:
worker_result = self.pool.results.get(True, 0.05)
self.pool.results.task_done()
except Empty:
single_shot(self.start)
try:
self.pool.wait_for_tasks(timeout=0.01)
except RuntimeError:
pass # Tasks still remaining
except Failure as err:
error_dialog(self.pd, _('Cannot add books'), _(
'Failed to add some books, click "Show details" for more information.'),
det_msg=unicode(err.failure_message) + '\n' + unicode(err.details), show=True)
self.pd.canceled = True
else:
# All tasks completed
try:
join_with_timeout(self.pool.results, 0.01)
except RuntimeError:
pass # There are results remaining
else:
# No results left
self.process_duplicates()
return
else:
group_id = worker_result.id
if worker_result.is_terminal_failure:
error_dialog(self.pd, _('Critical failure'), _(
'The read metadata worker process crashed while processing'
' some files. Adding of books is aborted. Click "Show details"'
' to see which files caused the problem.'), show=True,
det_msg='\n'.join(self.file_groups[group_id]))
self.pd.canceled = True
else:
try:
self.process_result(group_id, worker_result.result)
except Exception:
self.report_metadata_failure(group_id, traceback.format_exc())
self.pd.value += 1
self.do_one_signal.emit()
def report_metadata_failure(self, group_id, details):
a = self.report.append
paths = self.file_groups[group_id]
a(''), a('-' * 70)
a(_('Failed to read metadata from the file(s):'))
[a('\t' + f) for f in paths]
a(_('With error:')), a(details)
mi = Metadata(_('Unknown'))
mi.read_metadata_failed = False
return mi
def process_result(self, group_id, result):
if result.err:
mi = self.report_metadata_failure(group_id, result.traceback)
paths = self.file_groups[group_id]
has_cover = False
duplicate_info = set() if self.add_formats_to_existing else False
else:
paths, opf, has_cover, duplicate_info = result.value
try:
mi = OPF(BytesIO(opf), basedir=self.tdir, populate_spine=False, try_to_guess_cover=False).to_book_metadata()
mi.read_metadata_failed = False
except Exception:
mi = self.report_metadata_failure(group_id, traceback.format_exc())
if mi.is_null('title'):
for path in paths:
mi.title = os.path.splitext(os.path.basename(path))[0]
break
if mi.application_id == '__calibre_dummy__':
mi.application_id = None
self.pd.msg = mi.title
cover_path = os.path.join(self.tdir, '%s.cdata' % group_id) if has_cover else None
if self.db is None:
if paths:
self.items.append((mi, cover_path, paths))
return
if id is None and opf is None and cover is None:
return
name = self.nmap.pop(id)
title = None
if self.add_formats_to_existing:
identical_book_ids = find_identical_books(mi, self.find_identical_books_data)
if identical_book_ids:
try:
self.merge_books(mi, cover_path, paths, identical_book_ids)
except Exception:
a = self.report.append
a(''), a('-' * 70)
a(_('Failed to merge the book: ') + mi.title)
[a('\t' + f) for f in paths]
a(_('With error:')), a(traceback.format_exc())
else:
self.add_book(mi, cover_path, paths)
else:
if duplicate_info or icu_lower(mi.title or _('Unknown')) in self.added_duplicate_info:
self.duplicates.append((mi, cover_path, paths))
else:
self.add_book(mi, cover_path, paths)
def merge_books(self, mi, cover_path, paths, identical_book_ids):
self.merged_books.add((mi.title, ' & '.join(mi.authors)))
seen_fmts = set()
replace = gprefs['automerge'] == 'overwrite'
for identical_book_id in identical_book_ids:
ib_fmts = {fmt.upper() for fmt in self.db.formats(identical_book_id)}
seen_fmts |= ib_fmts
self.add_formats(identical_book_id, paths, mi, replace=replace)
if gprefs['automerge'] == 'new record':
incoming_fmts = {path.rpartition(os.extsep)[-1].upper() for path in paths}
if incoming_fmts.intersection(seen_fmts):
# There was at least one duplicate format so create a new
# record and put the incoming formats into it We should
# arguably put only the duplicate formats, but no real harm is
# done by having all formats
self.add_book(mi, cover_path, paths)
def add_book(self, mi, cover_path, paths):
if DEBUG:
st = time.time()
try:
title = self.add(id, opf, cover, name)
except:
import traceback
self.critical[name] = traceback.format_exc()
title = name
self.output_queue.put(title)
if DEBUG:
prints('Added', title, 'to db in:', time.time() - st, 'seconds')
single_shot(self.start)
def process_formats(self, opf, formats):
imp = opf[:-4]+'.import'
if not os.access(imp, os.R_OK):
return formats
fmt_map = {}
for line in open(imp, 'rb').readlines():
if ':' not in line:
continue
f, _, p = line.partition(':')
fmt_map[f] = p.rstrip()
fmts = []
for fmt in formats:
e = os.path.splitext(fmt)[1].replace('.', '').lower()
fmts.append(fmt_map.get(e, fmt))
if not os.access(fmts[-1], os.R_OK):
fmts[-1] = fmt
return fmts
def add(self, id, opf, cover, name):
formats = self.ids.pop(id)
if opf.endswith('.error'):
mi = MetaInformation('', [_('Unknown')])
self.critical[name] = open(opf, 'rb').read().decode('utf-8', 'replace')
else:
try:
mi = OPF(opf, try_to_guess_cover=False, basedir=os.path.dirname(opf)).to_book_metadata()
except:
import traceback
mi = MetaInformation('', [_('Unknown')])
self.critical[name] = traceback.format_exc()
formats = self.process_formats(opf, formats)
if not mi.title:
mi.title = os.path.splitext(name)[0]
mi.title = mi.title if isinstance(mi.title, unicode) else \
mi.title.decode(preferred_encoding, 'replace')
if mi.application_id == '__calibre_dummy__':
mi.application_id = None
if self.db is not None:
if cover:
with open(cover, 'rb') as f:
cover = f.read()
orig_formats = formats
formats = [f2 for f2 in formats if not f2.lower().endswith('.opf')]
if prefs['add_formats_to_existing']: # automerge is on
identical_book_list = self.db.find_identical_books(mi)
if identical_book_list: # books with same author and nearly same title exist in db
self.merged_books.add((mi.title, ' & '.join(mi.authors)))
seen_fmts = set([])
for identical_book in identical_book_list:
ib_fmts = self.db.formats(identical_book, index_is_id=True)
if ib_fmts:
seen_fmts |= set(ib_fmts.split(','))
replace = gprefs['automerge'] == 'overwrite'
self.add_formats(identical_book, formats,
replace=replace)
if gprefs['automerge'] == 'new record':
incoming_fmts = \
set([os.path.splitext(path)[-1].replace('.',
'').upper() for path in formats])
if incoming_fmts.intersection(seen_fmts):
# There was at least one duplicate format
# so create a new record and put the
# incoming formats into it
# We should arguably put only the duplicate
# formats, but no real harm is done by having
# all formats
id_ = self.db.create_book_entry(mi, cover=cover,
add_duplicates=True)
self.number_of_books_added += 1
self.add_formats(id_, formats)
else:
# books with same author and nearly same title do not exist in db
id_ = self.db.create_book_entry(mi, cover=cover, add_duplicates=True)
self.number_of_books_added += 1
self.add_formats(id_, formats)
else: # automerge is off
id_ = self.db.create_book_entry(mi, cover=cover, add_duplicates=False)
if id_ is None:
self.duplicates.append((mi, cover, orig_formats))
else:
self.add_formats(id_, formats)
self.auto_convert_books.add(id_)
self.number_of_books_added += 1
else:
self.names.append(name)
self.paths.append(formats[0])
self.infos.append(mi)
return mi.title
def add_formats(self, id, formats, replace=True):
for path in formats:
fmt = os.path.splitext(path)[-1].replace('.', '').upper()
with open(path, 'rb') as f:
# At this point, the filetype on import plugins have already
# been run by the metadata reading code, so we only need to run
# the postimport plugins, on a successful add.
if self.db.add_format(id, fmt, f, index_is_id=True, notify=False, replace=replace):
run_plugins_on_postimport(self.db, id, fmt)
# }}}
class Adder(QObject): # {{{
ADD_TIMEOUT = 900 # seconds (15 minutes)
def __init__(self, parent, db, callback, spare_server=None):
QObject.__init__(self, parent)
self.pd = ProgressDialog(_('Adding...'), parent=parent, icon='add_book.png')
self.pd.setMaximumWidth(min(600, int(available_width()*0.75)))
self.spare_server = spare_server
self.db = db
self.pd.setModal(True)
self.pd.show()
self._parent = parent
self.rfind = self.worker = None
self.callback = callback
self.callback_called = False
self.pd.canceled_signal.connect(self.canceled)
def add_recursive(self, root, single=True):
if os.path.exists(root) and os.path.isfile(root) and root.lower().rpartition('.')[-1] in {'zip', 'rar'}:
self.path = tdir = PersistentTemporaryDirectory('_arcv_')
else:
self.path = root
tdir = None
self.pd.set_msg(_('Searching in all sub-directories...'))
self.pd.set_min(0)
self.pd.set_max(0)
self.pd.value = 0
self.rfind = RecursiveFind(self, self.db, root, single, tdir=tdir)
self.rfind.update.connect(self.pd.set_msg, type=Qt.QueuedConnection)
self.rfind.found.connect(self.add, type=Qt.QueuedConnection)
self.rfind.start()
def add(self, books):
if isinstance(books, basestring):
error_dialog(self.pd, _('Path error'),
_('The specified directory could not be processed.'),
det_msg=books, show=True)
return self.canceled()
if not books:
info_dialog(self.pd, _('No books'),
_('No books found'), show=True)
return self.canceled()
books = [[b] if isinstance(b, basestring) else b for b in books]
restricted = set()
for i in xrange(len(books)):
files = books[i]
restrictedi = set(f for f in files if not os.access(f, os.R_OK))
if restrictedi:
files = [f for f in files if os.access(f, os.R_OK)]
books[i] = files
restricted |= restrictedi
if restrictedi:
det_msg = u'\n'.join(restrictedi)
warning_dialog(self.pd, _('No permission'),
_('Cannot add some files as you do not have '
' permission to access them. Click Show'
' Details to see the list of such files.'),
det_msg=det_msg, show=True)
books = list(filter(None, books))
if not books:
return self.canceled()
self.rfind = None
from calibre.ebooks.metadata.worker import read_metadata
self.rq = Queue()
tasks = []
self.ids = {}
self.nmap = {}
self.duplicates = []
for i, b in enumerate(books):
tasks.append((i, b))
self.ids[i] = b
self.nmap[i] = os.path.basename(b[0])
self.worker = read_metadata(tasks, self.rq,
spare_server=self.spare_server)
self.pd.set_min(0)
self.pd.set_max(len(self.ids))
self.pd.value = 0
self.db_adder = DBAdder(self, self.db, self.ids, self.nmap)
self.db_adder.start()
self.last_added_at = time.time()
self.entry_count = len(self.ids)
self.continue_updating = True
single_shot(self.update)
def canceled(self):
self.continue_updating = False
if self.rfind is not None:
self.rfind.canceled = True
if self.worker is not None:
self.worker.canceled = True
if hasattr(self, 'db_adder'):
self.db_adder.end()
self.pd.hide()
if not self.callback_called:
self.callback(self.paths, self.names, self.infos)
self.callback_called = True
def duplicates_processed(self):
self.db_adder.end()
if not self.callback_called:
self.callback(self.paths, self.names, self.infos)
self.callback_called = True
if hasattr(self, '__p_d'):
self.__p_d.hide()
def update(self):
if self.entry_count <= 0:
self.continue_updating = False
self.pd.hide()
self.process_duplicates()
cdata = None
if cover_path:
with open(cover_path, 'rb') as f:
cdata = f.read()
book_id = self.dbref().create_book_entry(mi, cover=cdata)
self.added_book_ids.add(book_id)
except Exception:
a = self.report.append
a(''), a('-' * 70)
a(_('Failed to add the book: ') + mi.title)
[a('\t' + f) for f in paths]
a(_('With error:')), a(traceback.format_exc())
return
self.add_formats(book_id, paths, mi)
try:
id, opf, cover = self.rq.get_nowait()
self.db_adder.input_queue.put((id, opf, cover))
self.last_added_at = time.time()
except Empty:
pass
if self.add_formats_to_existing:
self.db.update_data_for_find_identical_books(book_id, self.find_identical_books_data)
else:
self.added_duplicate_info.add(icu_lower(mi.title or _('Unknown')))
except Exception:
# Ignore this exception since all it means is that duplicate
# detection/automerge will fail for this book.
traceback.print_exc()
if DEBUG:
prints('Added', mi.title, 'to db in: %.1f' % (time.time() - st))
try:
title = self.db_adder.output_queue.get_nowait()
self.pd.value += 1
self.pd.set_msg(_('Added')+' '+title)
self.last_added_at = time.time()
self.entry_count -= 1
except Empty:
pass
if (time.time() - self.last_added_at) > self.ADD_TIMEOUT:
self.continue_updating = False
self.pd.hide()
self.db_adder.end()
if not self.callback_called:
self.callback([], [], [])
self.callback_called = True
error_dialog(self._parent, _('Adding failed'),
_('The add books process seems to have hung.'
' Try restarting calibre and adding the '
'books in smaller increments, until you '
'find the problem book.'), show=True)
if self.continue_updating:
single_shot(self.update)
def add_formats(self, book_id, paths, mi, replace=True):
fmap = {p.rpartition(os.path.extsep)[-1].lower():p for p in paths}
for fmt, path in fmap.iteritems():
# The onimport plugins have already been run by the read metadata
# worker
try:
if self.db.add_format(book_id, fmt, path, run_hooks=False, replace=replace):
run_plugins_on_postimport(self.dbref(), book_id, fmt)
except Exception:
a = self.report.append
a(''), a('-' * 70)
a(_('Failed to add the file {0} to the book: {1}').format(path, mi.title))
a(_('With error:')), a(traceback.format_exc())
def process_duplicates(self):
duplicates = self.db_adder.duplicates
if not duplicates:
return self.duplicates_processed()
self.pd.hide()
from calibre.gui2.dialogs.duplicates import DuplicatesQuestion
self.__d_q = d = DuplicatesQuestion(self.db, duplicates, self._parent)
duplicates = tuple(d.duplicates)
if duplicates:
pd = QProgressDialog(_('Adding duplicates...'), '', 0, len(duplicates),
self._parent)
pd.setCancelButton(None)
pd.setValue(0)
pd.show()
self.__p_d = pd
self.__d_a = DuplicatesAdder(self._parent, self.db, duplicates,
self.db_adder)
self.__d_a.added.connect(pd.setValue)
self.__d_a.adding_done.connect(self.duplicates_processed)
else:
return self.duplicates_processed()
if self.duplicates:
d = DuplicatesQuestion(self.dbref(), self.duplicates, self.pd)
duplicates = tuple(d.duplicates)
d.deleteLater()
if duplicates:
self.do_one = self.process_duplicate
self.duplicates_to_process = iter(duplicates)
self.do_one_signal.emit()
return
self.finish()
def cleanup(self):
if hasattr(self, 'pd'):
self.pd.hide()
if hasattr(self, 'worker') and hasattr(self.worker, 'tdir') and \
self.worker.tdir is not None:
if os.path.exists(self.worker.tdir):
try:
shutil.rmtree(self.worker.tdir)
except:
pass
self._parent = None
self.pd.setParent(None)
del self.pd
self.pd = None
if hasattr(self, 'db_adder'):
self.db_adder.setParent(None)
del self.db_adder
self.db_adder = None
def process_duplicate(self):
try:
mi, cover_path, paths = next(self.duplicates_to_process)
except StopIteration:
self.finish()
return
self.add_book(mi, cover_path, paths)
self.do_one_signal.emit()
def finish(self):
if DEBUG:
prints('Added %s books in %.1f seconds' % (len(self.added_book_ids or self.items), time.time() - self.start_time))
if self.report:
added_some = self.items or self.added_book_ids
d = warning_dialog if added_some else error_dialog
msg = _('There were problems adding some files, click "Show details" for more information') if added_some else _(
'Failed to add any books, click "Show details" for more information')
d(self.pd, _('Errors while adding'), msg, det_msg='\n'.join(self.report), show=True)
if gprefs['manual_add_auto_convert'] and self.added_book_ids and self.parent() is not None:
self.parent().iactions['Convert Books'].auto_convert_auto_add(
self.added_book_ids)
try:
if callable(self.callback):
self.callback(self)
finally:
self.break_cycles()
@property
def number_of_books_added(self):
return getattr(getattr(self, 'db_adder', None), 'number_of_books_added',
0)
@property
def merged_books(self):
return getattr(getattr(self, 'db_adder', None), 'merged_books',
set([]))
@property
def critical(self):
return getattr(getattr(self, 'db_adder', None), 'critical',
{})
@property
def paths(self):
return getattr(getattr(self, 'db_adder', None), 'paths',
[])
@property
def names(self):
return getattr(getattr(self, 'db_adder', None), 'names',
[])
@property
def infos(self):
return getattr(getattr(self, 'db_adder', None), 'infos',
[])
# }}}
return len(self.added_book_ids)
# TODO: Test direct add of books to device
# TODO: Test adding from device to library
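For orientation before the deleted add2.py listing below: the new implementation reads metadata in worker processes via calibre.utils.ipc.pool. A rough sketch of that protocol as used by do_one_group() and monitor_pool() above, with placeholder paths and a single file group:

    from Queue import Empty
    from calibre.utils.ipc.pool import Pool, Failure

    pool = Pool(name='AddBooks')
    try:
        # Schedule read_metadata(files, group_id, tdir) in a worker
        # process, tagged with the group id so the result can be
        # matched back to its file group.
        pool(0, 'calibre.ebooks.metadata.worker', 'read_metadata',
             ['/tmp/book.epub'], 0, '/tmp/scratch')
    except Failure as err:
        print(err.failure_message)

    # Results arrive on a queue; poll with a short timeout so the GUI
    # event loop is never blocked for long.
    try:
        worker_result = pool.results.get(True, 0.05)
        pool.results.task_done()
        print(worker_result.id, worker_result.is_terminal_failure)
    except Empty:
        pass
    pool.shutdown()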

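The other structural change worth noting: the old Adder was a tangle of QThread, QTimer polling and helper QObjects, while the new one drives itself with a single queued self-signal, one step per event-loop turn (tick() dispatches to whatever self.do_one currently points at). A stripped-down sketch of that pattern, assuming PyQt5; the step names are illustrative:

    import sys
    from PyQt5.Qt import QCoreApplication, QObject, Qt, pyqtSignal

    class StepMachine(QObject):
        do_one_signal = pyqtSignal()

        def __init__(self):
            QObject.__init__(self)
            # A QueuedConnection returns control to the event loop
            # between steps, which keeps the ProgressDialog responsive
            # without a dedicated GUI-side thread.
            self.do_one_signal.connect(self.tick, type=Qt.QueuedConnection)
            self.do_one = self.scan_step
            self.do_one_signal.emit()

        def tick(self):
            self.do_one()

        def scan_step(self):
            print('scanning')
            self.do_one = self.add_step
            self.do_one_signal.emit()

        def add_step(self):
            print('adding')
            QCoreApplication.instance().quit()

    app = QCoreApplication(sys.argv)
    machine = StepMachine()
    sys.exit(app.exec_())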
src/calibre/gui2/add2.py (View File)

@@ -1,437 +0,0 @@
#!/usr/bin/env python
# vim:fileencoding=utf-8
from __future__ import (unicode_literals, division, absolute_import,
print_function)
__license__ = 'GPL v3'
__copyright__ = '2014, Kovid Goyal <kovid at kovidgoyal.net>'
import shutil, os, weakref, traceback, tempfile, time
from threading import Thread
from collections import OrderedDict
from Queue import Empty
from io import BytesIO
from PyQt5.Qt import QObject, Qt, pyqtSignal
from calibre import prints
from calibre.constants import DEBUG
from calibre.customize.ui import run_plugins_on_postimport
from calibre.db.adding import find_books_in_directory
from calibre.db.utils import find_identical_books
from calibre.ebooks.metadata.book.base import Metadata
from calibre.ebooks.metadata.opf2 import OPF
from calibre.gui2 import error_dialog, warning_dialog, gprefs
from calibre.gui2.dialogs.duplicates import DuplicatesQuestion
from calibre.gui2.dialogs.progress import ProgressDialog
from calibre.ptempfile import PersistentTemporaryDirectory
from calibre.utils import join_with_timeout
from calibre.utils.config import prefs
from calibre.utils.ipc.pool import Pool, Failure
def validate_source(source, parent=None): # {{{
if isinstance(source, basestring):
if not os.path.exists(source):
error_dialog(parent, _('Cannot add books'), _(
'The path %s does not exist') % source, show=True)
return False
if not os.access(source, os.X_OK|os.R_OK):
error_dialog(parent, _('Cannot add books'), _(
'You do not have permission to read %s') % source, show=True)
return False
else:
ok = False
for path in source:
if os.access(path, os.R_OK):
ok = True
break
if not ok:
error_dialog(parent, _('Cannot add books'), _(
'You do not have permission to read any of the selected files'),
det_msg='\n'.join(source), show=True)
return False
return True
# }}}
class Adder(QObject):
do_one_signal = pyqtSignal()
def __init__(self, source, single_book_per_directory=True, db=None, parent=None, callback=None, pool=None, list_of_archives=False):
if not validate_source(source, parent):
return
QObject.__init__(self, parent)
self.single_book_per_directory = single_book_per_directory
self.list_of_archives = list_of_archives
self.callback = callback
self.add_formats_to_existing = prefs['add_formats_to_existing']
self.do_one_signal.connect(self.tick, type=Qt.QueuedConnection)
self.tdir = PersistentTemporaryDirectory('_add_books')
self.pool = pool
self.pd = ProgressDialog(_('Adding books...'), _('Scanning for files...'), min=0, max=0, parent=parent, icon='add_book.png')
self.db = getattr(db, 'new_api', None)
if self.db is not None:
self.dbref = weakref.ref(db)
self.source = source
self.tdir = PersistentTemporaryDirectory('_add_books')
self.scan_error = None
self.file_groups = OrderedDict()
self.abort_scan = False
self.duplicates = []
self.report = []
self.items = []
self.added_book_ids = set()
self.merged_books = set()
self.added_duplicate_info = set()
self.pd.show()
self.scan_thread = Thread(target=self.scan, name='ScanBooks')
self.scan_thread.daemon = True
self.scan_thread.start()
self.do_one = self.monitor_scan
self.do_one_signal.emit()
if DEBUG:
self.start_time = time.time()
def break_cycles(self):
self.abort_scan = True
self.pd.close()
self.pd.deleteLater()
shutil.rmtree(self.tdir, ignore_errors=True)
if self.pool is not None:
self.pool.shutdown()
if not self.items:
shutil.rmtree(self.tdir, ignore_errors=True)
self.setParent(None)
self.find_identical_books_data = self.merged_books = self.added_duplicate_info = self.pool = self.items = self.duplicates = self.pd = self.db = self.dbref = self.tdir = self.file_groups = self.scan_thread = None # noqa
self.deleteLater()
def tick(self):
if self.pd.canceled:
try:
if callable(self.callback):
self.callback(self)
finally:
self.break_cycles()
return
self.do_one()
# Filesystem scan {{{
def scan(self):
def find_files(root):
for dirpath, dirnames, filenames in os.walk(root):
for files in find_books_in_directory(dirpath, self.single_book_per_directory):
if self.abort_scan:
return
if files:
self.file_groups[len(self.file_groups)] = files
def extract(source):
tdir = tempfile.mkdtemp(suffix='_archive', dir=self.tdir)
if source.lower().endswith('.zip'):
from calibre.utils.zipfile import ZipFile
try:
with ZipFile(source) as zf:
zf.extractall(tdir)
except Exception:
prints('Corrupt ZIP file, trying to use local headers')
from calibre.utils.localunzip import extractall
extractall(source, tdir)
elif source.lower().endswith('.rar'):
from calibre.utils.unrar import extract
extract(source, tdir)
return tdir
try:
if isinstance(self.source, basestring):
find_files(self.source)
else:
unreadable_files = []
for path in self.source:
if self.abort_scan:
return
if os.access(path, os.R_OK):
if self.list_of_archives:
find_files(extract(path))
else:
self.file_groups[len(self.file_groups)] = [path]
else:
unreadable_files.append(path)
if unreadable_files:
if not self.file_groups:
self.scan_error = _('You do not have permission to read the selected file(s).') + '\n'
self.scan_error += '\n'.join(unreadable_files)
else:
a = self.report.append
for f in unreadable_files:
a(_('Could not add %s as you do not have permission to read the file') % f)
a('')
except Exception:
self.scan_error = traceback.format_exc()
def monitor_scan(self):
self.scan_thread.join(0.05)
if self.scan_thread.is_alive():
self.do_one_signal.emit()
return
if self.scan_error is not None:
error_dialog(self.pd, _('Cannot add books'), _(
'Failed to add any books, click "Show details" for more information.'),
det_msg=self.scan_error, show=True)
self.break_cycles()
return
if not self.file_groups:
error_dialog(self.pd, _('Could not add'), _(
'No ebook files were found in %s') % self.source, show=True)
self.break_cycles()
return
self.pd.max = len(self.file_groups)
self.pd.title = _('Reading metadata and adding to library (%d books)...') % self.pd.max
self.pd.msg = ''
self.pd.value = 0
self.pool = Pool(name='AddBooks') if self.pool is None else self.pool
if self.db is not None:
if self.add_formats_to_existing:
self.find_identical_books_data = self.db.data_for_find_identical_books()
else:
try:
self.pool.set_common_data(self.db.data_for_has_book())
except Failure as err:
error_dialog(self.pd, _('Cannot add books'), _(
'Failed to add any books, click "Show details" for more information.'),
det_msg=unicode(err.failure_message) + '\n' + unicode(err.details), show=True)
self.pd.canceled = True
self.groups_to_add = iter(self.file_groups)
self.do_one = self.do_one_group
self.do_one_signal.emit()
# }}}
def do_one_group(self):
try:
group_id = next(self.groups_to_add)
except StopIteration:
self.do_one = self.monitor_pool
self.do_one_signal.emit()
return
try:
self.pool(group_id, 'calibre.ebooks.metadata.worker', 'read_metadata',
self.file_groups[group_id], group_id, self.tdir)
except Failure as err:
error_dialog(self.pd, _('Cannot add books'), _(
'Failed to add any books, click "Show details" for more information.'),
det_msg=unicode(err.failure_message) + '\n' + unicode(err.details), show=True)
self.pd.canceled = True
self.do_one_signal.emit()
def monitor_pool(self):
try:
worker_result = self.pool.results.get(True, 0.05)
self.pool.results.task_done()
except Empty:
try:
self.pool.wait_for_tasks(timeout=0.01)
except RuntimeError:
pass # Tasks still remaining
except Failure as err:
error_dialog(self.pd, _('Cannot add books'), _(
'Failed to add some books, click "Show details" for more information.'),
det_msg=unicode(err.failure_message) + '\n' + unicode(err.details), show=True)
self.pd.canceled = True
else:
# All tasks completed
try:
join_with_timeout(self.pool.results, 0.01)
except RuntimeError:
pass # There are results remaining
else:
# No results left
self.process_duplicates()
return
else:
group_id = worker_result.id
if worker_result.is_terminal_failure:
error_dialog(self.pd, _('Critical failure'), _(
'The read metadata worker process crashed while processing'
' some files. Adding of books is aborted. Click "Show details"'
' to see which files caused the problem.'), show=True,
det_msg='\n'.join(self.file_groups[group_id]))
self.pd.canceled = True
else:
try:
self.process_result(group_id, worker_result.result)
except Exception:
self.report_metadata_failure(group_id, traceback.format_exc())
self.pd.value += 1
self.do_one_signal.emit()
def report_metadata_failure(self, group_id, details):
a = self.report.append
paths = self.file_groups[group_id]
a(''), a('-' * 70)
a(_('Failed to read metadata from the file(s):'))
[a('\t' + f) for f in paths]
a(_('With error:')), a(details)
mi = Metadata(_('Unknown'))
mi.read_metadata_failed = False
return mi
def process_result(self, group_id, result):
if result.err:
mi = self.report_metadata_failure(group_id, result.traceback)
paths = self.file_groups[group_id]
has_cover = False
duplicate_info = set() if self.add_formats_to_existing else False
else:
paths, opf, has_cover, duplicate_info = result.value
try:
mi = OPF(BytesIO(opf), basedir=self.tdir, populate_spine=False, try_to_guess_cover=False).to_book_metadata()
mi.read_metadata_failed = False
except Exception:
mi = self.report_metadata_failure(group_id, traceback.format_exc())
if mi.is_null('title'):
for path in paths:
mi.title = os.path.splitext(os.path.basename(path))[0]
break
if mi.application_id == '__calibre_dummy__':
mi.application_id = None
self.pd.msg = mi.title
cover_path = os.path.join(self.tdir, '%s.cdata' % group_id) if has_cover else None
if self.db is None:
if paths:
self.items.append((mi, cover_path, paths))
return
if self.add_formats_to_existing:
identical_book_ids = find_identical_books(mi, self.find_identical_books_data)
if identical_book_ids:
try:
self.merge_books(mi, cover_path, paths, identical_book_ids)
except Exception:
a = self.report.append
a(''), a('-' * 70)
a(_('Failed to merge the book: ') + mi.title)
[a('\t' + f) for f in paths]
a(_('With error:')), a(traceback.format_exc())
else:
self.add_book(mi, cover_path, paths)
else:
if duplicate_info or icu_lower(mi.title or _('Unknown')) in self.added_duplicate_info:
self.duplicates.append((mi, cover_path, paths))
else:
self.add_book(mi, cover_path, paths)
def merge_books(self, mi, cover_path, paths, identical_book_ids):
self.merged_books.add((mi.title, ' & '.join(mi.authors)))
seen_fmts = set()
replace = gprefs['automerge'] == 'overwrite'
for identical_book_id in identical_book_ids:
ib_fmts = {fmt.upper() for fmt in self.db.formats(identical_book_id)}
seen_fmts |= ib_fmts
self.add_formats(identical_book_id, paths, mi, replace=replace)
if gprefs['automerge'] == 'new record':
incoming_fmts = {path.rpartition(os.extsep)[-1].upper() for path in paths}
if incoming_fmts.intersection(seen_fmts):
# There was at least one duplicate format so create a new
# record and put the incoming formats into it We should
# arguably put only the duplicate formats, but no real harm is
# done by having all formats
self.add_book(mi, cover_path, paths)
def add_book(self, mi, cover_path, paths):
if DEBUG:
st = time.time()
try:
cdata = None
if cover_path:
with open(cover_path, 'rb') as f:
cdata = f.read()
book_id = self.dbref().create_book_entry(mi, cover=cdata)
self.added_book_ids.add(book_id)
except Exception:
a = self.report.append
a(''), a('-' * 70)
a(_('Failed to add the book: ') + mi.title)
[a('\t' + f) for f in paths]
a(_('With error:')), a(traceback.format_exc())
return
self.add_formats(book_id, paths, mi)
try:
if self.add_formats_to_existing:
self.db.update_data_for_find_identical_books(book_id, self.find_identical_books_data)
else:
self.added_duplicate_info.add(icu_lower(mi.title or _('Unknown')))
except Exception:
# Ignore this exception since all it means is that duplicate
# detection/automerge will fail for this book.
traceback.print_exc()
if DEBUG:
prints('Added', mi.title, 'to db in: %.1f' % (time.time() - st))
def add_formats(self, book_id, paths, mi, replace=True):
fmap = {p.rpartition(os.path.extsep)[-1].lower():p for p in paths}
for fmt, path in fmap.iteritems():
# The onimport plugins have already been run by the read metadata
# worker
try:
if self.db.add_format(book_id, fmt, path, run_hooks=False, replace=replace):
run_plugins_on_postimport(self.dbref(), book_id, fmt)
except Exception:
a = self.report.append
a(''), a('-' * 70)
a(_('Failed to add the file {0} to the book: {1}').format(path, mi.title))
a(_('With error:')), a(traceback.format_exc())
def process_duplicates(self):
if self.duplicates:
d = DuplicatesQuestion(self.dbref(), self.duplicates, self.pd)
duplicates = tuple(d.duplicates)
d.deleteLater()
if duplicates:
self.do_one = self.process_duplicate
self.duplicates_to_process = iter(duplicates)
self.do_one_signal.emit()
return
self.finish()
def process_duplicate(self):
try:
mi, cover_path, paths = next(self.duplicates_to_process)
except StopIteration:
self.finish()
return
self.add_book(mi, cover_path, paths)
self.do_one_signal.emit()
def finish(self):
if DEBUG:
prints('Added %s books in %.1f seconds' % (len(self.added_book_ids or self.items), time.time() - self.start_time))
if self.report:
added_some = self.items or self.added_book_ids
d = warning_dialog if added_some else error_dialog
msg = _('There were problems adding some files, click "Show details" for more information') if added_some else _(
'Failed to add any books, click "Show details" for more information')
d(self.pd, _('Errors while adding'), msg, det_msg='\n'.join(self.report), show=True)
if gprefs['manual_add_auto_convert'] and self.added_book_ids and self.parent() is not None:
self.parent().iactions['Convert Books'].auto_convert_auto_add(
self.added_book_ids)
try:
if callable(self.callback):
self.callback(self)
finally:
self.break_cycles()
@property
def number_of_books_added(self):
return len(self.added_book_ids)
# TODO: Test direct add of books to device
# TODO: Test adding from device to library
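A small detail of add_formats() above that is easy to miss: the format key is the lower-cased file extension, and because the mapping is a dict, two paths carrying the same extension collapse to a single entry. A quick illustration with made-up paths:

    import os

    paths = ['/tmp/book.epub', '/tmp/book.mobi', '/tmp/copy.EPUB']
    fmap = {p.rpartition(os.path.extsep)[-1].lower(): p for p in paths}
    # One path per format survives, e.g.:
    # {'epub': '/tmp/copy.EPUB', 'mobi': '/tmp/book.mobi'}
    print(fmap)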