mirror of https://github.com/kovidgoyal/calibre.git
commit bba5cbf11e (parent 5d1adac683)

Initial implementation of new adding logic
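For orientation, here is a minimal sketch (not part of the commit) of how a caller is expected to drive the new calibre.gui2.add2.Adder, inferred from the constructor and callback usage in the diff below. add_folder and books_added are illustrative names only, and gui stands for the running calibre GUI object.

from functools import partial

from calibre.gui2.add2 import Adder

def add_folder(gui, root, single=True):
    # Mirrors the call made from AddAction.do_add_recursive() in this commit.
    def books_added(adder, on_card=None):
        # The callback receives the Adder itself once every file group has
        # been processed (or the progress dialog was canceled).
        if adder.number_of_books_added > 0:
            print('Added %d books' % adder.number_of_books_added)
        if adder.report:
            print('\n'.join(adder.report))  # per-file problems, if any

    Adder(root, single_book_per_directory=single, db=gui.current_db,
          parent=gui, callback=partial(books_added, on_card=None))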
@@ -6,178 +6,62 @@ __license__ = 'GPL v3'
__copyright__ = '2009, Kovid Goyal <kovid@kovidgoyal.net>'
__docformat__ = 'restructuredtext en'

from threading import Thread
from Queue import Empty
import os, time, sys, shutil
import os, shutil, errno

from calibre.utils.ipc.job import ParallelJob
from calibre.utils.ipc.server import Server
from calibre.ptempfile import PersistentTemporaryDirectory, TemporaryDirectory
from calibre import prints
from calibre.constants import filesystem_encoding
from calibre.customize.ui import run_plugins_on_import
from calibre.db.utils import find_identical_books
from calibre.ebooks.metadata.meta import metadata_from_formats
from calibre.ebooks.metadata.opf2 import metadata_to_opf
from calibre.utils.filenames import samefile

def debug(*args):
    prints(*args)
    sys.stdout.flush()

def serialize_metadata_for(formats, tdir, id_):
    from calibre.ebooks.metadata.meta import metadata_from_formats
    from calibre.ebooks.metadata.opf2 import metadata_to_opf
    mi = metadata_from_formats(formats)
def serialize_metadata_for(paths, tdir, group_id):
    mi = metadata_from_formats(paths)
    mi.cover = None
    cdata = None
    if mi.cover_data:
        cdata = mi.cover_data[-1]
    mi.cover_data = None
    mi.cover_data = (None, None)
    if not mi.application_id:
        mi.application_id = '__calibre_dummy__'
    with open(os.path.join(tdir, '%s.opf'%id_), 'wb') as f:
        f.write(metadata_to_opf(mi, default_lang='und'))
    opf = metadata_to_opf(mi, default_lang='und')
    has_cover = False
    if cdata:
        with open(os.path.join(tdir, str(id_)), 'wb') as f:
        with open(os.path.join(tdir, '%s.cdata' % group_id), 'wb') as f:
            f.write(cdata)
        has_cover = True
    return mi, opf, has_cover

def read_metadata_(task, tdir, notification=lambda x,y:x):
    with TemporaryDirectory() as mdir:
        do_read_metadata(task, tdir, mdir, notification)

def do_read_metadata(task, tdir, mdir, notification):
    from calibre.customize.ui import run_plugins_on_import
    for x in task:
        try:
            id_, formats = x
        except:
def run_import_plugins(paths, group_id, tdir):
    final_paths = []
    for path in paths:
        if not os.access(path, os.R_OK):
            continue
        try:
            if isinstance(formats, basestring):
                formats = [formats]
            import_map = {}
            fmts, metadata_fmts = [], []
            for format in formats:
                mfmt = format
                name, ext = os.path.splitext(os.path.basename(format))
                nfp = run_plugins_on_import(format)
                if not nfp or nfp == format or not os.access(nfp, os.R_OK):
                    nfp = None
                else:
                    # Ensure that the filename is preserved so that
                    # reading metadata from filename is not broken
                    nfp = os.path.abspath(nfp)
                    nfext = os.path.splitext(nfp)[1]
                    mfmt = os.path.join(mdir, name + nfext)
                    shutil.copyfile(nfp, mfmt)
                metadata_fmts.append(mfmt)
                fmts.append(nfp)
        nfp = run_plugins_on_import(path)
        if nfp and os.access(nfp, os.R_OK) and not samefile(nfp, path):
            # Ensure that the filename is preserved so that
            # reading metadata from filename is not broken
            name = os.path.splitext(os.path.basename(path))[0]
            ext = os.path.splitext(nfp)[1]
            path = os.path.join(tdir, '%s' % group_id, name + ext)
            try:
                os.mkdir(os.path.dirname(path))
            except EnvironmentError as err:
                if err.errno != errno.EEXIST:
                    raise
            try:
                os.rename(nfp, path)
            except EnvironmentError:
                shutil.copyfile(nfp, path)
        final_paths.append(path)
    return final_paths

            serialize_metadata_for(metadata_fmts, tdir, id_)

            for format, nfp in zip(formats, fmts):
                if not nfp:
                    continue
                if isinstance(nfp, unicode):
                    nfp.encode(filesystem_encoding)
                x = lambda j : os.path.abspath(os.path.normpath(os.path.normcase(j)))
                if x(nfp) != x(format) and os.access(nfp, os.R_OK|os.W_OK):
                    fmt = os.path.splitext(format)[1].replace('.', '').lower()
                    nfmt = os.path.splitext(nfp)[1].replace('.', '').lower()
                    dest = os.path.join(tdir, '%s.%s'%(id_, nfmt))
                    shutil.copyfile(nfp, dest)
                    import_map[fmt] = dest
            if import_map:
                with open(os.path.join(tdir, str(id_)+'.import'), 'wb') as f:
                    for fmt, nfp in import_map.items():
                        f.write(fmt+':'+nfp+'\n')
            notification(0.5, id_)
        except:
            import traceback
            with open(os.path.join(tdir, '%s.error'%id_), 'wb') as f:
                f.write(traceback.format_exc())

class Progress(object):

    def __init__(self, result_queue, tdir):
        self.result_queue = result_queue
        self.tdir = tdir

    def __call__(self, id):
        cover = os.path.join(self.tdir, str(id))
        if not os.path.exists(cover):
            cover = None
        res = os.path.join(self.tdir, '%s.error'%id)
        if not os.path.exists(res):
            res = res.replace('.error', '.opf')
        self.result_queue.put((id, res, cover))

class ReadMetadata(Thread):

    def __init__(self, tasks, result_queue, spare_server=None):
        self.tasks, self.result_queue = tasks, result_queue
        self.spare_server = spare_server
        self.canceled = False
        Thread.__init__(self)
        self.daemon = True
        self.failure_details = {}
        self.tdir = PersistentTemporaryDirectory('_rm_worker')

    def run(self):
        jobs, ids = set([]), set([])
        for t in self.tasks:
            for b in t:
                ids.add(b[0])
        progress = Progress(self.result_queue, self.tdir)
        server = Server() if self.spare_server is None else self.spare_server
        try:
            for i, task in enumerate(self.tasks):
                job = ParallelJob('read_metadata',
                    'Read metadata (%d of %d)'%(i, len(self.tasks)),
                    lambda x,y:x, args=[task, self.tdir])
                jobs.add(job)
                server.add_job(job)

            while not self.canceled:
                time.sleep(0.2)
                running = False
                for job in jobs:
                    while True:
                        try:
                            id = job.notifications.get_nowait()[-1]
                            if id in ids:
                                progress(id)
                                ids.remove(id)
                        except Empty:
                            break
                    job.update(consume_notifications=False)
                    if not job.is_finished:
                        running = True

                if not running:
                    break
        finally:
            server.close()
        time.sleep(1)

        if self.canceled:
            return

        for id in ids:
            progress(id)

        for job in jobs:
            if job.failed:
                prints(job.details)
            if os.path.exists(job.log_path):
                try:
                    os.remove(job.log_path)
                except:
                    pass


def read_metadata(paths, result_queue, chunk=50, spare_server=None):
    tasks = []
    pos = 0
    while pos < len(paths):
        tasks.append(paths[pos:pos+chunk])
        pos += chunk
    t = ReadMetadata(tasks, result_queue, spare_server=spare_server)
    t.start()
    return t
def read_metadata(paths, group_id, tdir, common_data=None):
    paths = run_import_plugins(paths, group_id, tdir)
    mi, opf, has_cover = serialize_metadata_for(paths, tdir, group_id)
    duplicate_info = None
    if common_data is not None:
        if isinstance(common_data, (set, frozenset)):
            duplicate_info = mi.title and icu_lower(mi.title) in common_data
        else:
            duplicate_info = find_identical_books(mi, common_data)
    return paths, opf, has_cover, duplicate_info
@@ -7,7 +7,6 @@ __docformat__ = 'restructuredtext en'

import os
from functools import partial
from collections import defaultdict

from PyQt5.Qt import QPixmap, QTimer

@@ -20,8 +19,6 @@ from calibre.gui2.dialogs.progress import ProgressDialog
from calibre.gui2.widgets import IMAGE_EXTENSIONS
from calibre.ebooks import BOOK_EXTENSIONS
from calibre.utils.filenames import ascii_filename
from calibre.utils.icu import sort_key
from calibre.constants import filesystem_encoding
from calibre.gui2.actions import InterfaceAction
from calibre.gui2 import question_dialog
from calibre.ebooks.metadata import MetaInformation
@@ -154,15 +151,15 @@ class AddAction(InterfaceAction):
                _('Select root folder'))
        if not root:
            return
        lp = os.path.normcase(os.path.abspath(self.gui.current_db.library_path))
        if lp.startswith(os.path.normcase(os.path.abspath(root)) + os.pathsep):
            return error_dialog(self.gui, _('Cannot add'), _(
                'Cannot add books from the folder: %s as it contains the currently opened calibre library') % root, show=True)
        self.do_add_recursive(root, single)

    def do_add_recursive(self, root, single):
        from calibre.gui2.add import Adder
        self._adder = Adder(self.gui,
                self.gui.library_view.model().db,
                self.Dispatcher(self._files_added), spare_server=self.gui.spare_server)
        self.gui.tags_view.disable_recounting = True
        self._adder.add_recursive(root, single)
        from calibre.gui2.add2 import Adder
        Adder(root, single_book_per_directory=single, db=self.gui.current_db, callback=self._files_added, parent=self.gui)

    def add_recursive_single(self, *args):
        '''
@@ -364,75 +361,58 @@ class AddAction(InterfaceAction):
                'cardb' if self.gui.stack.currentIndex() == 3 else None
        if not paths:
            return
        from calibre.gui2.add import Adder
        self.__adder_func = partial(self._files_added, on_card=on_card)
        self._adder = Adder(self.gui,
                None if to_device else self.gui.library_view.model().db,
                self.Dispatcher(self.__adder_func), spare_server=self.gui.spare_server)
        self.gui.tags_view.disable_recounting = True
        self._adder.add(paths)
        from calibre.gui2.add2 import Adder
        Adder(paths, db=None if to_device else self.gui.current_db, parent=self.gui, callback=partial(self._files_added, on_card=on_card))

    def _files_added(self, paths=[], names=[], infos=[], on_card=None):
        self.gui.tags_view.disable_recounting = False
        if paths:
            self.gui.upload_books(paths,
                list(map(ascii_filename, names)),
                infos, on_card=on_card)
    def _files_added(self, adder, on_card=None):
        if adder.items:
            paths, infos, names = [], [], []
            for mi, cover_path, format_paths in adder.items:
                mi.cover = cover_path
                paths.append(format_paths[0]), infos.append(mi)
                names.append(ascii_filename(os.path.basename(paths[-1])))
            self.gui.upload_books(paths, names, infos, on_card=on_card)
            self.gui.status_bar.show_message(
                    _('Uploading books to device.'), 2000)
        if getattr(self._adder, 'number_of_books_added', 0) > 0:
            self.gui.library_view.model().books_added(self._adder.number_of_books_added)
            return

        if adder.number_of_books_added > 0:
            self.gui.library_view.model().books_added(adder.number_of_books_added)
            self.gui.library_view.set_current_row(0)
        if hasattr(self.gui, 'db_images'):
            self.gui.db_images.beginResetModel(), self.gui.db_images.endResetModel()
        self.gui.tags_view.recount()

        if getattr(self._adder, 'merged_books', False):
            merged = defaultdict(list)
            for title, author in self._adder.merged_books:
                merged[author].append(title)
            lines = []
            for author in sorted(merged, key=sort_key):
                lines.append(author)
                for title in sorted(merged[author], key=sort_key):
                    lines.append('\t' + title)
                lines.append('')
            info_dialog(self.gui, _('Merged some books'),
                _('The following %d duplicate books were found and incoming '
                    'book formats were processed and merged into your '
                    'Calibre database according to your automerge '
                    'settings:')%len(self._adder.merged_books),
                det_msg='\n'.join(lines), show=True)
        # if getattr(self._adder, 'merged_books', False):
        #     merged = defaultdict(list)
        #     for title, author in self._adder.merged_books:
        #         merged[author].append(title)
        #     lines = []
        #     for author in sorted(merged, key=sort_key):
        #         lines.append(author)
        #         for title in sorted(merged[author], key=sort_key):
        #             lines.append('\t' + title)
        #         lines.append('')
        #     info_dialog(self.gui, _('Merged some books'),
        #         _('The following %d duplicate books were found and incoming '
        #             'book formats were processed and merged into your '
        #             'Calibre database according to your automerge '
        #             'settings:')%len(self._adder.merged_books),
        #         det_msg='\n'.join(lines), show=True)
        #

        if getattr(self._adder, 'number_of_books_added', 0) > 0 or \
                getattr(self._adder, 'merged_books', False):
            # The formats of the current book could have changed if
            # automerge is enabled
            current_idx = self.gui.library_view.currentIndex()
            if current_idx.isValid():
                self.gui.library_view.model().current_changed(current_idx,
                        current_idx)
        # if getattr(self._adder, 'number_of_books_added', 0) > 0 or \
        #         getattr(self._adder, 'merged_books', False):
        #     # The formats of the current book could have changed if
        #     # automerge is enabled
        #     current_idx = self.gui.library_view.currentIndex()
        #     if current_idx.isValid():
        #         self.gui.library_view.model().current_changed(current_idx,
        #                 current_idx)
        #

        if getattr(self._adder, 'critical', None):
            det_msg = []
            for name, log in self._adder.critical.items():
                if isinstance(name, str):
                    name = name.decode(filesystem_encoding, 'replace')
                det_msg.append(name+'\n'+log)

            warning_dialog(self.gui, _('Failed to read metadata'),
                _('Failed to read metadata from the following')+':',
                det_msg='\n\n'.join(det_msg), show=True)

        if hasattr(self._adder, 'cleanup'):
            self._adder.cleanup()
        self._adder.setParent(None)
        del self._adder
        self._adder = None

    def _add_from_device_adder(self, paths=[], names=[], infos=[],
            on_card=None, model=None):
        self._files_added(paths, names, infos, on_card=on_card)
    def _add_from_device_adder(self, adder, on_card=None, model=None):
        self._files_added(adder, on_card=on_card)
        # set the in-library flags, and as a consequence send the library's
        # metadata for this book to the device. This sets the uuid to the
        # correct value. Note that set_books_in_library might sync_booklists
@@ -503,12 +483,6 @@ class AddAction(InterfaceAction):
                show=True)

        if ok_paths:
            from calibre.gui2.add import Adder
            self.__adder_func = partial(self._add_from_device_adder, on_card=None,
                model=view.model())
            self._adder = Adder(self.gui, self.gui.library_view.model().db,
                    self.Dispatcher(self.__adder_func), spare_server=self.gui.spare_server)
            self._adder.add(ok_paths)

            from calibre.gui2.add2 import Adder
            callback = partial(self._add_from_device_adder, on_card=None, model=view.model())
            Adder(ok_paths, db=None, parent=self.gui, callback=callback)
src/calibre/gui2/add2.py (new file, 388 lines)
@@ -0,0 +1,388 @@
#!/usr/bin/env python
# vim:fileencoding=utf-8
from __future__ import (unicode_literals, division, absolute_import,
                        print_function)

__license__ = 'GPL v3'
__copyright__ = '2014, Kovid Goyal <kovid at kovidgoyal.net>'

import shutil, os, weakref, traceback
from threading import Thread
from collections import OrderedDict
from Queue import Empty
from io import BytesIO

from PyQt5.Qt import QObject, Qt, pyqtSignal

from calibre import prints
from calibre.customize.ui import run_plugins_on_postimport
from calibre.db.adding import find_books_in_directory
from calibre.ebooks.metadata.book.base import Metadata
from calibre.ebooks.metadata.opf2 import OPF
from calibre.gui2 import error_dialog, warning_dialog
from calibre.gui2.dialogs.duplicates import DuplicatesQuestion
from calibre.gui2.dialogs.progress import ProgressDialog
from calibre.ptempfile import PersistentTemporaryDirectory
from calibre.utils import join_with_timeout
from calibre.utils.config import prefs
from calibre.utils.ipc.pool import Pool, Failure

def validate_source(source, parent=None): # {{{
    if isinstance(source, basestring):
        if not os.path.exists(source):
            error_dialog(parent, _('Cannot add books'), _(
                'The path %s does not exist') % source, show=True)
            return False
        if os.path.isdir(source):
            if not os.access(source, os.X_OK|os.R_OK):
                error_dialog(parent, _('Cannot add books'), _(
                    'You do not have permission to read %s') % source, show=True)
                return False
        else:
            if not os.access(source, os.R_OK):
                error_dialog(parent, _('Cannot add books'), _(
                    'You do not have permission to read %s') % source, show=True)
                return False
            if not source.lower().rpartition(os.extsep)[-1] in {'zip', 'rar'}:
                error_dialog(parent, _('Cannot add books'), _(
                    'The file %s is not a recognized archive format') % source, show=True)
                return False

    return True
# }}}

class Adder(QObject):

    do_one_signal = pyqtSignal()

    def __init__(self, source, single_book_per_directory=True, db=None, parent=None, callback=None):
        if not validate_source(source, parent):
            raise ValueError('Bad source')
        QObject.__init__(self, parent)
        self.single_book_per_directory = single_book_per_directory
        self.callback = callback
        self.add_formats_to_existing = prefs['add_formats_to_existing']
        self.do_one_signal.connect(self.tick, type=Qt.QueuedConnection)
        self.tdir = PersistentTemporaryDirectory('_add_books')
        self.pool = None
        self.pd = ProgressDialog(_('Adding books...'), _('Scanning for files...'), min=0, max=0, parent=parent, icon='add_book.png')
        self.db = getattr(db, 'new_api', None)
        if self.db is not None:
            self.dbref = weakref.ref(db)
        self.source = source
        self.tdir = PersistentTemporaryDirectory('_add_books')
        self.scan_error = None
        self.file_groups = OrderedDict()
        self.abort_scan = False
        self.duplicates = []
        self.report = []
        self.items = []
        self.added_book_ids = set()
        self.added_duplicate_info = ({}, {}, {}) if self.add_formats_to_existing else set()
        self.pd.show()

        self.scan_thread = Thread(target=self.scan, name='ScanBooks')
        self.scan_thread.daemon = True
        self.scan_thread.start()
        self.do_one = self.monitor_scan
        self.do_one_signal.emit()

    def break_cycles(self):
        self.abort_scan = True
        self.pd.close()
        self.pd.deleteLater()
        shutil.rmtree(self.tdir, ignore_errors=True)
        if self.pool is not None:
            self.pool.shutdown()
        if not self.items:
            shutil.rmtree(self.tdir, ignore_errors=True)
        self.setParent(None)
        self.added_duplicate_info = self.pool = self.items = self.duplicates = self.pd = self.db = self.dbref = self.tdir = self.file_groups = self.scan_thread = None # noqa
        self.deleteLater()

    def tick(self):
        if self.pd.canceled:
            try:
                if callable(self.callback):
                    self.callback(self)
            finally:
                self.break_cycles()
            return
        self.do_one()

    # Filesystem scan {{{
    def scan(self):
        try:
            if isinstance(self.source, basestring):
                if os.path.isdir(self.source):
                    root = self.source
                else:
                    root = self.extract()
                for dirpath, dirnames, filenames in os.walk(root):
                    for files in find_books_in_directory(dirpath, self.single_book_per_directory):
                        if self.abort_scan:
                            return
                        if files:
                            self.file_groups[len(self.file_groups)] = files
            else:
                unreadable_files = []
                for path in self.source:
                    if self.abort_scan:
                        return
                    if os.access(path, os.R_OK):
                        self.file_groups[len(self.file_groups)] = [path]
                    else:
                        unreadable_files.append(path)
                if unreadable_files:
                    if not self.file_groups:
                        self.scan_error = _('You do not have permission to read the selected file(s).') + '\n'
                        self.scan_error += '\n'.join(unreadable_files)
                    else:
                        a = self.report.append
                        for f in unreadable_files:
                            a(_('Could not add %s as you do not have permission to read the file') % f)
                            a('')
        except Exception:
            self.scan_error = traceback.format_exc()

    def extract(self):
        tdir = os.path.join(self.tdir, 'archive')
        if self.source.lower().endswith('.zip'):
            from calibre.utils.zipfile import ZipFile
            try:
                with ZipFile(self.source) as zf:
                    zf.extractall(tdir)
            except Exception:
                prints('Corrupt ZIP file, trying to use local headers')
                from calibre.utils.localunzip import extractall
                extractall(self.source, tdir)
        elif self.source.lower().endswith('.rar'):
            from calibre.utils.unrar import extract
            extract(self.source, tdir)
        return tdir

    def monitor_scan(self):
        self.scan_thread.join(0.05)
        if self.scan_thread.is_alive():
            self.do_one_signal.emit()
            return
        if self.scan_error is not None:
            error_dialog(self.pd, _('Cannot add books'), _(
                'Failed to add any books, click "Show details" for more information.'),
                det_msg=self.scan_error, show=True)
            self.break_cycles()
            return
        if not self.file_groups:
            error_dialog(self.pd, _('Could not add'), _(
                'No ebook files were found in %s') % self.source, show=True)
            self.break_cycles()
            return
        self.pd.msg = _('Reading metadata and adding to library...')
        self.pd.max = len(self.file_groups)
        self.pd.value = 0
        self.pool = Pool(name='AddBooks') if self.pool is None else self.pool
        if self.db is not None:
            data = self.db.data_for_find_identical_books() if self.add_formats_to_existing else self.db.data_for_has_book()
            try:
                self.pool.set_common_data(data)
            except Failure as err:
                error_dialog(self.pd, _('Cannot add books'), _(
                    'Failed to add any books, click "Show details" for more information.'),
                    det_msg=unicode(err.failure_message) + '\n' + unicode(err.details), show=True)
                self.pd.canceled = True
        self.groups_to_add = iter(self.file_groups)
        self.do_one = self.do_one_group
        self.do_one_signal.emit()
    # }}}

    def do_one_group(self):
        try:
            group_id = next(self.groups_to_add)
        except StopIteration:
            self.do_one = self.monitor_pool
            self.do_one_signal.emit()
            return
        try:
            self.pool(group_id, 'calibre.ebooks.metadata.worker', 'read_metadata',
                      self.file_groups[group_id], group_id, self.tdir)
        except Failure as err:
            error_dialog(self.pd, _('Cannot add books'), _(
                'Failed to add any books, click "Show details" for more information.'),
                det_msg=unicode(err.failure_message) + '\n' + unicode(err.details), show=True)
            self.pd.canceled = True
        self.do_one_signal.emit()

    def monitor_pool(self):
        try:
            worker_result = self.pool.results.get(True, 0.05)
            self.pool.results.task_done()
        except Empty:
            try:
                self.pool.wait_for_tasks(timeout=0.01)
            except RuntimeError:
                pass # Tasks still remaining
            except Failure as err:
                error_dialog(self.pd, _('Cannot add books'), _(
                    'Failed to add some books, click "Show details" for more information.'),
                    det_msg=unicode(err.failure_message) + '\n' + unicode(err.details), show=True)
                self.pd.canceled = True
            else:
                # All tasks completed
                try:
                    join_with_timeout(self.pool.results, 0.01)
                except RuntimeError:
                    pass # There are results remaining
                else:
                    # No results left
                    self.process_duplicates()
                    return
        else:
            group_id = worker_result.id
            if worker_result.is_terminal_failure:
                error_dialog(self.pd, _('Critical failure'), _(
                    'The read metadata worker process crashed while processing'
                    ' some files. Adding of books is aborted. Click "Show details"'
                    ' to see which files caused the problem.'), show=True,
                    det_msg='\n'.join(self.file_groups[group_id]))
                self.pd.canceled = True
            else:
                try:
                    self.process_result(group_id, worker_result.result)
                except Exception:
                    self.report_metadata_failure(group_id, traceback.format_exc())
                self.pd.value += 1

        self.do_one_signal.emit()

    def report_metadata_failure(self, group_id, details):
        a = self.report.append
        paths = self.file_groups[group_id]
        a(''), a('-' * 70)
        a(_('Failed to read metadata from the file(s):'))
        [a('\t' + f) for f in paths]
        a(_('With error:')), a(details)
        mi = Metadata(_('Unknown'))
        mi.read_metadata_failed = False
        return mi

    def process_result(self, group_id, result):
        if result.err:
            mi = self.report_metadata_failure(group_id, result.traceback)
            paths = self.file_groups[group_id]
            has_cover = False
            duplicate_info = set() if self.add_formats_to_existing else False
        else:
            paths, opf, has_cover, duplicate_info = result.value
            try:
                mi = OPF(BytesIO(opf), basedir=self.tdir, populate_spine=False, try_to_guess_cover=False).to_book_metadata()
                mi.read_metadata_failed = False
            except Exception:
                mi = self.report_metadata_failure(group_id, traceback.format_exc())

        if mi.is_null('title'):
            for path in paths:
                mi.title = os.path.splitext(os.path.basename(path))[0]
                break
        if mi.application_id == '__calibre_dummy__':
            mi.application_id = None

        self.pd.msg = mi.title

        cover_path = os.path.join(self.tdir, '%s.cdata' % group_id) if has_cover else None

        if self.db is None:
            if paths:
                self.items.append((mi, cover_path, paths))
            return

        if self.add_formats_to_existing:
            pass # TODO: Implement this
        else:
            if duplicate_info or icu_lower(mi.title or _('Unknown')) in self.added_duplicate_info:
                self.duplicates.append((mi, cover_path, paths))
            else:
                self.add_book(mi, cover_path, paths)

    def add_book(self, mi, cover_path, paths):
        try:
            cdata = None
            if cover_path:
                with open(cover_path, 'rb') as f:
                    cdata = f.read()
            book_id = self.dbref().create_book_entry(mi, cover=cdata)
            self.added_book_ids.add(book_id)
        except Exception:
            a = self.report.append
            a(''), a('-' * 70)
            a(_('Failed to add the book: ') + mi.title)
            [a('\t' + f) for f in paths]
            a(_('With error:')), a(traceback.format_exc())
            return
        else:
            self.add_formats(book_id, paths, mi)
        if self.add_formats_to_existing:
            pass # TODO: Implement this
        else:
            self.added_duplicate_info.add(icu_lower(mi.title or _('Unknown')))

    def add_formats(self, book_id, paths, mi):
        fmap = {p.rpartition(os.path.extsep)[-1].lower():p for p in paths}
        for fmt, path in fmap.iteritems():
            # The onimport plugins have already been run by the read metadata
            # worker
            try:
                if self.db.add_format(book_id, fmt, path, run_hooks=False):
                    run_plugins_on_postimport(self.dbref(), book_id, fmt)
            except Exception:
                a = self.report.append
                a(''), a('-' * 70)
                a(_('Failed to add the file {0} to the book: {1}').format(path, mi.title))
                a(_('With error:')), a(traceback.format_exc())

    def process_duplicates(self):
        if self.duplicates:
            d = DuplicatesQuestion(self.dbref(), self.duplicates, self.pd)
            duplicates = tuple(d.duplicates)
            d.deleteLater()
            if duplicates:
                self.do_one = self.process_duplicate
                self.duplicates_to_process = iter(duplicates)
                self.do_one_signal.emit()
                return
        self.finish()

    def process_duplicate(self):
        try:
            mi, cover_path, paths = next(self.duplicates_to_process)
        except StopIteration:
            self.finish()
            return
        self.add_book(mi, cover_path, paths)
        self.do_one_signal.emit()

    def finish(self):
        if self.report:
            added_some = self.items or self.added_book_ids
            d = warning_dialog if added_some else error_dialog
            msg = _('There were problems adding some files, click "Show details" for more information') if added_some else _(
                'Failed to add any books, click "Show details" for more information')
            d(self.pd, _('Errors while adding'), msg, det_msg='\n'.join(self.report), show=True)

        try:
            if callable(self.callback):
                self.callback(self)
        finally:
            self.break_cycles()

    @property
    def number_of_books_added(self):
        return len(self.added_book_ids)

# TODO: Duplicates and auto-merge (in particular adding duplicated files as well as adding files already in the db)
# TODO: Test importing with filetype plugin (archive, de-obfuscate)
# TODO: Test recursive adding when no books are found
# TODO: Test handling of exception in metadata read function
# TODO: Report terminal errors where some books have been added better
# TODO: Test direct add of books to device
# TODO: Test adding from device to library
# TODO: Check aborting after a few books have been added
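A distilled, self-contained sketch of the queued-signal pattern Adder uses (do_one_signal, tick() and the do_one slot above) to process work in small steps while keeping the GUI responsive. The StateMachine class below is illustrative only, not part of calibre, and the queued signal only fires once a Qt event loop is running.

from PyQt5.Qt import QObject, Qt, pyqtSignal

class StateMachine(QObject):

    do_one_signal = pyqtSignal()

    def __init__(self, jobs, parent=None):
        QObject.__init__(self, parent)
        self.jobs = iter(jobs)
        self.do_one = self.process_next
        # Queued connection: each emit schedules the next step on the event
        # loop instead of recursing, which is what keeps the GUI responsive.
        self.do_one_signal.connect(self.tick, type=Qt.QueuedConnection)
        self.do_one_signal.emit()

    def tick(self):
        # Adder additionally checks its progress dialog for cancellation here.
        self.do_one()

    def process_next(self):
        try:
            job = next(self.jobs)
        except StopIteration:
            return  # all work finished
        print('processing', job)
        self.do_one_signal.emit()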
@@ -109,6 +109,12 @@ class MainWindow(QMainWindow):
        if disable_automatic_gc:
            self._gc = GarbageCollector(self, debug=False)

    def enable_garbage_collection(self, enabled=True):
        if hasattr(self, '_gc'):
            self._gc.timer.blockSignals(not enabled)
        else:
            gc.enable() if enabled else gc.disable()

    def unhandled_exception(self, type, value, tb):
        if type == KeyboardInterrupt:
            self.keyboard_interrupt.emit()
@@ -19,41 +19,38 @@ from calibre.constants import iswindows, isosx
from calibre.utils.ipc import eintr_retry_call

PARALLEL_FUNCS = {
    'lrfviewer' :
        ('calibre.gui2.lrf_renderer.main', 'main', None),
    'lrfviewer' :
        ('calibre.gui2.lrf_renderer.main', 'main', None),

    'ebook-viewer' :
        ('calibre.gui_launch', 'ebook_viewer', None),
    'ebook-viewer' :
        ('calibre.gui_launch', 'ebook_viewer', None),

    'ebook-edit' :
        ('calibre.gui_launch', 'gui_ebook_edit', None),
    'ebook-edit' :
        ('calibre.gui_launch', 'gui_ebook_edit', None),

    'render_pages' :
        ('calibre.ebooks.comic.input', 'render_pages', 'notification'),
    'render_pages' :
        ('calibre.ebooks.comic.input', 'render_pages', 'notification'),

    'gui_convert' :
        ('calibre.gui2.convert.gui_conversion', 'gui_convert', 'notification'),
    'gui_convert' :
        ('calibre.gui2.convert.gui_conversion', 'gui_convert', 'notification'),

    'gui_polish' :
        ('calibre.ebooks.oeb.polish.main', 'gui_polish', None),
    'gui_polish' :
        ('calibre.ebooks.oeb.polish.main', 'gui_polish', None),

    'gui_convert_override' :
        ('calibre.gui2.convert.gui_conversion', 'gui_convert_override', 'notification'),
    'gui_convert_override' :
        ('calibre.gui2.convert.gui_conversion', 'gui_convert_override', 'notification'),

    'gui_catalog' :
        ('calibre.gui2.convert.gui_conversion', 'gui_catalog', 'notification'),
    'gui_catalog' :
        ('calibre.gui2.convert.gui_conversion', 'gui_catalog', 'notification'),

    'move_library' :
        ('calibre.library.move', 'move_library', 'notification'),
    'move_library' :
        ('calibre.library.move', 'move_library', 'notification'),

    'read_metadata' :
        ('calibre.ebooks.metadata.worker', 'read_metadata_', 'notification'),
    'arbitrary' :
        ('calibre.utils.ipc.worker', 'arbitrary', None),

    'arbitrary' :
        ('calibre.utils.ipc.worker', 'arbitrary', None),

    'arbitrary_n' :
        ('calibre.utils.ipc.worker', 'arbitrary_n', 'notification'),
    'arbitrary_n' :
        ('calibre.utils.ipc.worker', 'arbitrary_n', 'notification'),
}

class Progress(Thread):