Copy to Library: Do not abort the copy process if only some books fail to copy; instead, report which books failed and copy the rest. See #1594100 (Copying multiple books to a library fails if one book is corrupted)

This commit is contained in:
Kovid Goyal 2016-06-21 21:51:26 +05:30
parent 71171486b6
commit 6f362f1567

View File

@ -15,6 +15,7 @@ from PyQt5.Qt import (
QToolButton, QDialog, QGridLayout, QIcon, QLabel, QDialogButtonBox, QApplication, QToolButton, QDialog, QGridLayout, QIcon, QLabel, QDialogButtonBox, QApplication,
QFormLayout, QCheckBox, QWidget, QScrollArea, QVBoxLayout, Qt, QListWidgetItem, QListWidget) QFormLayout, QCheckBox, QWidget, QScrollArea, QVBoxLayout, Qt, QListWidgetItem, QListWidget)
from calibre import as_unicode
from calibre.constants import isosx from calibre.constants import isosx
from calibre.db.utils import find_identical_books from calibre.db.utils import find_identical_books
from calibre.gui2.actions import InterfaceAction from calibre.gui2.actions import InterfaceAction
@ -107,6 +108,7 @@ class Worker(Thread): # {{{
self.add_duplicates = add_duplicates self.add_duplicates = add_duplicates
self.duplicate_ids = {} self.duplicate_ids = {}
self.check_for_duplicates = not add_duplicates and (prefs['add_formats_to_existing'] or prefs['check_for_dupes_on_ctl']) self.check_for_duplicates = not add_duplicates and (prefs['add_formats_to_existing'] or prefs['check_for_dupes_on_ctl'])
self.failed_books = {}
def run(self): def run(self):
try: try:
@ -140,12 +142,19 @@ class Worker(Thread): # {{{
def _doit(self, newdb): def _doit(self, newdb):
for i, x in enumerate(self.ids): for i, x in enumerate(self.ids):
mi = self.db.get_metadata(x, index_is_id=True, get_cover=True, try:
cover_as_data=True) self.do_one(i, x, newdb)
except Exception as err:
import traceback
err = as_unicode(err)
self.failed_books[x] = (err, as_unicode(traceback.format_exc()))
def do_one(self, num, book_id, newdb):
mi = self.db.get_metadata(book_id, index_is_id=True, get_cover=True, cover_as_data=True)
if not gprefs['preserve_date_on_ctl']: if not gprefs['preserve_date_on_ctl']:
mi.timestamp = now() mi.timestamp = now()
self.progress(i, mi.title) self.progress(num, mi.title)
fmts = self.db.formats(x, index_is_id=True) fmts = self.db.formats(book_id, index_is_id=True)
if not fmts: if not fmts:
fmts = [] fmts = []
else: else:
@ -153,7 +162,7 @@ class Worker(Thread): # {{{
identical_book_list = set() identical_book_list = set()
paths = [] paths = []
for fmt in fmts: for fmt in fmts:
p = self.db.format(x, fmt, index_is_id=True, p = self.db.format(book_id, fmt, index_is_id=True,
as_path=True) as_path=True)
if p: if p:
paths.append(p) paths.append(p)
@ -164,10 +173,10 @@ class Worker(Thread): # {{{
identical_book_list = find_identical_books(mi, self.find_identical_books_data) identical_book_list = find_identical_books(mi, self.find_identical_books_data)
if identical_book_list: # books with same author and nearly same title exist in newdb if identical_book_list: # books with same author and nearly same title exist in newdb
if prefs['add_formats_to_existing']: if prefs['add_formats_to_existing']:
self.automerge_book(x, mi, identical_book_list, paths, newdb) self.automerge_book(book_id, mi, identical_book_list, paths, newdb)
else: # Report duplicates for later processing else: # Report duplicates for later processing
self.duplicate_ids[x] = (mi.title, mi.authors) self.duplicate_ids[book_id] = (mi.title, mi.authors)
continue return
new_authors = {k for k, v in newdb.new_api.get_item_ids('authors', mi.authors).iteritems() if v is None} new_authors = {k for k, v in newdb.new_api.get_item_ids('authors', mi.authors).iteritems() if v is None}
new_book_id = newdb.import_book(mi, paths, notify=False, import_hooks=False, new_book_id = newdb.import_book(mi, paths, notify=False, import_hooks=False,
@ -193,12 +202,12 @@ class Worker(Thread): # {{{
if link_map: if link_map:
newdb.new_api.set_link_for_authors(link_map) newdb.new_api.set_link_for_authors(link_map)
co = self.db.conversion_options(x, 'PIPE') co = self.db.conversion_options(book_id, 'PIPE')
if co is not None: if co is not None:
newdb.set_conversion_options(new_book_id, 'PIPE', co) newdb.set_conversion_options(new_book_id, 'PIPE', co)
if self.check_for_duplicates: if self.check_for_duplicates:
newdb.new_api.update_data_for_find_identical_books(new_book_id, self.find_identical_books_data) newdb.new_api.update_data_for_find_identical_books(new_book_id, self.find_identical_books_data)
self.processed.add(x) self.processed.add(book_id)
finally: finally:
for path in paths: for path in paths:
try: try:
@ -490,7 +499,7 @@ class CopyToLibraryAction(InterfaceAction):
return return
self.gui.status_bar.show_message(donemsg % self.gui.status_bar.show_message(donemsg %
dict(num=len(ids), loc=loc), 2000) dict(num=len(self.worker.processed), loc=loc), 2000)
if self.worker.auto_merged_ids: if self.worker.auto_merged_ids:
books = '\n'.join(self.worker.auto_merged_ids.itervalues()) books = '\n'.join(self.worker.auto_merged_ids.itervalues())
info_dialog(self.gui, _('Auto merged'), info_dialog(self.gui, _('Auto merged'),
@ -511,6 +520,18 @@ class CopyToLibraryAction(InterfaceAction):
permanent=True) permanent=True)
self.gui.iactions['Remove Books'].library_ids_deleted( self.gui.iactions['Remove Books'].library_ids_deleted(
self.worker.processed, row) self.worker.processed, row)
if self.worker.failed_books:
def fmt_err(book_id):
err, tb = self.worker.failed_books[book_id]
title = db.title(book_id, index_is_id=True)
return _('Copying: {0} failed, with error:\n{1}').format(title, tb)
title, msg = _('Failed to copy some books'), _('Could not copy some books, click "Show Details" for more information.')
tb = '\n\n'.join(map(fmt_err, self.worker.failed_books))
tb = _('Failed to copy {0} book(s), see below for details').format(len(self.worker.failed_books)) + '\n\n' + tb
if len(ids) == len(self.worker.failed_books):
title, msg = _('Failed to copy books'), _('Could not copy any books, click "Show Details" for more information.')
error_dialog(self.gui, title, msg, det_msg=tb, show=True)
return self.worker.duplicate_ids return self.worker.duplicate_ids
def cannot_do_dialog(self): def cannot_do_dialog(self):