Merge from trunk

This commit is contained in:
Charles Haley 2010-09-26 18:25:00 +01:00
commit c03b9247cf
5 changed files with 25 additions and 25 deletions

View File

@@ -492,7 +492,6 @@ class BulkSeries(BulkBase):
if clear or val != '': if clear or val != '':
extras = [] extras = []
next_index = self.db.get_next_cc_series_num_for(val, num=self.col_id) next_index = self.db.get_next_cc_series_num_for(val, num=self.col_id)
print 'cc commit next index', next_index
for book_id in book_ids: for book_id in book_ids:
if clear: if clear:
extras.append(None) extras.append(None)

View File

@@ -95,11 +95,7 @@ class ConfigWidget(ConfigWidgetBase, Ui_Form):
def mark_dirty(self): def mark_dirty(self):
db = self.gui.library_view.model().db db = self.gui.library_view.model().db
ids = [id for id in db.data.iterallids()] db.dirtied(list(db.data.iterallids()))
db.dirtied(ids)
info_dialog(self, _('Backup metadata'),
_('Metadata will be backed up while calibre is running, at the '
'rate of 30 books per minute.'), show=True)
def debug_device_detection(self, *args): def debug_device_detection(self, *args):
from calibre.gui2.preferences.device_debug import DebugDevice from calibre.gui2.preferences.device_debug import DebugDevice

View File

@@ -48,7 +48,7 @@ class MetadataBackup(Thread): # {{{
while self.keep_running: while self.keep_running:
try: try:
time.sleep(0.5) # Limit to two per second time.sleep(0.5) # Limit to two per second
id_ = self.db.dirtied_queue.get(True, 2) id_ = self.db.dirtied_queue.get(True, 1.45)
except Empty: except Empty:
continue continue
except: except:

View File

@@ -566,7 +566,7 @@ class LibraryDatabase2(LibraryDatabase, SchemaUpgrade, CustomColumns):
def metadata_for_field(self, key): def metadata_for_field(self, key):
return self.field_metadata[key] return self.field_metadata[key]
def clear_dirtied(self, book_ids=None): def clear_dirtied(self, book_ids):
''' '''
Clear the dirtied indicator for the books. This is used when fetching Clear the dirtied indicator for the books. This is used when fetching
metadata, creating an OPF, and writing a file are separated into steps. metadata, creating an OPF, and writing a file are separated into steps.
@@ -585,7 +585,7 @@ class LibraryDatabase2(LibraryDatabase, SchemaUpgrade, CustomColumns):
self.conn.commit() self.conn.commit()
def dump_metadata(self, book_ids=None, remove_from_dirtied=True, def dump_metadata(self, book_ids=None, remove_from_dirtied=True,
commit=True, dump_to=None): commit=True):
''' '''
Write metadata for each record to an individual OPF file Write metadata for each record to an individual OPF file
@@ -598,19 +598,12 @@ class LibraryDatabase2(LibraryDatabase, SchemaUpgrade, CustomColumns):
for book_id in book_ids: for book_id in book_ids:
if not self.data.has_id(book_id): if not self.data.has_id(book_id):
continue continue
mi = self.get_metadata(book_id, index_is_id=True, get_cover=False) path, mi = self.get_metadata_for_dump(book_id)
# Always set cover to cover.jpg. Even if cover doesn't exist, if path is None:
# no harm done. This way no need to call dirtied when continue
# cover is set/removed
mi.cover = 'cover.jpg'
raw = metadata_to_opf(mi) raw = metadata_to_opf(mi)
path = os.path.join(self.abspath(book_id, index_is_id=True), with open(path, 'wb') as f:
'metadata.opf') f.write(raw)
if dump_to is None:
with open(path, 'wb') as f:
f.write(raw)
else:
dump_to.append((path, raw))
if remove_from_dirtied: if remove_from_dirtied:
self.conn.execute('DELETE FROM metadata_dirtied WHERE book=?', self.conn.execute('DELETE FROM metadata_dirtied WHERE book=?',
(book_id,)) (book_id,))
@@ -660,9 +653,13 @@ class LibraryDatabase2(LibraryDatabase, SchemaUpgrade, CustomColumns):
try: try:
path = os.path.join(self.abspath(idx, index_is_id=True), 'metadata.opf') path = os.path.join(self.abspath(idx, index_is_id=True), 'metadata.opf')
mi = self.get_metadata(idx, index_is_id=True) mi = self.get_metadata(idx, index_is_id=True)
# Always set cover to cover.jpg. Even if cover doesn't exist,
# no harm done. This way no need to call dirtied when
# cover is set/removed
mi.cover = 'cover.jpg'
except: except:
return ((None, None)) return (None, None)
return ((path, mi)) return (path, mi)
def get_metadata(self, idx, index_is_id=False, get_cover=False): def get_metadata(self, idx, index_is_id=False, get_cover=False):
''' '''

View File

@@ -48,12 +48,13 @@ class Restore(Thread):
self.books = [] self.books = []
self.conflicting_custom_cols = {} self.conflicting_custom_cols = {}
self.failed_restores = [] self.failed_restores = []
self.mismatched_dirs = []
self.successes = 0 self.successes = 0
self.tb = None self.tb = None
@property @property
def errors_occurred(self): def errors_occurred(self):
return self.failed_dirs or \ return self.failed_dirs or self.mismatched_dirs or \
self.conflicting_custom_cols or self.failed_restores self.conflicting_custom_cols or self.failed_restores
@property @property
@@ -74,6 +75,13 @@ class Restore(Thread):
for x in self.conflicting_custom_cols: for x in self.conflicting_custom_cols:
ans += '\t#'+x+'\n' ans += '\t#'+x+'\n'
if self.mismatched_dirs:
ans += '\n\n'
ans += 'The following folders were ignored:\n'
for x in self.mismatched_dirs:
ans += '\t'+x+'\n'
return ans return ans
@@ -140,7 +148,7 @@ class Restore(Thread):
'path': path, 'path': path,
}) })
else: else:
self.ignored_dirs.append(dirpath) self.mismatched_dirs.append(dirpath)
def create_cc_metadata(self): def create_cc_metadata(self):
self.books.sort(key=itemgetter('timestamp')) self.books.sort(key=itemgetter('timestamp'))