Mirror of https://github.com/kovidgoyal/calibre.git, synced 2025-07-08 10:44:09 -04:00
Export config dir, also preserve mtimes for format files
parent c901cb5300
commit de6fe272ea
@@ -1446,7 +1446,7 @@ class DB(object):
             if wam is not None:
                 wam.close_handles()
 
-    def add_format(self, book_id, fmt, stream, title, author, path, current_name):
+    def add_format(self, book_id, fmt, stream, title, author, path, current_name, mtime=None):
         fmt = ('.' + fmt.lower()) if fmt else ''
         fname = self.construct_file_name(book_id, title, author, len(fmt))
         path = os.path.join(self.library_path, path)
@@ -1475,8 +1475,12 @@ class DB(object):
             with lopen(dest, 'wb') as f:
                 shutil.copyfileobj(stream, f)
                 size = f.tell()
+            if mtime is not None:
+                os.utime(dest, (mtime, mtime))
         elif os.path.exists(dest):
             size = os.path.getsize(dest)
+            if mtime is not None:
+                os.utime(dest, (mtime, mtime))
 
         return size, fname
 
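The effect of the two new branches is simply to re-apply the source file's modification time after the format data has been written. A standalone sketch of the same idea (the helper name and call site below are illustrative, not calibre API):

    import os, shutil

    def copy_with_mtime(stream, dest, mtime=None):
        # Write the stream to dest, then restore the original modification time
        # so the copy does not appear newer than the file it came from.
        with open(dest, 'wb') as f:
            shutil.copyfileobj(stream, f)
        if mtime is not None:
            os.utime(dest, (mtime, mtime))  # os.utime takes (atime, mtime)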
@@ -1320,7 +1320,7 @@ class Cache(object):
            self._reload_from_db()
            raise
 
-    def _do_add_format(self, book_id, fmt, stream, name=None):
+    def _do_add_format(self, book_id, fmt, stream, name=None, mtime=None):
         path = self._field_for('path', book_id)
         if path is None:
             # Theoretically, this should never happen, but apparently it
@@ -1335,7 +1335,7 @@ class Cache(object):
         except IndexError:
             author = _('Unknown')
 
-        size, fname = self.backend.add_format(book_id, fmt, stream, title, author, path, name)
+        size, fname = self.backend.add_format(book_id, fmt, stream, title, author, path, name, mtime=mtime)
         return size, fname
 
     @api
@@ -2122,9 +2122,10 @@ class Cache(object):
                 progress(self._field_for('title', book_id), i + 1, total)
             format_metadata[book_id] = {}
             for fmt in self._formats(book_id):
+                mdata = self.format_metadata(book_id, fmt)
                 key = '%s:%s:%s' % (key_prefix, book_id, fmt)
                 format_metadata[book_id][fmt] = key
-                with exporter.start_file(key) as dest:
+                with exporter.start_file(key, mtime=mdata.get('mtime')) as dest:
                     self._copy_format_to(book_id, fmt, dest, report_file_size=dest.ensure_space)
             cover_key = '%s:%s:%s' % (key_prefix, book_id, '.cover')
             with exporter.start_file(cover_key) as dest:
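The mtime handed to the exporter comes from the per-format metadata the cache already tracks. Roughly, assuming db is a Cache instance and exporter/key as in the hunk above (a sketch, not the exact export_library code):

    mdata = db.format_metadata(book_id, fmt)      # includes the format's size and mtime
    with exporter.start_file(key, mtime=mdata.get('mtime')) as dest:
        db.copy_format_to(book_id, fmt, dest)     # stream the format file into the archive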
@@ -2133,7 +2134,6 @@ class Cache(object):
             else:
                 format_metadata[book_id]['.cover'] = cover_key
         exporter.set_metadata(library_key, metadata)
-        exporter.commit()
         if progress is not None:
             progress(_('Completed'), total, total)
 
@@ -2162,7 +2162,7 @@ def import_library(library_key, importer, library_path, progress=None):
                 cache.backend.set_cover(book_id, path, stream, no_processing=True)
             else:
                 stream = importer.start_file(fmtkey, _('{0} format for {1}').format(fmt.upper(), title))
-                size, fname = cache._do_add_format(book_id, fmt, stream)
+                size, fname = cache._do_add_format(book_id, fmt, stream, mtime=stream.mtime)
                 cache.fields['formats'].table.update_fmt(book_id, fmt, fname, size, cache.backend)
                 stream.close()
         cache.dump_metadata({book_id})
@@ -152,6 +152,7 @@ class FilesystemTest(BaseTest):
         with TemporaryDirectory('export_lib') as tdir, TemporaryDirectory('import_lib') as idir:
             exporter = Exporter(tdir, part_size=part_size)
             cache.export_library('l', exporter)
+            exporter.commit()
             importer = Importer(tdir)
             ic = import_library('l', importer, idir)
             self.assertEqual(cache.all_book_ids(), ic.all_book_ids())
@@ -159,3 +160,4 @@ class FilesystemTest(BaseTest):
             self.assertEqual(cache.cover(book_id), ic.cover(book_id), 'Covers not identical for book: %d' % book_id)
             for fmt in cache.formats(book_id):
                 self.assertEqual(cache.format(book_id, fmt), ic.format(book_id, fmt))
+                self.assertEqual(cache.format_metadata(book_id, fmt)['mtime'], ic.format_metadata(book_id, fmt)['mtime'])
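Because the exporter now records an mtime for every format entry, the archive itself can be inspected for it; a hypothetical spot check, assuming tdir holds the exported parts and key is a format key written by export_library:

    importer = Importer(tdir)
    src = importer.start_file(key, 'format entry')   # returns a FileSource
    print(src.mtime)   # None for entries exported without an mtime, e.g. covers
    src.close()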
@@ -4,7 +4,15 @@
 
 from __future__ import (unicode_literals, division, absolute_import,
                         print_function)
-import os, json, struct, hashlib
+import os, json, struct, hashlib, sys
+from binascii import hexlify
+
+from calibre.constants import config_dir
+from calibre.utils.config import prefs
+from calibre.utils.filenames import samefile
+
+
+# Export {{{
 
 def send_file(from_obj, to_obj, chunksize=1<<20):
     m = hashlib.sha1()
@@ -18,11 +26,12 @@ def send_file(from_obj, to_obj, chunksize=1<<20):
 
 class FileDest(object):
 
-    def __init__(self, key, exporter):
+    def __init__(self, key, exporter, mtime=None):
         self.exporter, self.key = exporter, key
         self.hasher = hashlib.sha1()
         self.start_pos = exporter.f.tell()
         self._discard = False
+        self.mtime = mtime
 
     def discard(self):
         self._discard = True
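FileDest is the write end handed out by Exporter.start_file and is used as a context manager; a minimal usage sketch, assuming an existing exporter and treating src_path and key as placeholders:

    import os, shutil

    with exporter.start_file(key, mtime=os.path.getmtime(src_path)) as dest:
        with open(src_path, 'rb') as src:
            shutil.copyfileobj(src, dest)
    # on exit, FileDest records (part, offset, size, digest, mtime) for this key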
@@ -43,7 +52,7 @@ class FileDest(object):
         if not self._discard:
             size = self.exporter.f.tell() - self.start_pos
             digest = type('')(self.hasher.hexdigest())
-            self.exporter.file_metadata[self.key] = (len(self.exporter.parts), self.start_pos, size, digest)
+            self.exporter.file_metadata[self.key] = (len(self.exporter.parts), self.start_pos, size, digest, self.mtime)
         del self.exporter, self.hasher
 
     def __enter__(self):
@@ -87,8 +96,11 @@ class Exporter(object):
         self.parts[-1] = self.f.name
 
     def ensure_space(self, size):
-        if size + self.f.tell() < self.part_size:
-            return
+        try:
+            if size + self.f.tell() < self.part_size:
+                return
+        except AttributeError:
+            raise RuntimeError('This exporter has already been committed, cannot add to it')
         self.commit_part()
         self.new_part()
 
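The try/except turns a confusing AttributeError (self.f presumably no longer exists once the exporter has been committed) into an explicit error. The same pattern in isolation, as a toy example rather than the real Exporter:

    class Sink(object):
        def __init__(self, path):
            self.f = open(path, 'wb')

        def commit(self):
            self.f.close()
            self.f = None        # later writes must fail loudly

        def ensure_space(self, size):
            try:
                pos = self.f.tell()          # AttributeError once committed
            except AttributeError:
                raise RuntimeError('already committed, cannot add to it')
            return pos + size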
@@ -109,15 +121,82 @@ class Exporter(object):
         pos = self.f.tell()
         digest = send_file(fileobj, self.f)
         size = self.f.tell() - pos
-        self.file_metadata[key] = (len(self.parts), pos, size, digest)
+        mtime = os.fstat(fileobj.fileno()).st_mtime
+        self.file_metadata[key] = (len(self.parts), pos, size, digest, mtime)
 
-    def start_file(self, key):
-        return FileDest(key, self)
+    def start_file(self, key, mtime=None):
+        return FileDest(key, self, mtime=mtime)
+
+    def export_dir(self, path, dir_key):
+        pkey = hexlify(dir_key)
+        self.metadata[dir_key] = files = []
+        for dirpath, dirnames, filenames in os.walk(path):
+            for fname in filenames:
+                fpath = os.path.join(dirpath, fname)
+                rpath = os.path.relpath(fpath, path).replace(os.sep, '/')
+                key = '%s:%s' % (pkey, rpath)
+                with lopen(fpath, 'rb') as f:
+                    self.add_file(f, key)
+                files.append((key, rpath))
+
+def all_known_libraries():
+    from calibre.gui2 import gprefs
+    paths = set(gprefs.get('library_usage_stats', ()))
+    if prefs['library_path']:
+        paths.add(prefs['library_path'])
+    added = set()
+    for path in paths:
+        mdb = os.path.join(path)
+        if os.path.isdir(path) and os.path.exists(mdb):
+            seen = False
+            for c in added:
+                if samefile(mdb, c):
+                    seen = True
+                    break
+            if not seen:
+                added.add(path)
+    return added
+
+def export(destdir, library_paths=None, dbmap=None, progress1=None, progress2=None):
+    from calibre.db.cache import Cache
+    from calibre.db.backend import DB
+    if library_paths is None:
+        library_paths = all_known_libraries()
+    dbmap = dbmap or {}
+    dbmap = {os.path.normcase(os.path.abspath(k)):v for k, v in dbmap.iteritems()}
+    exporter = Exporter(destdir)
+    exporter.metadata['libraries'] = libraries = []
+    total = len(library_paths) + 2
+    for i, lpath in enumerate(library_paths):
+        if progress1 is not None:
+            progress1(i + 1, total, lpath)
+        key = os.path.normcase(os.path.abspath(lpath))
+        db, closedb = dbmap.get(lpath), False
+        if db is None:
+            db = Cache(DB(lpath, load_user_formatter_functions=False))
+            db.init()
+            closedb = True
+        else:
+            db = db.new_api
+        db.export_library(key, exporter, progress=progress2)
+        if closedb:
+            db.close()
+        libraries.append(key)
+    if progress1 is not None:
+        progress1(total - 1, total, _('Settings and plugins'))
+    exporter.export_dir(config_dir, 'config_dir')
+    exporter.commit()
+    if progress1 is not None:
+        progress1(total, total, _('Completed'))
+# }}}
+
+# Import {{{
 
 class FileSource(object):
 
-    def __init__(self, f, size, digest, description, importer):
+    def __init__(self, f, size, digest, description, mtime, importer):
         self.f, self.size, self.digest, self.description = f, size, digest, description
+        self.mtime = mtime
         self.end = f.tell() + size
         self.hasher = hashlib.sha1()
         self.importer = importer
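export_dir stores each file of the config directory under a key made of the hex-encoded dir_key plus the slash-separated relative path, and keeps the (key, relative path) pairs in the archive metadata. The importing side for 'config_dir' is not part of this commit; a hypothetical sketch of what it could look like, assuming the Importer exposes the stored metadata dict:

    import os

    def import_dir(importer, dir_key, dest_root):
        # Hypothetical inverse of Exporter.export_dir: rebuild the tree from the
        # (key, relative path) pairs recorded under metadata[dir_key].
        for key, rpath in importer.metadata[dir_key]:
            fpath = os.path.join(dest_root, *rpath.split('/'))
            fdir = os.path.dirname(fpath)
            if not os.path.exists(fdir):
                os.makedirs(fdir)
            src = importer.start_file(key, rpath)
            with open(fpath, 'wb') as f:
                f.write(src.read())
            src.close()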
@@ -180,7 +259,11 @@ class Importer(object):
         return lopen(self.part_map[num], 'rb')
 
     def start_file(self, key, description):
-        partnum, pos, size, digest = self.file_metadata[key]
+        partnum, pos, size, digest, mtime = self.file_metadata[key]
         f = self.part(partnum)
         f.seek(pos)
-        return FileSource(f, size, digest, description, self)
+        return FileSource(f, size, digest, description, mtime, self)
+# }}}
+
+if __name__ == '__main__':
+    export(sys.argv[-1], progress1=print, progress2=print)
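With the new __main__ hook, the module can be run directly to dump everything into a target directory; the same entry point is callable from Python. A minimal invocation (the destination and library paths below are examples, not defaults):

    # Export one specific library plus the config directory, reporting progress.
    export('/tmp/calibre-export',
           library_paths=['/home/user/Calibre Library'],
           progress1=print, progress2=print)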