mirror of https://github.com/kovidgoyal/calibre.git
Commit 1 of API for the extra files cache. Commit 2 will resolve conflicts with commit edbf95a

parent edbf95a902
commit 5468a465ee
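The commit adds a per-book cache of "extra files" (files in a book's directory that are not formats, cover, or metadata), keyed by book id and persisted in the library's prefs. A minimal sketch of how the resulting API fits together, assuming an open calibre database handle db and a valid book_id (both illustrative):

    # illustrative usage of the API added by this commit
    cache = db.new_api
    files = cache.list_extra_files(book_id)        # fills the cache on a miss
    cache.clear_extra_files_cache(book_id)         # drop one book's entry (None drops all)
    cache.save_extra_files_cache_if_needed()       # write to prefs only when dirty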
@@ -1887,11 +1887,12 @@ class DB:

     def iter_extra_files(self, book_id, book_path, formats_field, yield_paths=False, pattern=''):
         known_files = {COVER_FILE_NAME, METADATA_FILE_NAME}
-        for fmt in formats_field.for_book(book_id, default_value=()):
-            fname = formats_field.format_fname(book_id, fmt)
-            fpath = self.format_abspath(book_id, fmt, fname, book_path, do_file_rename=False)
-            if fpath:
-                known_files.add(os.path.basename(fpath))
+        if formats_field is not None:
+            for fmt in formats_field.for_book(book_id, default_value=()):
+                fname = formats_field.format_fname(book_id, fmt)
+                fpath = self.format_abspath(book_id, fmt, fname, book_path, do_file_rename=False)
+                if fpath:
+                    known_files.add(os.path.basename(fpath))
         full_book_path = os.path.abspath(os.path.join(self.library_path, book_path))
         if pattern:
             from pathlib import Path
@@ -1910,9 +1911,11 @@ class DB:
                 relpath = os.path.relpath(path, full_book_path)
                 relpath = relpath.replace(os.sep, '/')
                 if relpath not in known_files:
-                    mtime = os.path.getmtime(path)
+                    stat = os.stat(path)
+                    mtime = stat.st_mtime
+                    fsize = stat.st_size
                     if yield_paths:
-                        yield relpath, path, mtime
+                        yield relpath, path, mtime, fsize
                     else:
                         try:
                             src = open(path, 'rb')
@@ -1921,7 +1924,7 @@ class DB:
                             time.sleep(1)
                             src = open(path, 'rb')
                         with src:
-                            yield relpath, src, mtime
+                            yield relpath, src, mtime, fsize

     def add_extra_file(self, relpath, stream, book_path, replace=True, auto_rename=False):
         bookdir = os.path.join(self.library_path, book_path)
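iter_extra_files() now yields 4-tuples ending in the file size, computed from a single os.stat() call instead of a separate os.path.getmtime(). A hedged consumer sketch, assuming an open backend plus a book's id, path, and formats field (the variable names are illustrative):

    # illustrative: yield_paths=True yields filesystem paths instead of open streams
    for relpath, fpath, mtime, fsize in backend.iter_extra_files(
            book_id, book_path, formats_field, yield_paths=True):
        print(f'{relpath}: {fsize} bytes, mtime={mtime}')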
@@ -68,6 +68,9 @@ class MetadataBackup(Thread):
                 if self.stop_running.is_set() or self.db.is_closed:
                     return
                 traceback.print_exc()
+
+            self.db.save_extra_files_cache_if_needed()
+
             try:
                 book_id = self.db.get_a_dirtied_book()
                 if book_id is None:
@@ -154,6 +154,7 @@ class Cache:
         self.formatter_template_cache = {}
         self.dirtied_cache = {}
         self.link_maps_cache = {}
+        self.extra_files_cache = {}
         self.vls_for_books_cache = None
         self.vls_for_books_lib_in_process = None
         self.vls_cache_lock = Lock()
@@ -252,6 +253,8 @@ class Cache:
         if self.dirtied_cache:
             self.dirtied_sequence = max(itervalues(self.dirtied_cache))+1
         self._initialize_dynamic_categories()
+        self.extra_files_cache = self.backend.prefs.get('extra_files_cache', {})
+        self.extra_files_cache_dirty = False

     @write_api
     def initialize_template_cache(self):
@@ -273,6 +276,33 @@ class Cache:
         self.vls_for_books_cache = None
         self.vls_for_books_lib_in_process = None

+    @write_api
+    def clear_extra_files_cache(self, book_id=None):
+        if book_id is None:
+            pref_changed = bool(self.extra_files_cache)
+            self.extra_files_cache = {}
+        else:
+            pref_changed = self.extra_files_cache.pop(str(book_id), False)
+        if pref_changed:
+            # self.backend.prefs.set('extra_files_cache', self.extra_files_cache)
+            self.extra_files_cache_dirty = True
+
+    @write_api
+    def add_to_extra_files_cache(self, book_id, data):
+        self.extra_files_cache[str(book_id)] = data
+        # self.backend.prefs.set('extra_files_cache', self.extra_files_cache)
+        self.extra_files_cache_dirty = True
+
+    @write_api
+    def save_extra_files_cache_if_needed(self):
+        if self.extra_files_cache_dirty:
+            self.backend.prefs.set('extra_files_cache', self.extra_files_cache)
+            self.extra_files_cache_dirty = False
+
+    @read_api
+    def get_extra_files_from_cache(self, book_id):
+        return self.extra_files_cache.get(str(book_id), {})
+
     @read_api
     def last_modified(self):
         return self.backend.last_modified()
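A sketch of how these four methods are meant to interact, assuming a Cache instance named cache (illustrative, not code from the commit):

    data = cache.get_extra_files_from_cache(book_id)   # {} on a miss
    if not data:
        data = build_from_filesystem(book_id)          # hypothetical rebuild step
        cache.add_to_extra_files_cache(book_id, data)  # stores and marks the cache dirty
    cache.save_extra_files_cache_if_needed()           # single batched prefs write

The commented-out prefs.set() calls record the design choice: writing prefs on every mutation would cost one database write per changed book, so mutations only flip extra_files_cache_dirty and the flush happens in one place (the backup thread, and at shutdown).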
@@ -293,6 +323,7 @@ class Cache:
         self._clear_search_caches(book_ids)
         self.clear_link_map_cache(book_ids)

     @write_api
     def clear_link_map_cache(self, book_ids=None):
         if book_ids is None:
             self.link_maps_cache = {}
@@ -560,7 +591,6 @@ class Cache:
                 has_more = do_one()
             except Exception:
                 if self.backend.fts_enabled:
-                    import traceback
                     traceback.print_exc()
                 sleep(self.fts_indexing_sleep_time)

@@ -1540,7 +1570,6 @@ class Cache:
         except:
             # This almost certainly means that the book has been deleted while
             # the backup operation sat in the queue.
-            import traceback
             traceback.print_exc()
         return mi, sequence

@@ -2063,7 +2092,6 @@ class Cache:
                 raw = metadata_to_opf(mi)
                 self.backend.write_backup(path, raw)
             except Exception:
-                import traceback
                 traceback.print_exc()
         self.backend.remove_books(path_map, permanent=permanent)
         for field in itervalues(self.fields):
@@ -2577,7 +2605,6 @@ class Cache:
                 if progress is not None:
                     progress(item_name, item_count, total)
             except Exception:
-                import traceback
                 traceback.print_exc()

         all_paths = {self._field_for('path', book_id).partition('/')[0] for book_id in self._all_book_ids()}
@@ -2666,8 +2693,9 @@ class Cache:
         try:
             plugin.run(self)
         except Exception:
-            import traceback
             traceback.print_exc()
+        # do this last in case a plugin changes the extra files
+        self.save_extra_files_cache_if_needed()
         self._shutdown_fts(stage=2)
         with self.write_lock:
             self.backend.close()
@@ -2966,7 +2994,7 @@ class Cache:
             bp = self.field_for('path', book_id)
             extra_files[book_id] = ef = {}
             if bp:
-                for (relpath, fobj, mtime) in self.backend.iter_extra_files(book_id, bp, self.fields['formats']):
+                for (relpath, fobj, mtime, fsize) in self.backend.iter_extra_files(book_id, bp, self.fields['formats']):
                     key = f'{key_prefix}:{book_id}:.|{relpath}'
                     with exporter.start_file(key, mtime=mtime) as dest:
                         shutil.copyfileobj(fobj, dest)
@@ -3070,6 +3098,7 @@ class Cache:
         added = {}
         for relpath, stream_or_path in map_of_relpath_to_stream_or_path.items():
             added[relpath] = bool(self.backend.add_extra_file(relpath, stream_or_path, path, replace, auto_rename))
+        self.clear_extra_files_cache(book_id)
         return added

     @write_api
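Dropping the book's cache entry after a write keeps the cache coherent: the next listing call re-scans the directory. The enclosing method (named add_extra_files() in calibre's Cache; the name is not visible in this hunk) might be driven like this, where the relpath and data are assumptions for illustration:

    # hypothetical call; maps relative paths to streams or filesystem paths
    import io
    added = cache.add_extra_files(book_id, {'data/notes.txt': io.BytesIO(b'hello')})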
@@ -3083,11 +3112,37 @@ class Cache:
             book_path = self._field_for('path', src_id)
             if book_path:
                 book_path = book_path.replace('/', os.sep)
-                for (relpath, file_path, mtime) in self.backend.iter_extra_files(
+                for (relpath, file_path, mtime, fsize) in self.backend.iter_extra_files(
                         src_id, book_path, self.fields['formats'], yield_paths=True):
                     added.add(self.backend.add_extra_file(relpath, file_path, path, replace=replace, auto_rename=True))
+        self.clear_extra_files_cache(dest_id)
         return added

+    @write_api
+    def list_extra_files(self, book_id):
+        '''
+        For book_id, return a dict mapping the relative path of each extra
+        file (from the book's directory) to a dict of the form: {
+            'file_path': full path to the file,
+            'mtime': the file's modification time as a floating point number,
+            'fsize': the file's size in bytes
+        }
+        '''
+        ans = self.get_extra_files_from_cache(book_id)
+        if not ans:
+            path = self._field_for('path', book_id)
+            if path:
+                book_path = (path + '/data').replace('/', os.sep)
+                for (relpath, file_path, mtime, fsize) in self.backend.iter_extra_files(
+                        book_id, book_path, None, yield_paths=True):
+                    ans[relpath] = {'file_path': file_path, 'mtime': mtime, 'fsize': fsize}
+                self.add_to_extra_files_cache(book_id, ans)
+        return ans
+
     @read_api
     def list_extra_files_matching(self, book_id, pattern=''):
         ' List extra data files matching the specified pattern. Empty pattern matches all. Recursive globbing with ** is supported. '
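Note that the original body built a flat dict that was overwritten on every loop iteration, so only the last file survived; the version above accumulates one entry per file, keyed by relpath, and the stray debug prints are dropped. With that fix, a caller can do (illustrative):

    info = cache.list_extra_files(book_id)
    for relpath, details in info.items():
        print(relpath, details['fsize'], details['mtime'])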
@@ -3095,7 +3150,7 @@ class Cache:
         ans = {}
         if path:
             book_path = path.replace('/', os.sep)
-            for (relpath, file_path, mtime) in self.backend.iter_extra_files(
+            for (relpath, file_path, mtime, fsize) in self.backend.iter_extra_files(
                     book_id, book_path, self.fields['formats'], yield_paths=True, pattern=pattern):
                 ans[relpath] = file_path
         return ans
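The pattern is applied by the backend (via pathlib-style globbing, per the `from pathlib import Path` import in the first hunk), so recursive `**` patterns work. An illustrative call, where the pattern itself is an assumption:

    # e.g. every JPEG anywhere under the book's directory;
    # the result maps relpath -> absolute file path
    jpegs = cache.list_extra_files_matching(book_id, '**/*.jpg')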
@@ -112,7 +112,7 @@ def copy_one_book(
             preserve_uuid=preserve_uuid, run_hooks=False)[0][0]
     bp = db.field_for('path', book_id)
     if bp:
-        for (relpath, src_path, mtime) in db.backend.iter_extra_files(book_id, bp, db.fields['formats'], yield_paths=True):
+        for (relpath, src_path, mtime, fsize) in db.backend.iter_extra_files(book_id, bp, db.fields['formats'], yield_paths=True):
             nbp = newdb.field_for('path', new_book_id)
             if nbp:
                 newdb.backend.add_extra_file(relpath, src_path, nbp)
@@ -381,7 +381,7 @@ class WritingTest(BaseTest):
         def read_all_extra_files(book_id=1):
             ans = {}
             bp = cache.field_for('path', book_id)
-            for (relpath, fobj, mtime) in cache.backend.iter_extra_files(book_id, bp, cache.fields['formats']):
+            for (relpath, fobj, mtime, fsize) in cache.backend.iter_extra_files(book_id, bp, cache.fields['formats']):
                 ans[relpath] = fobj.read()
             return ans

@@ -329,6 +329,10 @@ class ChooseLibraryAction(InterfaceAction):
                 None, None),
                 attr='action_restore_database')
         ac.triggered.connect(self.restore_database, type=Qt.ConnectionType.QueuedConnection)
         self.maintenance_menu.addAction(ac)
+        ac = self.create_action(spec=(_('Clear extra files cache'), 'lt.png',
+                None, None), attr='action_clear_extra_files_cache')
+        ac.triggered.connect(self.clear_extra_files_cache, type=Qt.ConnectionType.QueuedConnection)
+        self.maintenance_menu.addAction(ac)

         self.choose_menu.addMenu(self.maintenance_menu)
@@ -649,6 +653,10 @@ class ChooseLibraryAction(InterfaceAction):
         if restore_database(db, self.gui):
             self.gui.library_moved(db.library_path)

+    def clear_extra_files_cache(self):
+        db = self.gui.library_view.model().db
+        db.new_api.clear_extra_files_cache()
+
     def check_library(self):
         from calibre.gui2.dialogs.check_library import CheckLibraryDialog, DBCheck
         self.gui.library_view.save_state()
@@ -273,7 +273,9 @@ class ViewAction(InterfaceAction):
         if not self._view_check(len(rows), max_=10, skip_dialog_name='open-folder-many-check'):
             return
         for i, row in enumerate(rows):
-            path = self.gui.library_view.model().db.abspath(row.row())
+            db = self.gui.library_view.model().db
+            db.new_api.clear_extra_files_cache(self.gui.library_view.model().id(row))
+            path = db.abspath(row.row())
             open_local_file(path)
             if ismacos and i < len(rows) - 1:
                 time.sleep(0.1)  # Finder cannot handle multiple folder opens
@@ -283,7 +285,9 @@ class ViewAction(InterfaceAction):
             open_local_file(path)

     def view_data_folder_for_id(self, id_):
-        path = self.gui.library_view.model().db.abspath(id_, index_is_id=True)
+        db = self.gui.library_view.model().db
+        db.new_api.clear_extra_files_cache(id_)
+        path = db.abspath(id_, index_is_id=True)
         open_local_file(os.path.join(path, DATA_DIR_NAME))

     def view_book(self, triggered):