mirror of https://github.com/kovidgoyal/calibre.git
Use a NamedTuple for the list of extra files
This commit is contained in:
parent 0437378bfa
commit 1457f8ecc9
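For context, the commit replaces the plain 3-tuples returned by Cache.list_extra_files() with a small NamedTuple, so call sites can use attribute access instead of positional unpacking. A minimal sketch of the idea (this mirrors the ExtraFile definition added in the diff below; sizes_by_relpath is just an illustrative helper, not part of the commit):

    import os
    from typing import NamedTuple, Tuple


    class ExtraFile(NamedTuple):
        relpath: str                  # path relative to the book directory, '/'-separated
        file_path: str                # absolute path of the file on disk
        stat_result: os.stat_result   # result of calling os.stat() on the file


    def sizes_by_relpath(files: Tuple[ExtraFile, ...]) -> dict:
        # attribute access replaces (relpath, file_path, stat_result) unpacking
        return {ef.relpath: ef.stat_result.st_size for ef in files}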
@@ -20,6 +20,7 @@ from io import DEFAULT_BUFFER_SIZE, BytesIO
 from queue import Queue
 from threading import Lock
 from time import monotonic, sleep, time
+from typing import NamedTuple, Tuple
 
 from calibre import as_unicode, detect_ncpus, isbytestring
 from calibre.constants import iswindows, preferred_encoding
@@ -53,6 +54,12 @@ from calibre.utils.localization import canonicalize_lang
 from polyglot.builtins import cmp, iteritems, itervalues, string_or_bytes
 
 
+class ExtraFile(NamedTuple):
+    relpath: str
+    file_path: str
+    stat_result: os.stat_result
+
+
 def api(f):
     f.is_cache_api = True
     return f
@@ -3096,7 +3103,7 @@ class Cache:
         return added
 
     @read_api
-    def list_extra_files(self, book_id, use_cache=False, pattern=''):
+    def list_extra_files(self, book_id, use_cache=False, pattern='') -> Tuple[ExtraFile, ...]:
         '''
         Get information about extra files in the book's directory.
 
@@ -3104,8 +3111,9 @@ class Cache:
         :param pattern: the pattern of filenames to search for. Empty pattern matches all extra files. Patterns must use / as separator.
                         Use the DATA_FILE_PATTERN constant to match files inside the data directory.
 
-        :return: A tuple of all extra files matching the specified pattern. Each element of the tuple is (relpath, file_path, stat_result)
-                 where relpath is the relative path of the file to the book directory using / as a separator.
+        :return: A tuple of all extra files matching the specified pattern. Each element of the tuple is
+                 ExtraFile(relpath, file_path, stat_result). Where relpath is the relative path of the file
+                 to the book directory using / as a separator.
                  stat_result is the result of calling os.stat() on the file.
         '''
         ans = self.extra_files_cache.setdefault(book_id, {}).get(pattern)
@@ -3116,7 +3124,7 @@ class Cache:
             for (relpath, file_path, stat_result) in self.backend.iter_extra_files(
                 book_id, path, self.fields['formats'], yield_paths=True, pattern=pattern
             ):
-                ans.append((relpath, file_path, stat_result))
+                ans.append(ExtraFile(relpath, file_path, stat_result))
             self.extra_files_cache[book_id][pattern] = ans = tuple(ans)
         return ans
 
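Because a NamedTuple is still a tuple, the API change is backward compatible: callers that keep unpacking three values work unchanged, which is why the rest of the commit is a mechanical switch to attribute access. A small usage sketch, where db and book_id are placeholders for a Cache instance and a valid book id:

    def show_extra_files(db, book_id):
        # new style: attribute access on the ExtraFile records
        for ef in db.list_extra_files(book_id):
            print(ef.relpath, ef.stat_result.st_size)
        # old style: positional unpacking still works, since ExtraFile is a tuple
        for relpath, file_path, stat_result in db.list_extra_files(book_id):
            print(relpath, file_path, stat_result.st_mtime)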
@@ -27,7 +27,7 @@ def implementation(db, notify_changes, action, *args):
     mi = db.get_metadata(book_id)
     plugboards = db.pref('plugboards', {})
     formats = get_formats(db.formats(book_id), formats)
-    extra_files_for_export = tuple(relpath for (relpath, file_path, stat_result) in db.list_extra_files(book_id, pattern=DATA_FILE_PATTERN))
+    extra_files_for_export = tuple(ef.relpath for ef in db.list_extra_files(book_id, pattern=DATA_FILE_PATTERN))
     plugboards['extra_files_for_export'] = extra_files_for_export
     return mi, plugboards, formats, db.library_id, db.pref(
         'user_template_functions', []
@@ -80,8 +80,8 @@ def copy_one_book(
     format_map = {}
     fmts = list(db.formats(book_id, verify_formats=False))
     extra_file_map = {}
-    for (relpath, file_path, stat_result) in db.list_extra_files(book_id):
-        extra_file_map[relpath] = file_path
+    for ef in db.list_extra_files(book_id):
+        extra_file_map[ef.relpath] = ef.file_path
     for fmt in fmts:
         path = db.format_abspath(book_id, fmt)
         if path:
@@ -410,8 +410,8 @@ class AddRemoveTest(BaseTest):
         self.assertFalse(os.path.exists(os.path.join(bookdir, 'sub', 'recurse')))
 
         def clear_extra_files(book_id):
-            for (relpath, file_path, stat_result) in dest_db.list_extra_files(book_id):
-                os.remove(file_path)
+            for ef in dest_db.list_extra_files(book_id):
+                os.remove(ef.file_path)
 
         assert_does_not_have_extra_files(1)
 
@@ -468,9 +468,9 @@ class AddRemoveTest(BaseTest):
 
         def extra_files_for(book_id):
            ans = {}
-            for relpath, file_path, stat_result in db.list_extra_files(book_id):
-                with open(file_path) as f:
-                    ans[relpath] = f.read()
+            for ef in db.list_extra_files(book_id):
+                with open(ef.file_path) as f:
+                    ans[ef.relpath] = f.read()
            return ans
 
        add_extra(1, 'one'), add_extra(1, 'sub/one')
@@ -104,9 +104,9 @@ class FilesystemTest(BaseTest):
         # test only formats being changed
         init_cache()
         ef = set()
-        for (relpath, file_path, stat_result) in cache.list_extra_files(1):
-            ef.add(relpath)
-            self.assertTrue(os.path.exists(file_path))
+        for efx in cache.list_extra_files(1):
+            ef.add(efx.relpath)
+            self.assertTrue(os.path.exists(efx.file_path))
         self.assertEqual(ef, {'a.side', 'subdir/a.fmt1'})
         fname = cache.fields['formats'].table.fname_map[1]['FMT1']
         cache.fields['formats'].table.fname_map[1]['FMT1'] = 'some thing else'
@@ -229,8 +229,8 @@ class FilesystemTest(BaseTest):
         os.mkdir(os.path.join(bookdir, 'sub'))
         with open(os.path.join(bookdir, 'sub', 'recurse'), 'w') as f:
             f.write('recurse')
-        self.assertEqual({relpath for (relpath, _, _) in cache.list_extra_files(1, pattern='sub/**/*')}, {'sub/recurse'})
-        self.assertEqual({relpath for (relpath, _, _) in cache.list_extra_files(1)}, {'exf', 'sub/recurse'})
+        self.assertEqual({ef.relpath for ef in cache.list_extra_files(1, pattern='sub/**/*')}, {'sub/recurse'})
+        self.assertEqual({ef.relpath for ef in cache.list_extra_files(1)}, {'exf', 'sub/recurse'})
         for part_size in (1 << 30, 100, 1):
             with TemporaryDirectory('export_lib') as tdir, TemporaryDirectory('import_lib') as idir:
                 exporter = Exporter(tdir, part_size=part_size)
@@ -56,8 +56,8 @@ class ExtraFilesWatcher(QObject):
 
     def get_extra_files(self, book_id):
         db = self.gui.current_db.new_api
-        return tuple(ExtraFile(relpath, stat_result.st_mtime, stat_result.st_size) for
-                     relpath, file_path, stat_result in db.list_extra_files(book_id, pattern=DATA_FILE_PATTERN))
+        return tuple(ExtraFile(ef.relpath, ef.stat_result.st_mtime, ef.stat_result.st_size) for
+                     ef in db.list_extra_files(book_id, pattern=DATA_FILE_PATTERN))
 
     def check_registered_books(self):
         changed = {}
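Note that the ExtraFile used inside ExtraFilesWatcher is a separate, GUI-local record holding a relpath, mtime and size rather than a full stat result; only its inputs change here, from tuple unpacking to attribute access on the database-level ExtraFile. Its definition is not shown in this diff, but judging from the call above it is presumably something along these lines:

    from typing import NamedTuple

    class ExtraFile(NamedTuple):  # GUI-side record (assumed); distinct from the db-level ExtraFile
        relpath: str
        mtime: float
        size: int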
@@ -215,8 +215,8 @@ class Saver(QObject):
         extra_files = {}
         if self.opts.save_extra_files:
             extra_files = {}
-            for (relpath, file_path, stat_result) in self.db.new_api.list_extra_files(int(book_id), pattern=DATA_FILE_PATTERN):
-                extra_files[relpath] = file_path
+            for efx in self.db.new_api.list_extra_files(int(book_id), pattern=DATA_FILE_PATTERN):
+                extra_files[efx.relpath] = efx.file_path
         if not fmts and not self.opts.write_opf and not self.opts.save_cover and not extra_files:
             return
 
@@ -2400,8 +2400,7 @@ class BuiltinHasExtraFiles(BuiltinFormatterFunction):
     def evaluate(self, formatter, kwargs, mi, locals):
         db = self.get_database(mi).new_api
         try:
-            files = db.list_extra_files(mi.id, use_cache=True, pattern=DATA_FILE_PATTERN)
-            return 'Yes' if files else ''
+            return 'Yes' if db.list_extra_files(mi.id, use_cache=True, pattern=DATA_FILE_PATTERN) else ''
         except Exception as e:
             traceback.print_exc()
             raise ValueError(e)
@@ -2419,7 +2418,7 @@ class BuiltinExtraFileNames(BuiltinFormatterFunction):
         db = self.get_database(mi).new_api
         try:
             files = db.list_extra_files(mi.id, use_cache=True, pattern=DATA_FILE_PATTERN)
-            return sep.join(file[0].partition('/')[-1] for file in files)
+            return sep.join(file.relpath.partition('/')[-1] for file in files)
         except Exception as e:
             traceback.print_exc()
             raise ValueError(e)
@@ -2438,10 +2437,9 @@ class BuiltinExtraFileSize(BuiltinFormatterFunction):
         db = self.get_database(mi).new_api
         try:
             q = posixpath.join(DATA_DIR_NAME, file_name)
-            files = db.list_extra_files(mi.id, use_cache=True, pattern=DATA_FILE_PATTERN)
-            for f in files:
-                if f[0] == q:
-                    return str(f[2].st_size)
+            for f in db.list_extra_files(mi.id, use_cache=True, pattern=DATA_FILE_PATTERN):
+                if f.relpath == q:
+                    return str(f.stat_result.st_size)
             return str(-1)
         except Exception as e:
             traceback.print_exc()
@@ -2465,10 +2463,9 @@ class BuiltinExtraFileModtime(BuiltinFormatterFunction):
         db = self.get_database(mi).new_api
         try:
             q = posixpath.join(DATA_DIR_NAME, file_name)
-            files = db.list_extra_files(mi.id, use_cache=True, pattern=DATA_FILE_PATTERN)
-            for f in files:
-                if f[0] == q:
-                    val = f[2].st_mtime
+            for f in db.list_extra_files(mi.id, use_cache=True, pattern=DATA_FILE_PATTERN):
+                if f.relpath == q:
+                    val = f.stat_result.st_mtime
                     if format_string:
                         return format_date(datetime.fromtimestamp(val), format_string)
                     return str(val)