Mirror of https://github.com/kovidgoyal/calibre.git
When cleaning viewer cache remove all but the newest entry for a particular book path

Commit 9950856380 (parent 793021fcfd)
@@ -90,21 +90,46 @@ def expire_cache(path, instances, max_age):
             instances.remove(instance)


-def expire_cache_and_temp(temp_path, finished_path, metadata, max_age):
+def expire_old_versions(path, instances):
+    instances = filter(lambda x: x['status'] == 'finished', instances)
+    remove = sorted(instances, key=lambda x: x['atime'], reverse=True)[1:]
+    for instance in remove:
+        if robust_rmtree(os.path.join(path, instance['path'])):
+            yield instance
+
+
+def expire_cache_and_temp(temp_path, finished_path, metadata, max_age, force_expire):
     now = time.time()
-    if now - metadata['last_clear_at'] < DAY and max_age >= 0:
+    if now - metadata['last_clear_at'] < DAY and max_age >= 0 and not force_expire:
         return
     clear_temp(temp_path)
     entries = metadata['entries']
+    path_key_map = {}
     for key, instances in tuple(entries.items()):
         if instances:
             expire_cache(finished_path, instances, max_age)
             if not instances:
                 del entries[key]
+            else:
+                for x in instances:
+                    book_path = x.get('book_path')
+                    if book_path:
+                        path_key_map.setdefault(book_path, []).append(key)
+    for keys in path_key_map.values():
+        instances = []
+        for key in keys:
+            instances += entries[key]
+        if len(instances) > 1:
+            removed = tuple(expire_old_versions(finished_path, instances))
+            if removed:
+                for r in removed:
+                    entries[r['key']].remove(r)
+                    if not entries[r['key']]:
+                        del entries[r['key']]
     metadata['last_clear_at'] = now


-def prepare_convert(temp_path, key, st):
+def prepare_convert(temp_path, key, st, book_path):
     tdir = tempfile.mkdtemp(dir=temp_path)
     now = time.time()
     return {
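The heart of the change is the pairing of the new expire_old_versions() generator with the path_key_map built inside expire_cache_and_temp(): finished cache instances are grouped by the path of the source book they were converted from, and for each path only the most recently accessed instance survives. Below is a minimal, self-contained sketch of that rule on toy data (values invented, no filesystem work; the real code also deletes each stale instance's directory with robust_rmtree()):

# Toy model of the cleanup rule, using the same metadata fields as the hunk
# above ('status', 'atime', 'book_path', 'key', 'path'); all values are made up.
entries = {
    'hash-old': [{'status': 'finished', 'atime': 100, 'book_path': '/books/demo.epub',
                  'key': 'hash-old', 'path': 'dir-old'}],
    'hash-new': [{'status': 'finished', 'atime': 200, 'book_path': '/books/demo.epub',
                  'key': 'hash-new', 'path': 'dir-new'}],
}

# Group cache keys (content hashes) by the source file they came from.
path_key_map = {}
for key, instances in entries.items():
    for x in instances:
        if x.get('book_path'):
            path_key_map.setdefault(x['book_path'], []).append(key)

# For each source file, keep only the most recently accessed finished instance.
for keys in path_key_map.values():
    candidates = [x for k in keys for x in entries[k] if x['status'] == 'finished']
    stale = sorted(candidates, key=lambda x: x['atime'], reverse=True)[1:]
    for r in stale:  # the real code first removes r['path'] from the finished cache dir
        entries[r['key']].remove(r)
        if not entries[r['key']]:
            del entries[r['key']]

assert list(entries) == ['hash-new']  # only the newest version's entry remains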
@@ -117,6 +142,7 @@ def prepare_convert(temp_path, key, st):
         'file_mtime': st.st_mtime,
         'file_size': st.st_size,
         'cache_size': 0,
+        'book_path': book_path,
     }

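With the extra book_path parameter, every instance record that prepare_convert() writes into the cache metadata now remembers which source file it was converted from, which is what lets the cleanup pass above group entries per book. A hypothetical example of one such record after this change (all values invented; only keys visible in this diff are shown):

# Illustrative only; not copied from a real cache. 'book_path' is the new field.
example_instance = {
    'path': 'abc123',                  # subdirectory name inside the finished cache
    'status': 'finished',
    'atime': 1700000000.0,             # last time the viewer used this cached copy
    'key': '<book_hash(path, size, mtime)>',
    'file_mtime': 1690000000.0,
    'file_size': 123456,
    'cache_size': 0,
    'book_path': '/home/user/books/demo.epub',
}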
@@ -170,7 +196,7 @@ def save_metadata(metadata, f):
     f.seek(0), f.truncate(), f.write(as_bytes(json.dumps(metadata, indent=2)))


-def prepare_book(path, convert_func=do_convert, max_age=30 * DAY, force=False, prepare_notify=None):
+def prepare_book(path, convert_func=do_convert, max_age=30 * DAY, force=False, prepare_notify=None, force_expire=False):
     st = os.stat(path)
     key = book_hash(path, st.st_size, st.st_mtime)
     finished_path = safe_makedirs(os.path.join(book_cache_dir(), 'f'))
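The new force_expire keyword is threaded through from this public entry point: a caller that wants the per-book cleanup to happen immediately, instead of waiting for the once-a-day window tracked by last_clear_at, can pass force_expire=True. A hedged usage sketch (the import path and the book path are assumptions on my part):

import os

# Assumed import path; prepare_book and book_cache_dir are the functions in this diff.
from calibre.gui2.viewer.convert_book import prepare_book, book_cache_dir

book = '/home/user/books/demo.epub'            # placeholder path to an existing book
ans = prepare_book(book, force_expire=True)    # convert (or reuse the cache) and prune old versions now
print(ans)                                     # should point at the prepared copy in the cache
print(os.listdir(os.path.join(book_cache_dir(), 'f')))   # finished-cache directories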
@@ -194,7 +220,7 @@ def prepare_book(path, convert_func=do_convert, max_age=30 * DAY, force=False, p
                     return os.path.join(finished_path, instance['path'])
         if prepare_notify:
             prepare_notify()
-        instance = prepare_convert(temp_path, key, st)
+        instance = prepare_convert(temp_path, key, st, path)
         instances.append(instance)
         save_metadata(metadata, f)
     convert_func(path, temp_path, key, instance)
@@ -220,7 +246,7 @@ def prepare_book(path, convert_func=do_convert, max_age=30 * DAY, force=False, p
             if q['id'] == instance['id']:
                 q.update(instance)
                 break
-        expire_cache_and_temp(temp_path, finished_path, metadata, max_age)
+        expire_cache_and_temp(temp_path, finished_path, metadata, max_age, force_expire)
         save_metadata(metadata, f)
     return ans

@@ -304,6 +330,23 @@ def find_tests():
         prepare_book(book_src, convert_func=convert_mock, max_age=-1000)
         self.ae([], os.listdir(os.path.join(book_cache_dir(), 'f')))
+
+        # Test modifying a book and opening it repeatedly leaves only
+        # a single entry for it in the cache
+        prepare_book(book_src, convert_func=convert_mock, force_expire=True)
+        finished_entries = os.listdir(os.path.join(book_cache_dir(), 'f'))
+        self.ae(len(finished_entries), 1)
+        open(book_src, 'wb').write(b'bcde')
+        prepare_book(book_src, convert_func=convert_mock, force_expire=True)
+        new_finished_entries = os.listdir(os.path.join(book_cache_dir(), 'f'))
+        self.ae(len(new_finished_entries), 1)
+        self.assertNotEqual(finished_entries, new_finished_entries)
+        open(book_src, 'wb').write(b'bcdef')
+        prepare_book(book_src, convert_func=convert_mock, max_age=-1000, force_expire=True)
+        self.ae([], os.listdir(os.path.join(book_cache_dir(), 'f')))
+        with cache_lock() as f:
+            metadata = json.loads(f.read())
+        self.assertEqual(metadata['entries'], {})

         # Test updating cached book
         book_src = os.path.join(self.tdir, 'book2.epub')
         open(book_src, 'wb').write(b'bb')
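The reason stale entries accumulated at all is visible in prepare_book() above: the cache key is book_hash(path, st.st_size, st.st_mtime), so every edit to the file produces a new key while the old key's finished directory stays behind. The added test exercises exactly that: rewrite the file, reopen it with force_expire=True, and check that the finished cache still holds a single (but different) entry. A small sketch of the key-churn effect with a stand-in hash (the real book_hash() implementation is not part of this diff):

import hashlib
import os
import tempfile

def fake_book_hash(path, size, mtime):
    # Stand-in for calibre's book_hash(); only meant to show that the key
    # changes whenever the file's size or mtime changes.
    return hashlib.sha1(f'{path}|{size}|{mtime}'.encode()).hexdigest()

with tempfile.NamedTemporaryFile(suffix='.epub', delete=False) as f:
    f.write(b'abcd')
    book = f.name

st = os.stat(book)
key1 = fake_book_hash(book, st.st_size, st.st_mtime)

with open(book, 'wb') as f:          # simulate editing the book
    f.write(b'bcde!')
st = os.stat(book)
key2 = fake_book_hash(book, st.st_size, st.st_mtime)

assert key1 != key2                  # same book path, different cache key
os.remove(book)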