Make various global caches used by the server thread-safe

This commit is contained in:
Kovid Goyal 2015-06-16 08:16:14 +05:30
parent 8405be4ebb
commit 043d345287
2 changed files with 59 additions and 50 deletions

View File

@ -8,6 +8,7 @@ __copyright__ = '2015, Kovid Goyal <kovid at kovidgoyal.net>'
import os
from io import BytesIO
from threading import Lock
from calibre import fit_image
from calibre.constants import config_dir
@ -27,6 +28,7 @@ from calibre.utils.magick.draw import thumbnail, Image
plugboard_content_server_value = 'content_server'
plugboard_content_server_formats = ['epub', 'mobi', 'azw3']
update_metadata_in_fmts = frozenset(plugboard_content_server_formats)
lock = Lock()
# Get book formats/cover as a cached filesystem file {{{
@ -45,6 +47,8 @@ def create_file_copy(ctx, rd, prefix, library_id, book_id, ext, mtime, copy_func
fname = os.path.join(base, bname)
do_copy = True
mtime = timestampfromdt(mtime)
# TODO: Implement locking for this cache
try:
ans = lopen(fname, 'r+b')
do_copy = os.fstat(ans.fileno()).st_mtime < mtime
@ -167,34 +171,35 @@ def icon(ctx, rd, which):
return lopen(path, 'rb')
except EnvironmentError:
raise HTTPNotFound()
tdir = os.path.join(rd.tdir, 'icons')
cached = os.path.join(tdir, '%d-%s.png' % (sz, which))
try:
return lopen(cached, 'rb')
except EnvironmentError:
pass
try:
src = lopen(path, 'rb')
except EnvironmentError:
raise HTTPNotFound()
with src:
img = Image()
img.load(src.read())
width, height = img.size
scaled, width, height = fit_image(width, height, sz, sz)
if scaled:
img.size = (width, height)
try:
ans = lopen(cached, 'w+b')
except EnvironmentError:
with lock:
tdir = os.path.join(rd.tdir, 'icons')
cached = os.path.join(tdir, '%d-%s.png' % (sz, which))
try:
os.mkdir(tdir)
return lopen(cached, 'rb')
except EnvironmentError:
pass
ans = lopen(cached, 'w+b')
ans.write(img.export('png'))
ans.seek(0)
return ans
try:
src = lopen(path, 'rb')
except EnvironmentError:
raise HTTPNotFound()
with src:
img = Image()
img.load(src.read())
width, height = img.size
scaled, width, height = fit_image(width, height, sz, sz)
if scaled:
img.size = (width, height)
try:
ans = lopen(cached, 'w+b')
except EnvironmentError:
try:
os.mkdir(tdir)
except EnvironmentError:
pass
ans = lopen(cached, 'w+b')
ans.write(img.export('png'))
ans.seek(0)
return ans
@endpoint('/get/{what}/{book_id}/{library_id=None}', types={'book_id':int})

View File

@ -67,6 +67,7 @@ class Context(object):
self.opts = opts
self.library_broker = LibraryBroker(libraries)
self.testing = testing
self.lock = Lock()
def init_session(self, endpoint, data):
pass
@ -80,39 +81,42 @@ class Context(object):
def allowed_book_ids(self, data, db):
    ''' Return the set of book ids in *db* that the current user may
    access, caching the result on the *data* (session) object keyed by
    the library id. '''
    # TODO: Implement this based on data.username caching result on the
    # data object
    # The lock guards the shared per-session cache against concurrent
    # server threads; the duplicated unlocked body that preceded this
    # (and returned before the lock was ever taken) has been removed.
    with self.lock:
        ans = data.allowed_book_ids.get(db.server_library_id)
        if ans is None:
            # Cache miss: currently every book in the library is allowed
            ans = data.allowed_book_ids[db.server_library_id] = db.all_book_ids()
        return ans
def get_categories(self, data, db, restrict_to_ids=None):
    ''' Return the category data for *db*, restricted to the books the
    current user is allowed to see. Results are kept in a bounded,
    LRU-style per-library cache and recomputed when the library has
    been modified since the cached entry was created. '''
    if restrict_to_ids is None:
        restrict_to_ids = self.allowed_book_ids(data, db)
    # The cache is shared between server threads, so all access to it
    # happens under the lock. The duplicated unlocked body that preceded
    # this (and returned before the lock was taken) has been removed.
    with self.lock:
        cache = self.library_broker.category_caches[db.server_library_id]
        old = cache.pop(restrict_to_ids, None)
        if old is None or old[0] <= db.last_modified():
            # Cache miss or stale entry: recompute the categories
            categories = db.get_categories(book_ids=restrict_to_ids)
            cache[restrict_to_ids] = old = (utcnow(), categories)
            if len(cache) > self.CATEGORY_CACHE_SIZE:
                cache.popitem(last=False)  # evict least-recently-used entry
        else:
            cache[restrict_to_ids] = old  # re-insert to mark as recently used
        return old[1]
def search(self, data, db, query, restrict_to_ids=None):
if restrict_to_ids is None:
restrict_to_ids = self.allowed_book_ids(data, db)
cache = self.library_broker.search_caches[db.server_library_id]
key = (query, restrict_to_ids)
old = cache.pop(key, None)
if old is None or old[0] < db.clear_search_cache_count:
matches = db.search(query, book_ids=restrict_to_ids)
cache[key] = old = (db.clear_search_cache_count, matches)
if len(cache) > self.SEARCH_CACHE_SIZE:
cache.popitem(last=False)
else:
cache[key] = old
return old[1]
with self.lock:
cache = self.library_broker.search_caches[db.server_library_id]
key = (query, restrict_to_ids)
old = cache.pop(key, None)
if old is None or old[0] < db.clear_search_cache_count:
matches = db.search(query, book_ids=restrict_to_ids)
cache[key] = old = (db.clear_search_cache_count, matches)
if len(cache) > self.SEARCH_CACHE_SIZE:
cache.popitem(last=False)
else:
cache[key] = old
return old[1]
class Handler(object):