Make various global caches used by the server thread-safe

This commit is contained in:
Kovid Goyal 2015-06-16 08:16:14 +05:30
parent 8405be4ebb
commit 043d345287
2 changed files with 59 additions and 50 deletions

View File

@ -8,6 +8,7 @@ __copyright__ = '2015, Kovid Goyal <kovid at kovidgoyal.net>'
import os
from io import BytesIO
from threading import Lock
from calibre import fit_image
from calibre.constants import config_dir
@ -27,6 +28,7 @@ from calibre.utils.magick.draw import thumbnail, Image
plugboard_content_server_value = 'content_server'
plugboard_content_server_formats = ['epub', 'mobi', 'azw3']
update_metadata_in_fmts = frozenset(plugboard_content_server_formats)
lock = Lock()
# Get book formats/cover as a cached filesystem file {{{
@ -45,6 +47,8 @@ def create_file_copy(ctx, rd, prefix, library_id, book_id, ext, mtime, copy_func
fname = os.path.join(base, bname)
do_copy = True
mtime = timestampfromdt(mtime)
# TODO: Implement locking for this cache
try:
ans = lopen(fname, 'r+b')
do_copy = os.fstat(ans.fileno()).st_mtime < mtime
@ -167,6 +171,7 @@ def icon(ctx, rd, which):
return lopen(path, 'rb')
except EnvironmentError:
raise HTTPNotFound()
with lock:
tdir = os.path.join(rd.tdir, 'icons')
cached = os.path.join(tdir, '%d-%s.png' % (sz, which))
try:

View File

@ -67,6 +67,7 @@ class Context(object):
self.opts = opts
self.library_broker = LibraryBroker(libraries)
self.testing = testing
self.lock = Lock()
def init_session(self, endpoint, data):
pass
@ -80,6 +81,7 @@ class Context(object):
def allowed_book_ids(self, data, db):
# TODO: Implement this based on data.username caching result on the
# data object
with self.lock:
ans = data.allowed_book_ids.get(db.server_library_id)
if ans is None:
ans = data.allowed_book_ids[db.server_library_id] = db.all_book_ids()
@ -88,6 +90,7 @@ class Context(object):
def get_categories(self, data, db, restrict_to_ids=None):
if restrict_to_ids is None:
restrict_to_ids = self.allowed_book_ids(data, db)
with self.lock:
cache = self.library_broker.category_caches[db.server_library_id]
old = cache.pop(restrict_to_ids, None)
if old is None or old[0] <= db.last_modified():
@ -102,6 +105,7 @@ class Context(object):
def search(self, data, db, query, restrict_to_ids=None):
if restrict_to_ids is None:
restrict_to_ids = self.allowed_book_ids(data, db)
with self.lock:
cache = self.library_broker.search_caches[db.server_library_id]
key = (query, restrict_to_ids)
old = cache.pop(key, None)