Replace some uses of plugins dict

Kovid Goyal 2020-10-16 20:19:25 +05:30
parent 8311f0a8d3
commit c9f2ae20fc
3 changed files with 17 additions and 12 deletions
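
The pattern being replaced is the indirect lookup through the plugins dict, where each entry is a (module, error_message) tuple; the new code imports the compiled extension directly from the calibre_extensions package. A minimal before/after sketch, using the speedup extension from the hunks below (the old-style error check mirrors the tokenizer hunk; 'worker' is just an example value):

# Old style: fetch the compiled extension from the plugins dict and
# check the accompanying error message.
from calibre.constants import plugins
speedup, err = plugins['speedup']
if err:
    raise RuntimeError('Failed to load module speedup: %s' % err)
speedup.set_thread_name('worker')

# New style: the extension is a regular importable module.
from calibre_extensions import speedup
speedup.set_thread_name('worker')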


@@ -10,12 +10,10 @@ import numbers
 from datetime import datetime, timedelta
 from collections import defaultdict
-from calibre.constants import plugins
 from calibre.utils.date import parse_date, UNDEFINED_DATE, utc_tz
 from calibre.ebooks.metadata import author_to_author_sort
 from polyglot.builtins import iteritems, itervalues, range
-_c_speedup = plugins['speedup'][0].parse_date
+from calibre_extensions.speedup import parse_date as _c_speedup
 def c_parse(val):
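
c_parse itself is unchanged by this hunk; it uses the compiled parse_date as a fast path and falls back to the pure-Python parser. A rough sketch of that shape, assuming _c_speedup(val) returns a (year, month, day, hour, minutes, seconds, tzsecs) tuple (the body below is illustrative, not the verbatim calibre code):

def c_parse(val):
    try:
        year, month, day, hour, minutes, seconds, tzsecs = _c_speedup(val)
    except (AttributeError, TypeError, ValueError):
        pass  # not a string, or the C parser rejected it; fall through
    else:
        # Timezone-offset handling is elided in this sketch
        return datetime(year, month, day, hour, minutes, seconds, tzinfo=utc_tz)
    try:
        return parse_date(val, as_utc=True)
    except Exception:
        return UNDEFINED_DATE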


@@ -20,7 +20,7 @@ builtins.__dict__['__'] = lambda s: s
 # For backwards compat with some third party plugins
 builtins.__dict__['dynamic_property'] = lambda func: func(None)
-from calibre.constants import iswindows, plugins, ismacos, islinux, DEBUG, isfreebsd
+from calibre.constants import iswindows, ismacos, islinux, DEBUG, isfreebsd
 def get_debug_executable():
@@ -149,6 +149,7 @@ def initialize_calibre():
     # Name all threads at the OS level created using the threading module, see
     # http://bugs.python.org/issue15500
     import threading
+    from calibre_extensions import speedup
     orig_start = threading.Thread.start
@@ -163,7 +164,7 @@ def initialize_calibre():
             if name:
                 if isinstance(name, unicode_type):
                     name = name.encode('ascii', 'replace').decode('ascii')
-                plugins['speedup'][0].set_thread_name(name[:15])
+                speedup.set_thread_name(name[:15])
         except Exception:
             pass  # Don't care about failure to set name
     threading.Thread.start = new_start
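
For context, the code around this hunk wraps threading.Thread.start so every thread started through the threading module also gets an OS-level name. Roughly (reconstructed from the context lines above and simplified; the real code also coerces the name to ASCII first):

orig_start = threading.Thread.start

def new_start(self, *args, **kwargs):
    orig_start(self, *args, **kwargs)
    try:
        name = self.name
        if name:
            # OS-level thread names are limited to 15 characters on Linux
            speedup.set_thread_name(name[:15])
    except Exception:
        pass  # Don't care about failure to set name
threading.Thread.start = new_start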


@@ -186,6 +186,7 @@ def _init():
         for names in dispatch
     )
 _init()
@@ -196,6 +197,7 @@ def _unicode_replace(match, int=int, unichr=unichr, maxunicode=sys.maxunicode):
     else:
         return '\N{REPLACEMENT CHARACTER}' # U+FFFD
 UNICODE_UNESCAPE = functools.partial(
     re.compile(COMPILED_MACROS['unicode'], re.I).sub,
     _unicode_replace)
@@ -204,12 +206,15 @@ NEWLINE_UNESCAPE = functools.partial(
     re.compile(r'()\\' + COMPILED_MACROS['nl']).sub,
     '')
 SIMPLE_UNESCAPE = functools.partial(
-    re.compile(r'\\(%s)' % COMPILED_MACROS['simple_escape'] , re.I).sub,
+    re.compile(r'\\(%s)' % COMPILED_MACROS['simple_escape'], re.I).sub,
     # Same as r'\1', but faster on CPython
     operator.methodcaller('group', 1))
-FIND_NEWLINES = lambda x : list(re.compile(COMPILED_MACROS['nl']).finditer(x))
+def FIND_NEWLINES(x):
+    return list(re.compile(COMPILED_MACROS['nl']).finditer(x))
 class Token(object):
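
The operator.methodcaller('group', 1) replacement used by SIMPLE_UNESCAPE is a callable substitution function, so re.sub skips the backreference-template machinery on every match. A standalone illustration of the equivalence (not calibre code):

import operator
import re

pat = re.compile(r'\\(.)')
css = r'a\:b\;c'
print(pat.sub(r'\1', css))                              # a:b;c  (template form)
print(pat.sub(operator.methodcaller('group', 1), css))  # a:b;c  (callable form)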
@@ -439,11 +444,12 @@ class TokenList(list):
         """
         return ''.join(token.as_css() for token in self)
 def load_c_tokenizer():
-    from calibre.constants import plugins
-    tokenizer, err = plugins['tokenizer']
-    if err:
-        raise RuntimeError('Failed to load module tokenizer: %s' % err)
+    from calibre_extensions import tokenizer
     tokens = list(':;(){}[]') + ['DELIM', 'INTEGER', 'STRING']
-    tokenizer.init(COMPILED_TOKEN_REGEXPS, UNICODE_UNESCAPE, NEWLINE_UNESCAPE, SIMPLE_UNESCAPE, FIND_NEWLINES, TOKEN_DISPATCH, COMPILED_TOKEN_INDEXES, *tokens)
+    tokenizer.init(
+        COMPILED_TOKEN_REGEXPS, UNICODE_UNESCAPE, NEWLINE_UNESCAPE,
+        SIMPLE_UNESCAPE, FIND_NEWLINES, TOKEN_DISPATCH, COMPILED_TOKEN_INDEXES,
+        *tokens)
     return tokenizer
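
Callers of load_c_tokenizer typically guard it, since loading the compiled module can fail on a broken install; the fallback wiring below is an assumption for illustration, not part of this diff:

try:
    c_tokenizer = load_c_tokenizer()
except Exception:
    c_tokenizer = None  # fall back to the pure-Python tokenizer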