Mirror of https://github.com/kovidgoyal/calibre.git
Automated conversion of % format specifiers
Using ruff. Does not change any translatable strings. There are still several thousand usages of % left that ruff won't auto-convert; those will have to be converted by hand someday.
commit 5c7dc9613b
parent 39f7f616bc
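A note on method (the exact invocation is not recorded in the commit): ruff's pyupgrade rules cover exactly this class of rewrite — UP031 for printf-style %-formatting and UP032 for str.format() calls — so something along the lines of `ruff check --select UP031,UP032 --fix .` is a plausible sketch of how the conversion was produced, not a confirmed command. The pattern repeated throughout the diff below is:

    # Illustration only: the kind of rewrite ruff's --fix applies.
    name = 'calibre'
    # Before: printf-style and str.format() interpolation
    a = 'No plugin named %r' % name
    b = 'No plugin named {!r}'.format(name)
    # After: the equivalent f-string
    c = f'No plugin named {name!r}'
    assert a == b == c

Strings marked for translation with _() are left untouched, presumably because gettext needs the literal text as a lookup key, which eager f-string interpolation would break.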
@@ -27,7 +27,7 @@ for name, src in sources.items():
     try:
         for sz in (16, 32, 128, 256, 512, 1024):
             iname = f'icon_{sz}x{sz}.png'
-            iname2x = 'icon_{0}x{0}@2x.png'.format(sz // 2)
+            iname2x = f'icon_{sz // 2}x{sz // 2}@2x.png'
             if src.endswith('.svg'):
                 subprocess.check_call(['rsvg-convert', src, '-w', str(sz), '-h', str(sz), '-o', iname])
             else:
@@ -656,7 +656,7 @@ class Build(Command):
         os.chdir(bdir)
         try:
             self.check_call(cmd + ['-S', os.path.dirname(sources[0])])
-            self.check_call([self.env.make] + ['-j{}'.format(cpu_count or 1)])
+            self.check_call([self.env.make] + [f'-j{cpu_count or 1}'])
         finally:
             os.chdir(cwd)
         os.rename(self.j(bdir, 'libheadless.so'), target)
@@ -733,7 +733,7 @@ sip-file = {os.path.basename(sipf)!r}
         env = os.environ.copy()
         if is_macos_universal_build:
             env['ARCHS'] = 'x86_64 arm64'
-        self.check_call([self.env.make] + ([] if iswindows else ['-j{}'.format(os.cpu_count() or 1)]), env=env)
+        self.check_call([self.env.make] + ([] if iswindows else [f'-j{os.cpu_count() or 1}']), env=env)
         e = 'pyd' if iswindows else 'so'
         m = glob.glob(f'{ext.name}/{ext.name}.*{e}')
         if not m:
@@ -114,7 +114,7 @@ class Check(Command):
         for i, f in enumerate(dirty_files):
             self.info('\tChecking', f)
             if self.file_has_errors(f):
-                self.info('{} files left to check'.format(len(dirty_files) - i - 1))
+                self.info(f'{len(dirty_files) - i - 1} files left to check')
                 try:
                     edit_file(f)
                 except FileNotFoundError:
@@ -56,16 +56,16 @@ class ReadFileWithProgressReporting:  # {{{
         eta = int((self._total - self.tell()) / bit_rate) + 1
         eta_m, eta_s = eta / 60, eta % 60
         sys.stdout.write(
-            ' {:.1f}% {:.1f}/{:.1f}MB {:.1f} KB/sec {} minutes, {} seconds left'
-            .format(frac * 100, mb_pos, mb_tot, kb_rate, eta_m, eta_s)
+            f' {frac * 100:.1f}% {mb_pos:.1f}/{mb_tot:.1f}MB {kb_rate:.1f} KB/sec {eta_m} minutes, {eta_s} seconds left'
         )
         sys.stdout.write('\x1b[u')
         if self.tell() >= self._total:
             sys.stdout.write('\n')
             t = int(time.time() - self.start_time) + 1
             print(
-                'Upload took {} minutes and {} seconds at {:.1f} KB/sec'
-                .format(t/60, t % 60, kb_rate)
+                f'Upload took {t/60} minutes and {t % 60} seconds at {kb_rate:.1f} KB/sec'
             )
             sys.stdout.flush()
@@ -388,7 +388,7 @@ class Bootstrap(Command):
             st = time.time()
             clone_cmd.insert(2, '--depth=1')
             subprocess.check_call(clone_cmd, cwd=self.d(self.SRC))
-            print('Downloaded translations in {} seconds'.format(int(time.time() - st)))
+            print(f'Downloaded translations in {int(time.time() - st)} seconds')
         else:
             if os.path.exists(tdir):
                 subprocess.check_call(['git', 'pull'], cwd=tdir)
@@ -460,7 +460,7 @@ def plugin_to_index(plugin, count):
     released = datetime(*tuple(map(int, re.split(r'\D', plugin['last_modified'])))[:6]).strftime('%e %b, %Y').lstrip()
     details = [
         'Version: <b>{}</b>'.format(escape('.'.join(map(str, plugin['version'])))),
-        'Released: <b>{}</b>'.format(escape(released)),
+        f'Released: <b>{escape(released)}</b>',
         'Author: {}'.format(escape(plugin['author'])),
         'calibre: {}'.format(escape('.'.join(map(str, plugin['minimum_calibre_version'])))),
         'Platforms: {}'.format(escape(', '.join(sorted(plugin['supported_platforms']) or ['all']))),
@@ -50,9 +50,9 @@ class Stage2(Command):
         platforms = 'linux64', 'linuxarm64', 'osx', 'win'
         for x in platforms:
             cmd = (
-                '''{exe} -c "import subprocess; subprocess.Popen(['{exe}', './setup.py', '{x}']).wait() != 0 and'''
-                ''' input('Build of {x} failed, press Enter to exit');"'''
-            ).format(exe=sys.executable, x=x)
+                f'''{sys.executable} -c "import subprocess; subprocess.Popen(['{sys.executable}', './setup.py', '{x}']).wait() != 0 and'''
+                f''' input('Build of {x} failed, press Enter to exit');"'''
+            )
             session.append('title ' + x)
             session.append('launch ' + cmd)
@@ -220,8 +220,8 @@ class Manual(Command):
                 if x and not os.path.exists(x):
                     os.symlink('.', x)
             self.info(
-                'Built manual for {} languages in {} minutes'
-                .format(len(jobs), int((time.time() - st) / 60.))
+                f'Built manual for {len(jobs)} languages in {int((time.time() - st) / 60.)} minutes'
             )
         finally:
             os.chdir(cwd)
@@ -335,6 +335,6 @@ class TagRelease(Command):
     def run(self, opts):
         self.info('Tagging release')
         subprocess.check_call(
-            'git tag -s v{0} -m "version-{0}"'.format(__version__).split()
+            f'git tag -s v{__version__} -m "version-{__version__}"'.split()
         )
         subprocess.check_call(f'git push origin v{__version__}'.split())
@@ -361,12 +361,11 @@ class UploadDemo(Command):  # {{{

     def run(self, opts):
         check_call(
-            '''ebook-convert {}/demo.html /tmp/html2lrf.lrf '''
+            f'''ebook-convert {self.j(self.SRC, HTML2LRF)}/demo.html /tmp/html2lrf.lrf '''
             '''--title='Demonstration of html2lrf' --authors='Kovid Goyal' '''
             '''--header '''
             '''--serif-family "/usr/share/fonts/corefonts, Times New Roman" '''
-            '''--mono-family "/usr/share/fonts/corefonts, Andale Mono" '''
-            ''''''.format(self.j(self.SRC, HTML2LRF)),
+            '''--mono-family "/usr/share/fonts/corefonts, Andale Mono" ''',
             shell=True
         )
@@ -98,7 +98,7 @@ def main():
     else:
         if len(sys.argv) == 1:
             raise SystemExit('Usage: win-ci.py sw|build|test')
-        raise SystemExit('{!r} is not a valid action'.format(sys.argv[-1]))
+        raise SystemExit(f'{sys.argv[-1]!r} is not a valid action')


 if __name__ == '__main__':
@@ -292,7 +292,7 @@ def get_proxy_info(proxy_scheme, proxy_string):
     '''
     from polyglot.urllib import urlparse
     try:
-        proxy_url = '%s://%s'%(proxy_scheme, proxy_string)
+        proxy_url = f'{proxy_scheme}://{proxy_string}'
         urlinfo = urlparse(proxy_url)
         ans = {
             'scheme': urlinfo.scheme,
@@ -146,7 +146,7 @@ def _get_cache_dir():

     if iswindows:
         try:
-            candidate = os.path.join(winutil.special_folder_path(winutil.CSIDL_LOCAL_APPDATA), '%s-cache'%__appname__)
+            candidate = os.path.join(winutil.special_folder_path(winutil.CSIDL_LOCAL_APPDATA), f'{__appname__}-cache')
         except ValueError:
             return confcache
     elif ismacos:
@@ -341,7 +341,7 @@ class Plugins(collections.abc.Mapping):
         try:
             return import_module('calibre_extensions.' + name), ''
         except ModuleNotFoundError:
-            raise KeyError('No plugin named %r'%name)
+            raise KeyError(f'No plugin named {name!r}')
         except Exception as err:
             return None, str(err)

@@ -322,8 +322,7 @@ class Plugin:  # {{{
         interface. It is called when the user does: calibre-debug -r "Plugin
         Name". Any arguments passed are present in the args variable.
         '''
-        raise NotImplementedError('The %s plugin has no command line interface'
-                %self.name)
+        raise NotImplementedError(f'The {self.name} plugin has no command line interface')

     # }}}

@@ -540,7 +539,7 @@ class CatalogPlugin(Plugin):  # {{{
             Custom fields sort after standard fields
             '''
             if key.startswith('#'):
-                return '~%s' % key[1:]
+                return f'~{key[1:]}'
             else:
                 return key

@@ -575,9 +574,8 @@ class CatalogPlugin(Plugin):  # {{{
         if requested_fields - all_fields:
             from calibre.library import current_library_name
             invalid_fields = sorted(requested_fields - all_fields)
-            print('invalid --fields specified: %s' % ', '.join(invalid_fields))
-            print("available fields in '%s': %s" %
-                  (current_library_name(), ', '.join(sorted(all_fields))))
+            print('invalid --fields specified: {}'.format(', '.join(invalid_fields)))
+            print("available fields in '{}': {}".format(current_library_name(), ', '.join(sorted(all_fields))))
             raise ValueError('unable to generate catalog with specified fields')

         fields = [x for x in of if x in all_fields]
@@ -78,10 +78,9 @@ class OptionRecommendation:
     def validate_parameters(self):
         if self.option.choices and self.recommended_value not in \
                 self.option.choices:
-            raise ValueError('OpRec: %s: Recommended value not in choices'%
-                    self.option.name)
+            raise ValueError(f'OpRec: {self.option.name}: Recommended value not in choices')
         if not (isinstance(self.recommended_value, (numbers.Number, bytes, str)) or self.recommended_value is None):
-            raise ValueError('OpRec: %s:'%self.option.name + repr(
+            raise ValueError(f'OpRec: {self.option.name}:' + repr(
                 self.recommended_value) + ' is not a string or a number')


@@ -229,7 +228,7 @@ class InputFormatPlugin(Plugin):
     def __call__(self, stream, options, file_ext, log,
                  accelerators, output_dir):
         try:
-            log('InputFormatPlugin: %s running'%self.name)
+            log(f'InputFormatPlugin: {self.name} running')
             if hasattr(stream, 'name'):
                 log('on', stream.name)
         except:
@@ -85,7 +85,7 @@ def disable_plugin(plugin_or_name):
     if plugin is None:
         raise ValueError(f'No plugin named: {x} found')
     if not plugin.can_be_disabled:
-        raise ValueError('Plugin %s cannot be disabled'%x)
+        raise ValueError(f'Plugin {x} cannot be disabled')
     dp = config['disabled_plugins']
     dp.add(x)
     config['disabled_plugins'] = dp
@@ -199,7 +199,7 @@ def _run_filetype_plugins(path_to_file, ft=None, occasion='preprocess'):
         try:
             nfp = plugin.run(nfp) or nfp
         except:
-            print('Running file type plugin %s failed with traceback:'%plugin.name, file=oe)
+            print(f'Running file type plugin {plugin.name} failed with traceback:', file=oe)
             traceback.print_exc(file=oe)
     sys.stdout, sys.stderr = oo, oe
     def x(j):
@@ -526,10 +526,10 @@ def add_plugin(path_to_zip_file):
     plugin = load_plugin(path_to_zip_file)
     if plugin.name in builtin_names:
         raise NameConflict(
-            'A builtin plugin with the name %r already exists' % plugin.name)
+            f'A builtin plugin with the name {plugin.name!r} already exists')
     if plugin.name in get_system_plugins():
         raise NameConflict(
-            'A system plugin with the name %r already exists' % plugin.name)
+            f'A system plugin with the name {plugin.name!r} already exists')
     plugin = initialize_plugin(plugin, path_to_zip_file, PluginInstallationType.EXTERNAL)
     plugins = config['plugins']
     zfp = os.path.join(plugin_dir, plugin.name+'.zip')
@@ -892,7 +892,7 @@ def main(args=sys.argv):
         name, custom = opts.customize_plugin, ''
         plugin = find_plugin(name.strip())
         if plugin is None:
-            print('No plugin with the name %s exists'%name)
+            print(f'No plugin with the name {name} exists')
             return 1
         customize_plugin(plugin, custom)
     if opts.enable_plugin is not None:
@@ -296,14 +296,14 @@ class CalibrePluginFinder:

     def load(self, path_to_zip_file):
         if not os.access(path_to_zip_file, os.R_OK):
-            raise PluginNotFound('Cannot access %r'%path_to_zip_file)
+            raise PluginNotFound(f'Cannot access {path_to_zip_file!r}')

         with zipfile.ZipFile(path_to_zip_file) as zf:
             plugin_name = self._locate_code(zf, path_to_zip_file)

         try:
             ans = None
-            plugin_module = 'calibre_plugins.%s'%plugin_name
+            plugin_module = f'calibre_plugins.{plugin_name}'
             m = sys.modules.get(plugin_module, None)
             if m is not None:
                 reload(m)
@@ -315,8 +315,7 @@ class CalibrePluginFinder:
                         obj.name != 'Trivial Plugin':
                     plugin_classes.append(obj)
             if not plugin_classes:
-                raise InvalidPlugin('No plugin class found in %s:%s'%(
-                    as_unicode(path_to_zip_file), plugin_name))
+                raise InvalidPlugin(f'No plugin class found in {as_unicode(path_to_zip_file)}:{plugin_name}')
             if len(plugin_classes) > 1:
                 plugin_classes.sort(key=lambda c:(getattr(c, '__module__', None) or '').count('.'))

@@ -324,14 +323,12 @@ class CalibrePluginFinder:

             if ans.minimum_calibre_version > numeric_version:
                 raise InvalidPlugin(
-                    'The plugin at %s needs a version of calibre >= %s' %
-                    (as_unicode(path_to_zip_file), '.'.join(map(str,
+                    'The plugin at {} needs a version of calibre >= {}'.format(as_unicode(path_to_zip_file), '.'.join(map(str,
                         ans.minimum_calibre_version))))

             if platform not in ans.supported_platforms:
                 raise InvalidPlugin(
-                    'The plugin at %s cannot be used on %s' %
-                    (as_unicode(path_to_zip_file), platform))
+                    f'The plugin at {as_unicode(path_to_zip_file)} cannot be used on {platform}')

             return ans
         except:
@@ -359,8 +356,7 @@ class CalibrePluginFinder:
         else:
             if self._identifier_pat.match(plugin_name) is None:
                 raise InvalidPlugin(
-                    'The plugin at %r uses an invalid import name: %r' %
-                    (path_to_zip_file, plugin_name))
+                    f'The plugin at {path_to_zip_file!r} uses an invalid import name: {plugin_name!r}')

         pynames = [x for x in names if x.endswith('.py')]

@@ -394,9 +390,8 @@ class CalibrePluginFinder:
                     break

         if '__init__' not in names:
-            raise InvalidPlugin(('The plugin in %r is invalid. It does not '
+            raise InvalidPlugin(f'The plugin in {path_to_zip_file!r} is invalid. It does not '
                     'contain a top-level __init__.py file')
-                    % path_to_zip_file)

         with self._lock:
             self.loaded_plugins[plugin_name] = path_to_zip_file, names, tuple(all_names)
@@ -163,7 +163,7 @@ class DBPrefs(dict):  # {{{
             self.__setitem__(key, val)

     def get_namespaced(self, namespace, key, default=None):
-        key = 'namespaced:%s:%s'%(namespace, key)
+        key = f'namespaced:{namespace}:{key}'
         try:
             return dict.__getitem__(self, key)
         except KeyError:
@@ -174,7 +174,7 @@ class DBPrefs(dict):  # {{{
             raise KeyError('Colons are not allowed in keys')
         if ':' in namespace:
             raise KeyError('Colons are not allowed in the namespace')
-        key = 'namespaced:%s:%s'%(namespace, key)
+        key = f'namespaced:{namespace}:{key}'
         self[key] = val

     def write_serialized(self, library_path):
@@ -273,7 +273,7 @@ def IdentifiersConcat():
     '''String concatenation aggregator for the identifiers map'''

     def step(ctxt, key, val):
-        ctxt.append('%s:%s'%(key, val))
+        ctxt.append(f'{key}:{val}')

     def finalize(ctxt):
         try:
@@ -684,7 +684,7 @@ class DB:
                 suffix = 1
                 while icu_lower(cat + str(suffix)) in catmap:
                     suffix += 1
-                prints('Renaming user category %s to %s'%(cat, cat+str(suffix)))
+                prints(f'Renaming user category {cat} to {cat+str(suffix)}')
                 user_cats[cat + str(suffix)] = user_cats[cat]
                 del user_cats[cat]
                 cats_changed = True
@@ -700,7 +700,7 @@ class DB:
         for num, label in self.conn.get(
                 'SELECT id,label FROM custom_columns WHERE mark_for_delete=1'):
             table, lt = self.custom_table_names(num)
-            self.execute('''\
+            self.execute(f'''\
                 DROP INDEX IF EXISTS {table}_idx;
                 DROP INDEX IF EXISTS {lt}_aidx;
                 DROP INDEX IF EXISTS {lt}_bidx;
@@ -714,7 +714,7 @@ class DB:
                 DROP VIEW IF EXISTS tag_browser_filtered_{table};
                 DROP TABLE IF EXISTS {table};
                 DROP TABLE IF EXISTS {lt};
-                '''.format(table=table, lt=lt)
+                '''
             )
             self.prefs.set('update_all_last_mod_dates_on_start', True)
             self.deleted_fields.append('#'+label)
@@ -764,16 +764,15 @@ class DB:

             # Create Foreign Key triggers
             if data['normalized']:
-                trigger = 'DELETE FROM %s WHERE book=OLD.id;'%lt
+                trigger = f'DELETE FROM {lt} WHERE book=OLD.id;'
             else:
-                trigger = 'DELETE FROM %s WHERE book=OLD.id;'%table
+                trigger = f'DELETE FROM {table} WHERE book=OLD.id;'
             triggers.append(trigger)

         if remove:
             with self.conn:
                 for data in remove:
-                    prints('WARNING: Custom column %r not found, removing.' %
-                            data['label'])
+                    prints('WARNING: Custom column {!r} not found, removing.'.format(data['label']))
                     self.execute('DELETE FROM custom_columns WHERE id=?',
                                  (data['num'],))

@@ -783,9 +782,9 @@ class DB:
                 CREATE TEMP TRIGGER custom_books_delete_trg
                     AFTER DELETE ON books
                     BEGIN
-                    %s
+                    {}
                     END;
-                '''%(' \n'.join(triggers)))
+                '''.format(' \n'.join(triggers)))

         # Setup data adapters
         def adapt_text(x, d):
@@ -1212,7 +1211,7 @@ class DB:
         if re.match(r'^\w*$', label) is None or not label[0].isalpha() or label.lower() != label:
             raise ValueError(_('The label must contain only lower case letters, digits and underscores, and start with a letter'))
         if datatype not in CUSTOM_DATA_TYPES:
-            raise ValueError('%r is not a supported data type'%datatype)
+            raise ValueError(f'{datatype!r} is not a supported data type')
         normalized = datatype not in ('datetime', 'comments', 'int', 'bool',
                                       'float', 'composite')
         is_multiple = is_multiple and datatype in ('text', 'composite')
@@ -1241,29 +1240,29 @@ class DB:
         else:
             s_index = ''
         lines = [
-            '''\
-            CREATE TABLE %s(
+            f'''\
+            CREATE TABLE {table}(
                 id INTEGER PRIMARY KEY AUTOINCREMENT,
-                value %s NOT NULL %s,
+                value {dt} NOT NULL {collate},
                 link TEXT NOT NULL DEFAULT "",
                 UNIQUE(value));
-            '''%(table, dt, collate),
+            ''',

-            'CREATE INDEX %s_idx ON %s (value %s);'%(table, table, collate),
+            f'CREATE INDEX {table}_idx ON {table} (value {collate});',

-            '''\
-            CREATE TABLE %s(
+            f'''\
+            CREATE TABLE {lt}(
                 id INTEGER PRIMARY KEY AUTOINCREMENT,
                 book INTEGER NOT NULL,
                 value INTEGER NOT NULL,
-                %s
+                {s_index}
                 UNIQUE(book, value)
-            );'''%(lt, s_index),
+            );''',

-            'CREATE INDEX %s_aidx ON %s (value);'%(lt,lt),
-            'CREATE INDEX %s_bidx ON %s (book);'%(lt,lt),
+            f'CREATE INDEX {lt}_aidx ON {lt} (value);',
+            f'CREATE INDEX {lt}_bidx ON {lt} (book);',

-            '''\
+            f'''\
             CREATE TRIGGER fkc_update_{lt}_a
                 BEFORE UPDATE OF book ON {lt}
                 BEGIN
@@ -1324,22 +1323,22 @@ class DB:
                     value AS sort
                 FROM {table};

-            '''.format(lt=lt, table=table),
+            ''',

         ]
         else:
             lines = [
-            '''\
-            CREATE TABLE %s(
+            f'''\
+            CREATE TABLE {table}(
                 id INTEGER PRIMARY KEY AUTOINCREMENT,
                 book INTEGER,
-                value %s NOT NULL %s,
+                value {dt} NOT NULL {collate},
                 UNIQUE(book));
-            '''%(table, dt, collate),
+            ''',

-            'CREATE INDEX %s_idx ON %s (book);'%(table, table),
+            f'CREATE INDEX {table}_idx ON {table} (book);',

-            '''\
+            f'''\
             CREATE TRIGGER fkc_insert_{table}
                 BEFORE INSERT ON {table}
                 BEGIN
@@ -1356,7 +1355,7 @@ class DB:
                     THEN RAISE(ABORT, 'Foreign key violation: book not in books')
                 END;
                 END;
-            '''.format(table=table),
+            ''',
         ]
         script = ' \n'.join(lines)
         self.execute(script)
@@ -2396,15 +2395,14 @@ class DB:
         data = []
         if highlight_start is not None and highlight_end is not None:
             if snippet_size is not None:
-                text = "snippet({fts_table}, 0, ?, ?, '…', {snippet_size})".format(
-                    fts_table=fts_table, snippet_size=max(1, min(snippet_size, 64)))
+                text = f"snippet({fts_table}, 0, ?, ?, '…', {max(1, min(snippet_size, 64))})"
             else:
                 text = f'highlight({fts_table}, 0, ?, ?)'
             data.append(highlight_start)
             data.append(highlight_end)
         query = 'SELECT {0}.id, {0}.book, {0}.format, {0}.user_type, {0}.user, {0}.annot_data, {1} FROM {0} '
         query = query.format('annotations', text)
-        query += ' JOIN {fts_table} ON annotations.id = {fts_table}.rowid'.format(fts_table=fts_table)
+        query += f' JOIN {fts_table} ON annotations.id = {fts_table}.rowid'
         query += f' WHERE {fts_table} MATCH ?'
         data.append(fts_engine_query)
         if restrict_to_user:
@@ -916,7 +916,7 @@ class Cache:
         try:
             return frozenset(self.fields[field].table.id_map.values())
         except AttributeError:
-            raise ValueError('%s is not a many-one or many-many field' % field)
+            raise ValueError(f'{field} is not a many-one or many-many field')

     @read_api
     def get_usage_count_by_id(self, field):
@@ -925,7 +925,7 @@ class Cache:
         try:
             return {k:len(v) for k, v in iteritems(self.fields[field].table.col_book_map)}
         except AttributeError:
-            raise ValueError('%s is not a many-one or many-many field' % field)
+            raise ValueError(f'{field} is not a many-one or many-many field')

     @read_api
     def get_id_map(self, field):
@@ -937,7 +937,7 @@ class Cache:
         except AttributeError:
             if field == 'title':
                 return self.fields[field].table.book_col_map.copy()
-            raise ValueError('%s is not a many-one or many-many field' % field)
+            raise ValueError(f'{field} is not a many-one or many-many field')

     @read_api
     def get_item_name(self, field, item_id):
@@ -2319,7 +2319,7 @@ class Cache:
         try:
             func = f.table.rename_item
         except AttributeError:
-            raise ValueError('Cannot rename items for one-one fields: %s' % field)
+            raise ValueError(f'Cannot rename items for one-one fields: {field}')
         moved_books = set()
         id_map = {}
         for item_id, new_name in item_id_to_new_name_map.items():
@@ -2705,7 +2705,7 @@ class Cache:
         if mi.authors:
             try:
                 quathors = mi.authors[:20]  # Too many authors causes parsing of the search expression to fail
-                query = ' and '.join('authors:"=%s"'%(a.replace('"', '')) for a in quathors)
+                query = ' and '.join('authors:"={}"'.format(a.replace('"', '')) for a in quathors)
                 qauthors = mi.authors[20:]
             except ValueError:
                 return identical_book_ids
@@ -60,8 +60,7 @@ class Tag:

     @property
     def string_representation(self):
-        return '%s:%s:%s:%s:%s:%s'%(self.name, self.count, self.id, self.state,
-                self.category, self.original_categories)
+        return f'{self.name}:{self.count}:{self.id}:{self.state}:{self.category}:{self.original_categories}'

     def __str__(self):
         return self.string_representation

@@ -400,7 +400,7 @@ the folder related options below.
         try:
             getattr(parser.values, option.dest).append(compile_rule(rule))
         except Exception:
-            raise OptionValueError('%r is not a valid filename pattern' % value)
+            raise OptionValueError(f'{value!r} is not a valid filename pattern')

     g.add_option(
         '-1',
@@ -72,7 +72,7 @@ def do_add_custom_column(db, label, name, datatype, is_multiple, display):
     num = db.create_custom_column(
         label, name, datatype, is_multiple, display=display
     )
-    prints('Custom column created with id: %s' % num)
+    prints(f'Custom column created with id: {num}')


 def main(opts, args, dbctx):
@@ -156,7 +156,7 @@ def main(opts, args, dbctx):

     def fmtr(v):
         v = v or 0
-        ans = '%.1f' % v
+        ans = f'{v:.1f}'
         if ans.endswith('.0'):
             ans = ans[:-2]
         return ans
@@ -61,7 +61,7 @@ def do_remove_custom_column(db, label, force):
                 ' Use calibredb custom_columns to get a list of labels.'
             ) % label
         )
-    prints('Column %r removed.' % label)
+    prints(f'Column {label!r} removed.')


 def main(opts, args, dbctx):
@@ -89,7 +89,7 @@ class Field:
         if tweaks['sort_dates_using_visible_fields']:
             fmt = None
             if name in {'timestamp', 'pubdate', 'last_modified'}:
-                fmt = tweaks['gui_%s_display_format' % name]
+                fmt = tweaks[f'gui_{name}_display_format']
             elif self.metadata['is_custom']:
                 fmt = self.metadata.get('display', {}).get('date_format', None)
         self._sort_key = partial(clean_date_for_sort, fmt=fmt)
@@ -454,7 +454,7 @@ class OnDeviceField(OneToOneField):
             loc.append(_('Card A'))
         if b is not None:
             loc.append(_('Card B'))
-        return ', '.join(loc) + ((' (%s books)'%count) if count > 1 else '')
+        return ', '.join(loc) + ((f' ({count} books)') if count > 1 else '')

     def __iter__(self):
         return iter(())
@@ -263,7 +263,7 @@ def composite_getter(mi, field, dbref, book_id, cache, formatter, template_cache
         except Exception:
             import traceback
             traceback.print_exc()
-            return 'ERROR WHILE EVALUATING: %s' % field
+            return f'ERROR WHILE EVALUATING: {field}'
     return ret


@@ -365,7 +365,7 @@ class ProxyMetadata(Metadata):
         try:
             return ga(self, '_cache')[field]
         except KeyError:
-            raise AttributeError('Metadata object has no attribute named: %r' % field)
+            raise AttributeError(f'Metadata object has no attribute named: {field!r}')

     def __setattr__(self, field, val, extra=None):
         cache = ga(self, '_cache')
@@ -616,9 +616,9 @@ class LibraryDatabase:
     def set_custom_bulk_multiple(self, ids, add=[], remove=[], label=None, num=None, notify=False):
         data = self.backend.custom_field_metadata(label, num)
         if not data['editable']:
-            raise ValueError('Column %r is not editable'%data['label'])
+            raise ValueError('Column {!r} is not editable'.format(data['label']))
         if data['datatype'] != 'text' or not data['is_multiple']:
-            raise ValueError('Column %r is not text/multiple'%data['label'])
+            raise ValueError('Column {!r} is not text/multiple'.format(data['label']))
         field = self.custom_field_name(label, num)
         self._do_bulk_modify(field, ids, add, remove, notify)

@@ -756,7 +756,7 @@ class LibraryDatabase:
         if data['datatype'] == 'composite':
             return set()
         if not data['editable']:
-            raise ValueError('Column %r is not editable'%data['label'])
+            raise ValueError('Column {!r} is not editable'.format(data['label']))
         if data['datatype'] == 'enumeration' and (
                 val and val not in data['display']['enum_values']):
             return set()
@@ -789,7 +789,7 @@ class LibraryDatabase:
                 val and val not in data['display']['enum_values']):
             return
         if not data['editable']:
-            raise ValueError('Column %r is not editable'%data['label'])
+            raise ValueError('Column {!r} is not editable'.format(data['label']))

         if append:
             for book_id in ids:
@@ -826,7 +826,7 @@ class LibraryDatabase:
             self.notify('cover', [book_id])

     def original_fmt(self, book_id, fmt):
-        nfmt = ('ORIGINAL_%s'%fmt).upper()
+        nfmt = (f'ORIGINAL_{fmt}').upper()
         return nfmt if self.new_api.has_format(book_id, nfmt) else fmt

     def save_original_format(self, book_id, fmt, notify=True):
@@ -931,7 +931,7 @@ for field in (
             self.notify([book_id])
             return ret if field == 'languages' else retval
         return func
-    setattr(LibraryDatabase, 'set_%s' % field.replace('!', ''), setter(field))
+    setattr(LibraryDatabase, 'set_{}'.format(field.replace('!', '')), setter(field))

 for field in ('authors', 'tags', 'publisher'):
     def renamer(field):
@@ -941,7 +941,7 @@ for field in ('authors', 'tags', 'publisher'):
             return id_map[old_id]
         return func
     fname = field[:-1] if field in {'tags', 'authors'} else field
-    setattr(LibraryDatabase, 'rename_%s' % fname, renamer(field))
+    setattr(LibraryDatabase, f'rename_{fname}', renamer(field))

 LibraryDatabase.update_last_modified = lambda self, book_ids, commit=False, now=None: self.new_api.update_last_modified(book_ids, now=now)

@@ -954,7 +954,7 @@ for field in ('authors', 'tags', 'publisher', 'series'):
             return self.new_api.all_field_names(field)
         return func
     name = field[:-1] if field in {'authors', 'tags'} else field
-    setattr(LibraryDatabase, 'all_%s_names' % name, getter(field))
+    setattr(LibraryDatabase, f'all_{name}_names', getter(field))
 LibraryDatabase.all_formats = lambda self: self.new_api.all_field_names('formats')
 LibraryDatabase.all_custom = lambda self, label=None, num=None:self.new_api.all_field_names(self.custom_field_name(label, num))

@@ -977,7 +977,7 @@ for field in ('tags', 'series', 'publishers', 'ratings', 'languages'):
         def func(self):
             return [[tid, tag] for tid, tag in iteritems(self.new_api.get_id_map(fname))]
         return func
-    setattr(LibraryDatabase, 'get_%s_with_ids' % field, getter(field))
+    setattr(LibraryDatabase, f'get_{field}_with_ids', getter(field))

 for field in ('author', 'tag', 'series'):
     def getter(field):
@@ -986,7 +986,7 @@ for field in ('author', 'tag', 'series'):
         def func(self, item_id):
             return self.new_api.get_item_name(field, item_id)
         return func
-    setattr(LibraryDatabase, '%s_name' % field, getter(field))
+    setattr(LibraryDatabase, f'{field}_name', getter(field))

 for field in ('publisher', 'series', 'tag'):
     def getter(field):
@@ -995,7 +995,7 @@ for field in ('publisher', 'series', 'tag'):
         def func(self, item_id):
             self.new_api.remove_items(fname, (item_id,))
         return func
-    setattr(LibraryDatabase, 'delete_%s_using_id' % field, getter(field))
+    setattr(LibraryDatabase, f'delete_{field}_using_id', getter(field))
 # }}}

 # Legacy field API {{{
@@ -111,13 +111,9 @@ class Restore(Thread):
                 'and were not fully restored:\n')
             for x in self.conflicting_custom_cols:
                 ans += '\t#'+x+'\n'
-                ans += '\tused:\t%s, %s, %s, %s\n'%(self.custom_columns[x][1],
-                                                    self.custom_columns[x][2],
-                                                    self.custom_columns[x][3],
-                                                    self.custom_columns[x][5])
+                ans += f'\tused:\t{self.custom_columns[x][1]}, {self.custom_columns[x][2]}, {self.custom_columns[x][3]}, {self.custom_columns[x][5]}\n'
                 for coldef in self.conflicting_custom_cols[x]:
-                    ans += '\tother:\t%s, %s, %s, %s\n'%(coldef[1], coldef[2],
-                                                         coldef[3], coldef[5])
+                    ans += f'\tother:\t{coldef[1]}, {coldef[2]}, {coldef[3]}, {coldef[5]}\n'

         if self.mismatched_dirs:
             ans += '\n\n'
@@ -243,14 +243,14 @@ class SchemaUpgrade:
     def upgrade_version_8(self):
         'Add Tag Browser views'
         def create_tag_browser_view(table_name, column_name):
-            self.db.execute('''
-                DROP VIEW IF EXISTS tag_browser_{tn};
-                CREATE VIEW tag_browser_{tn} AS SELECT
+            self.db.execute(f'''
+                DROP VIEW IF EXISTS tag_browser_{table_name};
+                CREATE VIEW tag_browser_{table_name} AS SELECT
                     id,
                     name,
-                    (SELECT COUNT(id) FROM books_{tn}_link WHERE {cn}={tn}.id) count
-                FROM {tn};
-                '''.format(tn=table_name, cn=column_name))
+                    (SELECT COUNT(id) FROM books_{table_name}_link WHERE {column_name}={table_name}.id) count
+                FROM {table_name};
+                ''')

         for tn in ('authors', 'tags', 'publishers', 'series'):
             cn = tn[:-1]
||||
@ -280,28 +280,28 @@ class SchemaUpgrade:
|
||||
def upgrade_version_10(self):
|
||||
'Add restricted Tag Browser views'
|
||||
def create_tag_browser_view(table_name, column_name, view_column_name):
|
||||
script = ('''
|
||||
DROP VIEW IF EXISTS tag_browser_{tn};
|
||||
CREATE VIEW tag_browser_{tn} AS SELECT
|
||||
script = (f'''
|
||||
DROP VIEW IF EXISTS tag_browser_{table_name};
|
||||
CREATE VIEW tag_browser_{table_name} AS SELECT
|
||||
id,
|
||||
{vcn},
|
||||
(SELECT COUNT(id) FROM books_{tn}_link WHERE {cn}={tn}.id) count
|
||||
FROM {tn};
|
||||
DROP VIEW IF EXISTS tag_browser_filtered_{tn};
|
||||
CREATE VIEW tag_browser_filtered_{tn} AS SELECT
|
||||
{view_column_name},
|
||||
(SELECT COUNT(id) FROM books_{table_name}_link WHERE {column_name}={table_name}.id) count
|
||||
FROM {table_name};
|
||||
DROP VIEW IF EXISTS tag_browser_filtered_{table_name};
|
||||
CREATE VIEW tag_browser_filtered_{table_name} AS SELECT
|
||||
id,
|
||||
{vcn},
|
||||
(SELECT COUNT(books_{tn}_link.id) FROM books_{tn}_link WHERE
|
||||
{cn}={tn}.id AND books_list_filter(book)) count
|
||||
FROM {tn};
|
||||
'''.format(tn=table_name, cn=column_name, vcn=view_column_name))
|
||||
{view_column_name},
|
||||
(SELECT COUNT(books_{table_name}_link.id) FROM books_{table_name}_link WHERE
|
||||
{column_name}={table_name}.id AND books_list_filter(book)) count
|
||||
FROM {table_name};
|
||||
''')
|
||||
self.db.execute(script)
|
||||
|
||||
for field in itervalues(self.field_metadata):
|
||||
if field['is_category'] and not field['is_custom'] and 'link_column' in field:
|
||||
table = self.db.get(
|
||||
"SELECT name FROM sqlite_master WHERE type='table' AND name=?",
|
||||
('books_%s_link'%field['table'],), all=False)
|
||||
('books_{}_link'.format(field['table']),), all=False)
|
||||
if table is not None:
|
||||
create_tag_browser_view(field['table'], field['link_column'], field['column'])
|
||||
|
||||
@ -309,75 +309,74 @@ class SchemaUpgrade:
|
||||
'Add average rating to tag browser views'
|
||||
def create_std_tag_browser_view(table_name, column_name,
|
||||
view_column_name, sort_column_name):
|
||||
script = ('''
|
||||
DROP VIEW IF EXISTS tag_browser_{tn};
|
||||
CREATE VIEW tag_browser_{tn} AS SELECT
|
||||
script = (f'''
|
||||
DROP VIEW IF EXISTS tag_browser_{table_name};
|
||||
CREATE VIEW tag_browser_{table_name} AS SELECT
|
||||
id,
|
||||
{vcn},
|
||||
(SELECT COUNT(id) FROM books_{tn}_link WHERE {cn}={tn}.id) count,
|
||||
{view_column_name},
|
||||
(SELECT COUNT(id) FROM books_{table_name}_link WHERE {column_name}={table_name}.id) count,
|
||||
(SELECT AVG(ratings.rating)
|
||||
FROM books_{tn}_link AS tl, books_ratings_link AS bl, ratings
|
||||
WHERE tl.{cn}={tn}.id AND bl.book=tl.book AND
|
||||
FROM books_{table_name}_link AS tl, books_ratings_link AS bl, ratings
|
||||
WHERE tl.{column_name}={table_name}.id AND bl.book=tl.book AND
|
||||
ratings.id = bl.rating AND ratings.rating <> 0) avg_rating,
|
||||
{scn} AS sort
|
||||
FROM {tn};
|
||||
DROP VIEW IF EXISTS tag_browser_filtered_{tn};
|
||||
CREATE VIEW tag_browser_filtered_{tn} AS SELECT
|
||||
{sort_column_name} AS sort
|
||||
FROM {table_name};
|
||||
DROP VIEW IF EXISTS tag_browser_filtered_{table_name};
|
||||
CREATE VIEW tag_browser_filtered_{table_name} AS SELECT
|
||||
id,
|
||||
{vcn},
|
||||
(SELECT COUNT(books_{tn}_link.id) FROM books_{tn}_link WHERE
|
||||
{cn}={tn}.id AND books_list_filter(book)) count,
|
||||
{view_column_name},
|
||||
(SELECT COUNT(books_{table_name}_link.id) FROM books_{table_name}_link WHERE
|
||||
{column_name}={table_name}.id AND books_list_filter(book)) count,
|
||||
(SELECT AVG(ratings.rating)
|
||||
FROM books_{tn}_link AS tl, books_ratings_link AS bl, ratings
|
||||
WHERE tl.{cn}={tn}.id AND bl.book=tl.book AND
|
||||
FROM books_{table_name}_link AS tl, books_ratings_link AS bl, ratings
|
||||
WHERE tl.{column_name}={table_name}.id AND bl.book=tl.book AND
|
||||
ratings.id = bl.rating AND ratings.rating <> 0 AND
|
||||
books_list_filter(bl.book)) avg_rating,
|
||||
{scn} AS sort
|
||||
FROM {tn};
|
||||
{sort_column_name} AS sort
|
||||
FROM {table_name};
|
||||
|
||||
'''.format(tn=table_name, cn=column_name,
|
||||
vcn=view_column_name, scn=sort_column_name))
|
||||
''')
|
||||
self.db.execute(script)
|
||||
|
||||
def create_cust_tag_browser_view(table_name, link_table_name):
|
||||
script = '''
|
||||
DROP VIEW IF EXISTS tag_browser_{table};
|
||||
CREATE VIEW tag_browser_{table} AS SELECT
|
||||
script = f'''
|
||||
DROP VIEW IF EXISTS tag_browser_{table_name};
|
||||
CREATE VIEW tag_browser_{table_name} AS SELECT
|
||||
id,
|
||||
value,
|
||||
(SELECT COUNT(id) FROM {lt} WHERE value={table}.id) count,
|
||||
(SELECT COUNT(id) FROM {link_table_name} WHERE value={table_name}.id) count,
|
||||
(SELECT AVG(r.rating)
|
||||
FROM {lt},
|
||||
FROM {link_table_name},
|
||||
books_ratings_link AS bl,
|
||||
ratings AS r
|
||||
WHERE {lt}.value={table}.id AND bl.book={lt}.book AND
|
||||
WHERE {link_table_name}.value={table_name}.id AND bl.book={link_table_name}.book AND
|
||||
r.id = bl.rating AND r.rating <> 0) avg_rating,
|
||||
value AS sort
|
||||
FROM {table};
|
||||
FROM {table_name};
|
||||
|
||||
DROP VIEW IF EXISTS tag_browser_filtered_{table};
|
||||
CREATE VIEW tag_browser_filtered_{table} AS SELECT
|
||||
DROP VIEW IF EXISTS tag_browser_filtered_{table_name};
|
||||
CREATE VIEW tag_browser_filtered_{table_name} AS SELECT
|
||||
id,
|
||||
value,
|
||||
(SELECT COUNT({lt}.id) FROM {lt} WHERE value={table}.id AND
|
||||
(SELECT COUNT({link_table_name}.id) FROM {link_table_name} WHERE value={table_name}.id AND
|
||||
books_list_filter(book)) count,
|
||||
(SELECT AVG(r.rating)
|
||||
FROM {lt},
|
||||
FROM {link_table_name},
|
||||
books_ratings_link AS bl,
|
||||
ratings AS r
|
||||
WHERE {lt}.value={table}.id AND bl.book={lt}.book AND
|
||||
WHERE {link_table_name}.value={table_name}.id AND bl.book={link_table_name}.book AND
|
||||
r.id = bl.rating AND r.rating <> 0 AND
|
||||
books_list_filter(bl.book)) avg_rating,
|
||||
value AS sort
|
||||
FROM {table};
|
||||
'''.format(lt=link_table_name, table=table_name)
|
||||
FROM {table_name};
|
||||
'''
|
||||
self.db.execute(script)
|
||||
|
||||
for field in itervalues(self.field_metadata):
|
||||
if field['is_category'] and not field['is_custom'] and 'link_column' in field:
|
||||
table = self.db.get(
|
||||
"SELECT name FROM sqlite_master WHERE type='table' AND name=?",
|
||||
('books_%s_link'%field['table'],), all=False)
|
||||
('books_{}_link'.format(field['table']),), all=False)
|
||||
if table is not None:
|
||||
create_std_tag_browser_view(field['table'], field['link_column'],
|
||||
field['column'], field['category_sort'])
|
||||
@@ -389,7 +388,7 @@ class SchemaUpgrade:
         for (table,) in db_tables:
             tables.append(table)
         for table in tables:
-            link_table = 'books_%s_link'%table
+            link_table = f'books_{table}_link'
             if table.startswith('custom_column_') and link_table in tables:
                 create_cust_tag_browser_view(table, link_table)

@@ -580,9 +579,9 @@ class SchemaUpgrade:

             INSERT INTO identifiers (book, val) SELECT id,isbn FROM books WHERE isbn;

-            ALTER TABLE books ADD COLUMN last_modified TIMESTAMP NOT NULL DEFAULT "%s";
+            ALTER TABLE books ADD COLUMN last_modified TIMESTAMP NOT NULL DEFAULT "{}";

-            '''%isoformat(DEFAULT_DATE, sep=' ')
+            '''.format(isoformat(DEFAULT_DATE, sep=' '))
         # Sqlite does not support non constant default values in alter
         # statements
         self.db.execute(script)
@@ -108,7 +108,7 @@ class DateSearch:  # {{{
         self.local_today = {'_today', 'today', icu_lower(_('today'))}
         self.local_yesterday = {'_yesterday', 'yesterday', icu_lower(_('yesterday'))}
         self.local_thismonth = {'_thismonth', 'thismonth', icu_lower(_('thismonth'))}
-        self.daysago_pat = regex.compile(r'(%s|daysago|_daysago)$'%_('daysago'), flags=regex.UNICODE | regex.VERSION1)
+        self.daysago_pat = regex.compile(r'({}|daysago|_daysago)$'.format(_('daysago')), flags=regex.UNICODE | regex.VERSION1)

     def eq(self, dbdate, query, field_count):
         if dbdate.year == query.year:
@@ -72,7 +72,7 @@ class Table:
             self.unserialize = lambda x: x.replace('|', ',') if x else ''
             self.serialize = lambda x: x.replace(',', '|')
         self.link_table = (link_table if link_table else
-                'books_%s_link'%self.metadata['table'])
+                'books_{}_link'.format(self.metadata['table']))
         if self.supports_notes and dt == 'rating':  # custom ratings table
             self.supports_notes = False
@@ -248,7 +248,7 @@ class AddRemoveTest(BaseTest):
         item_id = {v:k for k, v in iteritems(cache.fields['#series'].table.id_map)}['My Series Two']
         cache.remove_books((1,), permanent=True)
         for x in (fmtpath, bookpath, authorpath):
-            af(os.path.exists(x), 'The file %s exists, when it should not' % x)
+            af(os.path.exists(x), f'The file {x} exists, when it should not')
         for c in (cache, self.init_cache()):
             table = c.fields['authors'].table
             self.assertNotIn(1, c.all_book_ids())
@@ -279,7 +279,7 @@ class AddRemoveTest(BaseTest):
         item_id = {v:k for k, v in iteritems(cache.fields['#series'].table.id_map)}['My Series Two']
         cache.remove_books((1,))
         for x in (fmtpath, bookpath, authorpath):
-            af(os.path.exists(x), 'The file %s exists, when it should not' % x)
+            af(os.path.exists(x), f'The file {x} exists, when it should not')
         b, f = cache.list_trash_entries()
         self.assertEqual(len(b), 1)
         self.assertEqual(len(f), 0)
@@ -48,7 +48,7 @@ class BaseTest(unittest.TestCase):
     def create_db(self, library_path):
         from calibre.library.database2 import LibraryDatabase2
         if LibraryDatabase2.exists_at(library_path):
-            raise ValueError('A library already exists at %r'%library_path)
+            raise ValueError(f'A library already exists at {library_path!r}')
         src = os.path.join(os.path.dirname(__file__), 'metadata.db')
         dest = os.path.join(library_path, 'metadata.db')
         shutil.copyfile(src, dest)
@@ -114,8 +114,8 @@ class BaseTest(unittest.TestCase):
             if isinstance(attr1, (tuple, list)) and 'authors' not in attr and 'languages' not in attr:
                 attr1, attr2 = set(attr1), set(attr2)
             self.assertEqual(attr1, attr2,
-                '%s not the same: %r != %r'%(attr, attr1, attr2))
+                f'{attr} not the same: {attr1!r} != {attr2!r}')
             if attr.startswith('#') and attr + '_index' not in exclude:
                 attr1, attr2 = mi1.get_extra(attr), mi2.get_extra(attr)
                 self.assertEqual(attr1, attr2,
-                    '%s {#extra} not the same: %r != %r'%(attr, attr1, attr2))
+                    f'{attr} {{#extra}} not the same: {attr1!r} != {attr2!r}')
@@ -32,8 +32,7 @@ class ET:
         legacy = self.legacy or test.init_legacy(test.cloned_library)
         oldres = getattr(old, self.func_name)(*self.args, **self.kwargs)
         newres = getattr(legacy, self.func_name)(*self.args, **self.kwargs)
-        test.assertEqual(oldres, newres, 'Equivalence test for {} with args: {} and kwargs: {} failed'.format(
-            self.func_name, reprlib.repr(self.args), reprlib.repr(self.kwargs)))
+        test.assertEqual(oldres, newres, f'Equivalence test for {self.func_name} with args: {reprlib.repr(self.args)} and kwargs: {reprlib.repr(self.kwargs)} failed')
         self.retval = newres
         return newres

@@ -165,10 +165,10 @@ class ReadingTest(BaseTest):
             x = list(reversed(order))
             ae(order, cache.multisort([(field, True)],
                 ids_to_sort=x),
-                'Ascending sort of %s failed'%field)
+                f'Ascending sort of {field} failed')
             ae(x, cache.multisort([(field, False)],
                 ids_to_sort=order),
-                'Descending sort of %s failed'%field)
+                f'Descending sort of {field} failed')

         # Test sorting of is_multiple fields.

@@ -337,8 +337,7 @@ class ReadingTest(BaseTest):
         for query, ans in iteritems(oldvals):
             nr = cache.search(query, '')
             self.assertEqual(ans, nr,
-                'Old result: %r != New result: %r for search: %s'%(
-                    ans, nr, query))
+                f'Old result: {ans!r} != New result: {nr!r} for search: {query}')

         # Test searching by id, which was introduced in the new backend
         self.assertEqual(cache.search('id:1', ''), {1})
@@ -414,13 +413,12 @@ class ReadingTest(BaseTest):
                 ):
                     continue
                 self.assertEqual(oval, nval,
-                    'The attribute %s for %s in category %s does not match. Old is %r, New is %r'
-                    %(attr, old.name, category, oval, nval))
+                    f'The attribute {attr} for {old.name} in category {category} does not match. Old is {oval!r}, New is {nval!r}')

         for category in old_categories:
             old, new = old_categories[category], new_categories[category]
             self.assertEqual(len(old), len(new),
-                'The number of items in the category %s is not the same'%category)
+                f'The number of items in the category {category} is not the same')
             for o, n in zip(old, new):
                 compare_category(category, o, n)

@@ -595,7 +593,7 @@ class ReadingTest(BaseTest):
         test(True, {3}, 'Unknown')
         c.limit = 5
         for i in range(6):
-            test(False, set(), 'nomatch_%s' % i)
+            test(False, set(), f'nomatch_{i}')
         test(False, {3}, 'Unknown')  # cached search expired
         test(False, {3}, '', 'unknown', num=1)
         test(True, {3}, '', 'unknown', num=1)
@@ -638,7 +636,7 @@ class ReadingTest(BaseTest):
             v = pmi.get_standard_metadata(field)
             self.assertTrue(v is None or isinstance(v, dict))
             self.assertEqual(f(mi.get_standard_metadata(field, False)), f(v),
-                'get_standard_metadata() failed for field %s' % field)
+                f'get_standard_metadata() failed for field {field}')
         for field, meta in cache.field_metadata.custom_iteritems():
             if meta['datatype'] != 'composite':
                 self.assertEqual(f(getattr(mi, field)), f(getattr(pmi, field)),
@@ -65,19 +65,16 @@ class WritingTest(BaseTest):
                 if test.name.endswith('_index'):
                     val = float(val) if val is not None else 1.0
                 self.assertEqual(sqlite_res, val,
-                    'Failed setting for %s with value %r, sqlite value not the same. val: %r != sqlite_val: %r'%(
-                        test.name, val, val, sqlite_res))
+                    f'Failed setting for {test.name} with value {val!r}, sqlite value not the same. val: {val!r} != sqlite_val: {sqlite_res!r}')
             else:
                 test.setter(db)(1, val)
                 old_cached_res = getter(1)
                 self.assertEqual(old_cached_res, cached_res,
-                    'Failed setting for %s with value %r, cached value not the same. Old: %r != New: %r'%(
-                        test.name, val, old_cached_res, cached_res))
+                    f'Failed setting for {test.name} with value {val!r}, cached value not the same. Old: {old_cached_res!r} != New: {cached_res!r}')
                 db.refresh()
                 old_sqlite_res = getter(1)
                 self.assertEqual(old_sqlite_res, sqlite_res,
-                    'Failed setting for %s, sqlite value not the same: %r != %r'%(
-                        test.name, old_sqlite_res, sqlite_res))
+                    f'Failed setting for {test.name}, sqlite value not the same: {old_sqlite_res!r} != {sqlite_res!r}')
         del db
     # }}}

@@ -755,7 +752,7 @@ class WritingTest(BaseTest):
         self.assertEqual(ldata, {aid:d['link'] for aid, d in iteritems(c.author_data())})
         self.assertEqual({3}, cache.set_link_for_authors({aid:'xxx' if aid == max(adata) else str(aid) for aid in adata}),
             'Setting the author link to the same value as before, incorrectly marked some books as dirty')
-        sdata = {aid:'%s, changed' % aid for aid in adata}
+        sdata = {aid:f'{aid}, changed' for aid in adata}
         self.assertEqual({1,2,3}, cache.set_sort_for_authors(sdata))
         for bid in (1, 2, 3):
             self.assertIn(', changed', cache.field_for('author_sort', bid))
|
@ -294,7 +294,7 @@ class ThumbnailCache:
|
||||
if not hasattr(self, 'total_size'):
|
||||
self._load_index()
|
||||
self._invalidate_sizes()
|
||||
ts = ('%.2f' % timestamp).replace('.00', '')
|
||||
ts = (f'{timestamp:.2f}').replace('.00', '')
|
||||
path = '%s%s%s%s%d-%s-%d-%dx%d' % (
|
||||
self.group_id, os.sep, book_id % 100, os.sep,
|
||||
book_id, ts, len(data), self.thumbnail_size[0], self.thumbnail_size[1])
|
||||
|
@ -95,7 +95,7 @@ def format_is_multiple(x, sep=',', repl=None):
|
||||
def format_identifiers(x):
|
||||
if not x:
|
||||
return None
|
||||
return ','.join('%s:%s'%(k, v) for k, v in iteritems(x))
|
||||
return ','.join(f'{k}:{v}' for k, v in iteritems(x))
|
||||
|
||||
|
||||
class View:
|
||||
@ -190,7 +190,7 @@ class View:
|
||||
|
||||
def _get_id(self, idx, index_is_id=True):
|
||||
if index_is_id and not self.cache.has_id(idx):
|
||||
raise IndexError('No book with id %s present'%idx)
|
||||
raise IndexError(f'No book with id {idx} present')
|
||||
return idx if index_is_id else self.index_to_id(idx)
|
||||
|
||||
def has_id(self, book_id):
|
||||
@ -242,7 +242,7 @@ class View:
|
||||
def _get(self, field, idx, index_is_id=True, default_value=None, fmt=lambda x:x):
|
||||
id_ = idx if index_is_id else self.index_to_id(idx)
|
||||
if index_is_id and not self.cache.has_id(id_):
|
||||
raise IndexError('No book with id %s present'%idx)
|
||||
raise IndexError(f'No book with id {idx} present')
|
||||
return fmt(self.cache.field_for(field, id_, default_value=default_value))
|
||||
|
||||
def get_series_sort(self, idx, index_is_id=True, default_value=''):
|
||||
|
@ -204,7 +204,7 @@ def one_one_in_books(book_id_val_map, db, field, *args):
|
||||
if book_id_val_map:
|
||||
sequence = ((sqlite_datetime(v), k) for k, v in book_id_val_map.items())
|
||||
db.executemany(
|
||||
'UPDATE books SET %s=? WHERE id=?'%field.metadata['column'], sequence)
|
||||
'UPDATE books SET {}=? WHERE id=?'.format(field.metadata['column']), sequence)
|
||||
field.table.book_col_map.update(book_id_val_map)
|
||||
return set(book_id_val_map)
|
||||
|
||||
@ -229,13 +229,13 @@ def one_one_in_other(book_id_val_map, db, field, *args):
|
||||
book_id_val_map = {k:v for k, v in iteritems(book_id_val_map) if v != g(k, missing)}
|
||||
deleted = tuple((k,) for k, v in iteritems(book_id_val_map) if v is None)
|
||||
if deleted:
|
||||
db.executemany('DELETE FROM %s WHERE book=?'%field.metadata['table'],
|
||||
db.executemany('DELETE FROM {} WHERE book=?'.format(field.metadata['table']),
|
||||
deleted)
|
||||
for book_id in deleted:
|
||||
field.table.book_col_map.pop(book_id[0], None)
|
||||
updated = {k:v for k, v in iteritems(book_id_val_map) if v is not None}
|
||||
if updated:
|
||||
db.executemany('INSERT OR REPLACE INTO %s(book,%s) VALUES (?,?)'%(
|
||||
db.executemany('INSERT OR REPLACE INTO {}(book,{}) VALUES (?,?)'.format(
|
||||
field.metadata['table'], field.metadata['column']),
|
||||
((k, sqlite_datetime(v)) for k, v in iteritems(updated)))
|
||||
field.table.book_col_map.update(updated)
|
||||
@ -260,7 +260,7 @@ def custom_series_index(book_id_val_map, db, field, *args):
|
||||
# sorts the same as other books with no series.
|
||||
field.table.remove_books((book_id,), db)
|
||||
if sequence:
|
||||
db.executemany('UPDATE %s SET %s=? WHERE book=? AND value=?'%(
|
||||
db.executemany('UPDATE {} SET {}=? WHERE book=? AND value=?'.format(
|
||||
field.metadata['table'], field.metadata['column']), sequence)
|
||||
return {s[1] for s in sequence}
|
||||
# }}}
|
||||
@ -287,7 +287,7 @@ def get_db_id(val, db, m, table, kmap, rid_map, allow_case_change,
|
||||
db.execute('INSERT INTO authors(name,sort) VALUES (?,?)',
|
||||
(val.replace(',', '|'), aus))
|
||||
else:
|
||||
db.execute('INSERT INTO %s(%s) VALUES (?)'%(
|
||||
db.execute('INSERT INTO {}({}) VALUES (?)'.format(
|
||||
m['table'], m['column']), (val,))
|
||||
item_id = rid_map[kval] = db.last_insert_rowid()
|
||||
table.id_map[item_id] = val
|
||||
@ -310,7 +310,7 @@ def change_case(case_changes, dirtied, db, table, m, is_authors=False):
|
||||
else:
|
||||
vals = ((val, item_id) for item_id, val in iteritems(case_changes))
|
||||
db.executemany(
|
||||
'UPDATE %s SET %s=? WHERE id=?'%(m['table'], m['column']), vals)
|
||||
'UPDATE {} SET {}=? WHERE id=?'.format(m['table'], m['column']), vals)
|
||||
for item_id, val in iteritems(case_changes):
|
||||
table.id_map[item_id] = val
|
||||
dirtied.update(table.col_book_map[item_id])
|
||||
@ -366,7 +366,7 @@ def many_one(book_id_val_map, db, field, allow_case_change, *args):
|
||||
|
||||
# Update the db link table
|
||||
if deleted:
|
||||
db.executemany('DELETE FROM %s WHERE book=?'%table.link_table,
|
||||
db.executemany(f'DELETE FROM {table.link_table} WHERE book=?',
|
||||
((k,) for k in deleted))
|
||||
if updated:
|
||||
sql = (
|
||||
@ -383,7 +383,7 @@ def many_one(book_id_val_map, db, field, allow_case_change, *args):
|
||||
if remove:
|
||||
if table.supports_notes:
|
||||
db.clear_notes_for_category_items(table.name, remove)
|
||||
db.executemany('DELETE FROM %s WHERE id=?'%m['table'],
|
||||
db.executemany('DELETE FROM {} WHERE id=?'.format(m['table']),
|
||||
((item_id,) for item_id in remove))
|
||||
for item_id in remove:
|
||||
del table.id_map[item_id]
|
||||
@ -467,14 +467,14 @@ def many_many(book_id_val_map, db, field, allow_case_change, *args):
|
||||
|
||||
# Update the db link table
|
||||
if deleted:
|
||||
db.executemany('DELETE FROM %s WHERE book=?'%table.link_table,
|
||||
db.executemany(f'DELETE FROM {table.link_table} WHERE book=?',
|
||||
((k,) for k in deleted))
|
||||
if updated:
|
||||
vals = (
|
||||
(book_id, val) for book_id, vals in iteritems(updated)
|
||||
for val in vals
|
||||
)
|
||||
db.executemany('DELETE FROM %s WHERE book=?'%table.link_table,
|
||||
db.executemany(f'DELETE FROM {table.link_table} WHERE book=?',
|
||||
((k,) for k in updated))
|
||||
db.executemany('INSERT INTO {}(book,{}) VALUES(?, ?)'.format(
|
||||
table.link_table, m['link_column']), vals)
|
||||
@ -488,7 +488,7 @@ def many_many(book_id_val_map, db, field, allow_case_change, *args):
|
||||
if remove:
|
||||
if table.supports_notes:
|
||||
db.clear_notes_for_category_items(table.name, remove)
|
||||
db.executemany('DELETE FROM %s WHERE id=?'%m['table'],
|
||||
db.executemany('DELETE FROM {} WHERE id=?'.format(m['table']),
|
||||
((item_id,) for item_id in remove))
|
||||
for item_id in remove:
|
||||
del table.id_map[item_id]
|
||||
|
@@ -321,7 +321,7 @@ def main(args=sys.argv):
         elif ext in {'mobi', 'azw', 'azw3'}:
             inspect_mobi(path)
         else:
-            print('Cannot dump unknown filetype: %s' % path)
+            print(f'Cannot dump unknown filetype: {path}')
     elif len(args) >= 2 and os.path.exists(os.path.join(args[1], '__main__.py')):
         sys.path.insert(0, args[1])
         run_script(os.path.join(args[1], '__main__.py'), args[2:])
@@ -90,7 +90,7 @@ def debug(ioreg_to_tmp=False, buf=None, plugins=None,
             try:
                 d.startup()
             except:
-                out('Startup failed for device plugin: %s'%d)
+                out(f'Startup failed for device plugin: {d}')

     if disabled_plugins is None:
         disabled_plugins = list(disabled_device_plugins())
@@ -208,7 +208,7 @@ def main():
         try:
             d.startup()
         except:
-            print('Startup failed for device plugin: %s'%d)
+            print(f'Startup failed for device plugin: {d}')
         if d.MANAGES_DEVICE_PRESENCE:
             cd = d.detect_managed_devices(scanner.devices)
             if cd is not None:
@@ -49,7 +49,7 @@ class CYBOOK(USBMS):
             coverdata = coverdata[2]
         else:
             coverdata = None
-        with open('%s_6090.t2b' % os.path.join(path, filename), 'wb') as t2bfile:
+        with open(f'{os.path.join(path, filename)}_6090.t2b', 'wb') as t2bfile:
             t2b.write_t2b(t2bfile, coverdata)
             fsync(t2bfile)

@@ -89,7 +89,7 @@ class ORIZON(CYBOOK):
             coverdata = coverdata[2]
         else:
             coverdata = None
-        with open('%s.thn' % filepath, 'wb') as thnfile:
+        with open(f'{filepath}.thn', 'wb') as thnfile:
             t4b.write_t4b(thnfile, coverdata)
             fsync(thnfile)

@@ -28,9 +28,9 @@ class IRIVER_STORY(USBMS):
     VENDOR_NAME = 'IRIVER'
     WINDOWS_MAIN_MEM = ['STORY', 'STORY_EB05', 'STORY_WI-FI', 'STORY_EB07',
             'STORY_EB12']
-    WINDOWS_MAIN_MEM = re.compile(r'(%s)&'%('|'.join(WINDOWS_MAIN_MEM)))
+    WINDOWS_MAIN_MEM = re.compile(r'({})&'.format('|'.join(WINDOWS_MAIN_MEM)))
     WINDOWS_CARD_A_MEM = ['STORY', 'STORY_SD', 'STORY_EB12_SD']
-    WINDOWS_CARD_A_MEM = re.compile(r'(%s)&'%('|'.join(WINDOWS_CARD_A_MEM)))
+    WINDOWS_CARD_A_MEM = re.compile(r'({})&'.format('|'.join(WINDOWS_CARD_A_MEM)))

     # OSX_MAIN_MEM = 'Kindle Internal Storage Media'
     # OSX_CARD_A_MEM = 'Kindle Card Storage Media'
@ -105,13 +105,13 @@ class APNXBuilder:

# Updated header if we have a KF8 file...
if apnx_meta['format'] == 'MOBI_8':
content_header = '{"contentGuid":"%(guid)s","asin":"%(asin)s","cdeType":"%(cdetype)s","format":"%(format)s","fileRevisionId":"1","acr":"%(acr)s"}' % apnx_meta # noqa: E501
content_header = '{{"contentGuid":"{guid}","asin":"{asin}","cdeType":"{cdetype}","format":"{format}","fileRevisionId":"1","acr":"{acr}"}}'.format(**apnx_meta) # noqa: E501
else:
# My 5.1.x Touch & 3.4 K3 seem to handle the 'extended' header fine for
# legacy mobi files, too. But, since they still handle this one too, let's
# try not to break old devices, and keep using the simple header ;).
content_header = '{"contentGuid":"%(guid)s","asin":"%(asin)s","cdeType":"%(cdetype)s","fileRevisionId":"1"}' % apnx_meta
page_header = '{"asin":"%(asin)s","pageMap":"' % apnx_meta
content_header = '{{"contentGuid":"{guid}","asin":"{asin}","cdeType":"{cdetype}","fileRevisionId":"1"}}'.format(**apnx_meta)
page_header = '{{"asin":"{asin}","pageMap":"'.format(**apnx_meta)
page_header += pages.page_maps + '"}'
if DEBUG:
prints('APNX Content Header:', content_header)
|
@ -33,7 +33,7 @@ class Bookmark: # {{{
def record(self, n):
from calibre.ebooks.metadata.mobi import StreamSlicer
if n >= self.nrecs:
raise ValueError('non-existent record %r' % n)
raise ValueError(f'non-existent record {n!r}')
offoff = 78 + (8 * n)
start, = unpack('>I', self.data[offoff + 0:offoff + 4])
stop = None
@ -141,7 +141,7 @@ class Bookmark: # {{{
# Search looks for book title match, highlight match, and location match
# Author is not matched
# This will find the first instance of a clipping only
book_fs = self.path.replace('.%s' % self.bookmark_extension,'.%s' % self.book_format)
book_fs = self.path.replace(f'.{self.bookmark_extension}',f'.{self.book_format}')
with open(book_fs,'rb') as f2:
stream = io.BytesIO(f2.read())
mi = get_topaz_metadata(stream)
@ -152,7 +152,7 @@ class Bookmark: # {{{
with open(my_clippings, encoding='utf-8', errors='replace') as f2:
marker_found = 0
text = ''
search_str1 = '%s' % (mi.title)
search_str1 = f'{mi.title}'
search_str2 = '- Highlight Loc. %d' % (displayed_location)
for line in f2:
if marker_found == 0:
@ -271,12 +271,12 @@ class Bookmark: # {{{
self.last_read_location = self.last_read - self.pdf_page_offset

else:
print('unsupported bookmark_extension: %s' % self.bookmark_extension)
print(f'unsupported bookmark_extension: {self.bookmark_extension}')
self.user_notes = user_notes

def get_book_length(self):
from calibre.ebooks.metadata.mobi import StreamSlicer
book_fs = self.path.replace('.%s' % self.bookmark_extension,'.%s' % self.book_format)
book_fs = self.path.replace(f'.{self.bookmark_extension}',f'.{self.book_format}')

self.book_length = 0
if self.bookmark_extension == 'mbp':
@ -300,6 +300,6 @@ class Bookmark: # {{{
except:
pass
else:
print('unsupported bookmark_extension: %s' % self.bookmark_extension)
print(f'unsupported bookmark_extension: {self.bookmark_extension}')

# }}}
|
@ -294,7 +294,7 @@ class KINDLE(USBMS):
typ=user_notes[location]['type'],
text=(user_notes[location]['text'] if
user_notes[location]['type'] == 'Note' else
'<i>%s</i>' % user_notes[location]['text'])))
'<i>{}</i>'.format(user_notes[location]['text']))))
else:
if bookmark.book_format == 'pdf':
annotations.append(
@ -351,7 +351,7 @@ class KINDLE(USBMS):
bm.value.path, index_is_id=True)
elif bm.type == 'kindle_clippings':
# Find 'My Clippings' author=Kindle in database, or add
last_update = 'Last modified %s' % strftime('%x %X',bm.value['timestamp'].timetuple())
last_update = 'Last modified {}'.format(strftime('%x %X',bm.value['timestamp'].timetuple()))
mc_id = list(db.data.search_getting_ids('title:"My Clippings"', '', sort_results=False))
if mc_id:
db.add_format_with_hooks(mc_id[0], 'TXT', bm.value['path'],
@ -623,7 +623,7 @@ class KINDLE2(KINDLE):
except:
pass

apnx_path = '%s.apnx' % os.path.join(path, filename)
apnx_path = f'{os.path.join(path, filename)}.apnx'
apnx_builder = APNXBuilder()
# Check to see if there is an existing apnx file on Kindle we should keep.
if opts.extra_customization[self.OPT_APNX_OVERWRITE] or not os.path.exists(apnx_path):
@ -636,7 +636,7 @@ class KINDLE2(KINDLE):
if temp in self.EXTRA_CUSTOMIZATION_CHOICES[self.OPT_APNX_METHOD]:
method = temp
else:
print('Invalid method choice for this book (%r), ignoring.' % temp)
print(f'Invalid method choice for this book ({temp!r}), ignoring.')
except:
print('Could not retrieve override method choice, using default.')
apnx_builder.write_apnx(filepath, apnx_path, method=method, page_count=custom_page_count)
|
@ -106,7 +106,7 @@ class Book(Book_):
if self.contentID:
fmt('Content ID', self.contentID)
if self.kobo_series:
fmt('Kobo Series', self.kobo_series + ' #%s'%self.kobo_series_number)
fmt('Kobo Series', self.kobo_series + f' #{self.kobo_series_number}')
if self.kobo_series_id:
fmt('Kobo Series ID', self.kobo_series_id)
if self.kobo_subtitle:
@ -203,7 +203,7 @@ class KTCollectionsBookList(CollectionsBookList):
fm = None
attr = attr.strip()
if show_debug:
debug_print("KTCollectionsBookList:get_collections - attr='%s'"%attr)
debug_print(f"KTCollectionsBookList:get_collections - attr='{attr}'")

# If attr is device_collections, then we cannot use
# format_field, because we don't know the fields where the
|
@ -384,25 +384,25 @@ class KOBO(USBMS):
if self.dbversion >= 33:
query= ('select Title, Attribution, DateCreated, ContentID, MimeType, ContentType, '
'ImageID, ReadStatus, ___ExpirationStatus, FavouritesIndex, Accessibility, IsDownloaded from content where '
'BookID is Null %(previews)s %(recommendations)s and not ((___ExpirationStatus=3 or ___ExpirationStatus is Null) %(expiry)s') % dict(
'BookID is Null {previews} {recommendations} and not ((___ExpirationStatus=3 or ___ExpirationStatus is Null) {expiry}').format(**dict(
expiry=' and ContentType = 6)' if opts.extra_customization[self.OPT_SHOW_EXPIRED_BOOK_RECORDS] else ')',
previews=' and Accessibility <> 6' if not self.show_previews else '',
recommendations=" and IsDownloaded in ('true', 1)" if opts.extra_customization[self.OPT_SHOW_RECOMMENDATIONS] is False else '')
recommendations=" and IsDownloaded in ('true', 1)" if opts.extra_customization[self.OPT_SHOW_RECOMMENDATIONS] is False else ''))
elif self.dbversion >= 16 and self.dbversion < 33:
query= ('select Title, Attribution, DateCreated, ContentID, MimeType, ContentType, '
'ImageID, ReadStatus, ___ExpirationStatus, FavouritesIndex, Accessibility, "1" as IsDownloaded from content where '
'BookID is Null and not ((___ExpirationStatus=3 or ___ExpirationStatus is Null) %(expiry)s') % dict(expiry=' and ContentType = 6)'
if opts.extra_customization[self.OPT_SHOW_EXPIRED_BOOK_RECORDS] else ')')
'BookID is Null and not ((___ExpirationStatus=3 or ___ExpirationStatus is Null) {expiry}').format(**dict(expiry=' and ContentType = 6)'
if opts.extra_customization[self.OPT_SHOW_EXPIRED_BOOK_RECORDS] else ')'))
elif self.dbversion < 16 and self.dbversion >= 14:
query= ('select Title, Attribution, DateCreated, ContentID, MimeType, ContentType, '
'ImageID, ReadStatus, ___ExpirationStatus, FavouritesIndex, "-1" as Accessibility, "1" as IsDownloaded from content where '
'BookID is Null and not ((___ExpirationStatus=3 or ___ExpirationStatus is Null) %(expiry)s') % dict(expiry=' and ContentType = 6)'
if opts.extra_customization[self.OPT_SHOW_EXPIRED_BOOK_RECORDS] else ')')
'BookID is Null and not ((___ExpirationStatus=3 or ___ExpirationStatus is Null) {expiry}').format(**dict(expiry=' and ContentType = 6)'
if opts.extra_customization[self.OPT_SHOW_EXPIRED_BOOK_RECORDS] else ')'))
elif self.dbversion < 14 and self.dbversion >= 8:
query= ('select Title, Attribution, DateCreated, ContentID, MimeType, ContentType, '
'ImageID, ReadStatus, ___ExpirationStatus, "-1" as FavouritesIndex, "-1" as Accessibility, "1" as IsDownloaded from content where '
'BookID is Null and not ((___ExpirationStatus=3 or ___ExpirationStatus is Null) %(expiry)s') % dict(expiry=' and ContentType = 6)'
if opts.extra_customization[self.OPT_SHOW_EXPIRED_BOOK_RECORDS] else ')')
'BookID is Null and not ((___ExpirationStatus=3 or ___ExpirationStatus is Null) {expiry}').format(**dict(expiry=' and ContentType = 6)'
if opts.extra_customization[self.OPT_SHOW_EXPIRED_BOOK_RECORDS] else ')'))
else:
query = ('select Title, Attribution, DateCreated, ContentID, MimeType, ContentType, '
'ImageID, ReadStatus, "-1" as ___ExpirationStatus, "-1" as FavouritesIndex, '
@ -608,12 +608,12 @@ class KOBO(USBMS):
self.report_progress(1.0, _('Removing books from device metadata listing...'))

def add_books_to_metadata(self, locations, metadata, booklists):
debug_print('KoboTouch::add_books_to_metadata - start. metadata=%s' % metadata[0])
debug_print(f'KoboTouch::add_books_to_metadata - start. metadata={metadata[0]}')
metadata = iter(metadata)
for i, location in enumerate(locations):
self.report_progress((i+1) / float(len(locations)), _('Adding books to device metadata listing...'))
info = next(metadata)
debug_print('KoboTouch::add_books_to_metadata - info=%s' % info)
debug_print(f'KoboTouch::add_books_to_metadata - info={info}')
blist = 2 if location[1] == 'cardb' else 1 if location[1] == 'carda' else 0

# Extract the correct prefix from the pathname. To do this correctly,
@ -645,7 +645,7 @@ class KOBO(USBMS):
book.size = os.stat(self.normalize_path(path)).st_size
b = booklists[blist].add_book(book, replace_metadata=True)
if b:
debug_print('KoboTouch::add_books_to_metadata - have a new book - book=%s' % book)
debug_print(f'KoboTouch::add_books_to_metadata - have a new book - book={book}')
b._new_book = True
self.report_progress(1.0, _('Adding books to device metadata listing...'))

@ -755,9 +755,9 @@ class KOBO(USBMS):
' selecting "Configure this device" and then the '
' "Attempt to support newer firmware" option.'
' Doing so may require you to perform a Factory reset of'
' your Kobo.') + ((
'\nDevice database version: %s.'
'\nDevice firmware version: %s') % (self.dbversion, self.display_fwversion))
' your Kobo.') + (
f'\nDevice database version: {self.dbversion}.'
f'\nDevice firmware version: {self.display_fwversion}')
, UserFeedback.WARN)

return False
@ -807,7 +807,7 @@ class KOBO(USBMS):
('card_a', 'metadata.calibre', 1),
('card_b', 'metadata.calibre', 2)
]:
prefix = getattr(self, '_%s_prefix'%prefix)
prefix = getattr(self, f'_{prefix}_prefix')
if prefix is not None and os.path.exists(prefix):
paths[source_id] = os.path.join(prefix, *(path.split('/')))
return paths
@ -891,7 +891,7 @@ class KOBO(USBMS):
cursor.close()

def update_device_database_collections(self, booklists, collections_attributes, oncard):
debug_print("Kobo:update_device_database_collections - oncard='%s'"%oncard)
debug_print(f"Kobo:update_device_database_collections - oncard='{oncard}'")
if self.modify_database_check('update_device_database_collections') is False:
return

@ -1678,7 +1678,7 @@ class KOBOTOUCH(KOBO):
return "'true'" if x else "'false'"

def books(self, oncard=None, end_session=True):
debug_print("KoboTouch:books - oncard='%s'"%oncard)
debug_print(f"KoboTouch:books - oncard='{oncard}'")
self.debugging_title = self.get_debugging_title()

dummy_bl = self.booklist_class(None, None, None)
@ -1699,11 +1699,11 @@ class KOBOTOUCH(KOBO):
prefix = self._card_a_prefix if oncard == 'carda' else \
self._card_b_prefix if oncard == 'cardb' \
else self._main_prefix
debug_print("KoboTouch:books - oncard='%s', prefix='%s'"%(oncard, prefix))
debug_print(f"KoboTouch:books - oncard='{oncard}', prefix='{prefix}'")

self.fwversion = self.get_firmware_version()

debug_print('Kobo device: %s' % self.gui_name)
debug_print(f'Kobo device: {self.gui_name}')
debug_print('Version of driver:', self.version, 'Has kepubs:', self.has_kepubs)
debug_print('Version of firmware:', self.fwversion, 'Has kepubs:', self.has_kepubs)
debug_print('Firmware supports cover image tree:', self.fwversion >= self.min_fwversion_images_tree)
@ -1718,7 +1718,7 @@ class KOBOTOUCH(KOBO):
debug_print('KoboTouch:books - driver options=', self)
debug_print("KoboTouch:books - prefs['manage_device_metadata']=", prefs['manage_device_metadata'])
debugging_title = self.debugging_title
debug_print("KoboTouch:books - set_debugging_title to '%s'" % debugging_title)
debug_print(f"KoboTouch:books - set_debugging_title to '{debugging_title}'")
bl.set_debugging_title(debugging_title)
debug_print('KoboTouch:books - length bl=%d'%len(bl))
need_sync = self.parse_metadata_cache(bl, prefix, self.METADATA_CACHE)
@ -1739,7 +1739,7 @@ class KOBOTOUCH(KOBO):
show_debug = self.is_debugging_title(title)
# show_debug = authors == 'L. Frank Baum'
if show_debug:
debug_print("KoboTouch:update_booklist - title='%s'"%title, 'ContentType=%s'%ContentType, 'isdownloaded=', isdownloaded)
debug_print(f"KoboTouch:update_booklist - title='{title}'", f'ContentType={ContentType}', 'isdownloaded=', isdownloaded)
debug_print(
' prefix=%s, DateCreated=%s, readstatus=%d, MimeType=%s, expired=%d, favouritesindex=%d, accessibility=%d, isdownloaded=%s'%
(prefix, DateCreated, readstatus, MimeType, expired, favouritesindex, accessibility, isdownloaded,))
@ -1838,19 +1838,19 @@ class KOBOTOUCH(KOBO):
try:
kobo_metadata.pubdate = datetime.strptime(DateCreated, '%Y-%m-%dT%H:%M:%S.%fZ')
except:
debug_print("KoboTouch:update_booklist - Cannot convert date - DateCreated='%s'"%DateCreated)
debug_print(f"KoboTouch:update_booklist - Cannot convert date - DateCreated='{DateCreated}'")

idx = bl_cache.get(lpath, None)
if idx is not None: # and not (accessibility == 1 and isdownloaded == 'false'):
if show_debug:
self.debug_index = idx
debug_print('KoboTouch:update_booklist - idx=%d'%idx)
debug_print('KoboTouch:update_booklist - lpath=%s'%lpath)
debug_print(f'KoboTouch:update_booklist - lpath={lpath}')
debug_print('KoboTouch:update_booklist - bl[idx].device_collections=', bl[idx].device_collections)
debug_print('KoboTouch:update_booklist - playlist_map=', playlist_map)
debug_print('KoboTouch:update_booklist - bookshelves=', bookshelves)
debug_print('KoboTouch:update_booklist - kobo_collections=', kobo_collections)
debug_print('KoboTouch:update_booklist - series="%s"' % bl[idx].series)
debug_print(f'KoboTouch:update_booklist - series="{bl[idx].series}"')
debug_print('KoboTouch:update_booklist - the book=', bl[idx])
debug_print('KoboTouch:update_booklist - the authors=', bl[idx].authors)
debug_print('KoboTouch:update_booklist - application_id=', bl[idx].application_id)
@ -1871,7 +1871,7 @@ class KOBOTOUCH(KOBO):
debug_print('KoboTouch:update_booklist - book size=', bl[idx].size)

if show_debug:
debug_print("KoboTouch:update_booklist - ContentID='%s'"%ContentID)
debug_print(f"KoboTouch:update_booklist - ContentID='{ContentID}'")
bl[idx].contentID = ContentID
bl[idx].kobo_metadata = kobo_metadata
bl[idx].kobo_series = series
@ -1897,8 +1897,8 @@ class KOBOTOUCH(KOBO):
debug_print('KoboTouch:update_booklist - updated bl[idx].device_collections=', bl[idx].device_collections)
debug_print('KoboTouch:update_booklist - playlist_map=', playlist_map, 'changed=', changed)
# debug_print('KoboTouch:update_booklist - book=', bl[idx])
debug_print('KoboTouch:update_booklist - book class=%s'%bl[idx].__class__)
debug_print('KoboTouch:update_booklist - book title=%s'%bl[idx].title)
debug_print(f'KoboTouch:update_booklist - book class={bl[idx].__class__}')
debug_print(f'KoboTouch:update_booklist - book title={bl[idx].title}')
else:
if show_debug:
debug_print('KoboTouch:update_booklist - idx is none')
@ -1911,10 +1911,10 @@ class KOBOTOUCH(KOBO):
title = 'FILE MISSING: ' + title
book = self.book_class(prefix, lpath, title, authors, MimeType, DateCreated, ContentType, ImageID, size=0)
if show_debug:
debug_print('KoboTouch:update_booklist - book file does not exist. ContentID="%s"'%ContentID)
debug_print(f'KoboTouch:update_booklist - book file does not exist. ContentID="{ContentID}"')

except Exception as e:
debug_print("KoboTouch:update_booklist - exception creating book: '%s'"%str(e))
debug_print(f"KoboTouch:update_booklist - exception creating book: '{e!s}'")
debug_print(' prefix: ', prefix, 'lpath: ', lpath, 'title: ', title, 'authors: ', authors,
'MimeType: ', MimeType, 'DateCreated: ', DateCreated, 'ContentType: ', ContentType, 'ImageID: ', ImageID)
raise
@ -1922,10 +1922,10 @@ class KOBOTOUCH(KOBO):
if show_debug:
debug_print('KoboTouch:update_booklist - class:', book.__class__)
# debug_print(' resolution:', book.__class__.__mro__)
debug_print(" contentid: '%s'"%book.contentID)
debug_print(" title:'%s'"%book.title)
debug_print(f" contentid: '{book.contentID}'")
debug_print(f" title:'{book.title}'")
debug_print(' the book:', book)
debug_print(" author_sort:'%s'"%book.author_sort)
debug_print(f" author_sort:'{book.author_sort}'")
debug_print(' bookshelves:', bookshelves)
debug_print(' kobo_collections:', kobo_collections)

@ -2021,39 +2021,35 @@ class KOBOTOUCH(KOBO):
if self.supports_kobo_archive() or self.supports_overdrive():
where_clause = (" WHERE BookID IS NULL "
" AND ((Accessibility = -1 AND IsDownloaded in ('true', 1 )) " # Sideloaded books
" OR (Accessibility IN (%(downloaded_accessibility)s) %(expiry)s) " # Purchased books
" %(previews)s %(recommendations)s ) " # Previews or Recommendations
) % \
dict(
" OR (Accessibility IN ({downloaded_accessibility}) {expiry}) " # Purchased books
" {previews} {recommendations} ) " # Previews or Recommendations
).format(**dict(
expiry='' if self.show_archived_books else "and IsDownloaded in ('true', 1)",
previews=" OR (Accessibility in (6) AND ___UserID <> '')" if self.show_previews else '',
recommendations=" OR (Accessibility IN (-1, 4, 6) AND ___UserId = '')" if self.show_recommendations else '',
downloaded_accessibility='1,2,8,9' if self.supports_overdrive() else '1,2'
)
))
elif self.supports_series():
where_clause = (" WHERE BookID IS NULL "
" AND ((Accessibility = -1 AND IsDownloaded IN ('true', 1)) or (Accessibility IN (1,2)) %(previews)s %(recommendations)s )"
" AND NOT ((___ExpirationStatus=3 OR ___ExpirationStatus is Null) %(expiry)s)"
) % \
dict(
" AND ((Accessibility = -1 AND IsDownloaded IN ('true', 1)) or (Accessibility IN (1,2)) {previews} {recommendations} )"
" AND NOT ((___ExpirationStatus=3 OR ___ExpirationStatus is Null) {expiry})"
).format(**dict(
expiry=' AND ContentType = 6' if self.show_archived_books else '',
previews=" or (Accessibility IN (6) AND ___UserID <> '')" if self.show_previews else '',
recommendations=" or (Accessibility in (-1, 4, 6) AND ___UserId = '')" if self.show_recommendations else ''
)
))
elif self.dbversion >= 33:
where_clause = (' WHERE BookID IS NULL %(previews)s %(recommendations)s AND NOT'
' ((___ExpirationStatus=3 or ___ExpirationStatus IS NULL) %(expiry)s)'
) % \
dict(
where_clause = (' WHERE BookID IS NULL {previews} {recommendations} AND NOT'
' ((___ExpirationStatus=3 or ___ExpirationStatus IS NULL) {expiry})'
).format(**dict(
expiry=' AND ContentType = 6' if self.show_archived_books else '',
previews=' AND Accessibility <> 6' if not self.show_previews else '',
recommendations=" AND IsDownloaded IN ('true', 1)" if not self.show_recommendations else ''
)
))
elif self.dbversion >= 16:
where_clause = (' WHERE BookID IS NULL '
'AND NOT ((___ExpirationStatus=3 OR ___ExpirationStatus IS Null) %(expiry)s)'
) % \
dict(expiry=' and ContentType = 6' if self.show_archived_books else '')
'AND NOT ((___ExpirationStatus=3 OR ___ExpirationStatus IS Null) {expiry})'
).format(**dict(expiry=' and ContentType = 6' if self.show_archived_books else ''))
else:
where_clause = ' WHERE BookID IS NULL'

@ -2094,7 +2090,7 @@ class KOBOTOUCH(KOBO):
show_debug = self.is_debugging_title(row['Title'])
if show_debug:
debug_print('KoboTouch:books - looping on database - row=%d' % i)
debug_print("KoboTouch:books - title='%s'"%row['Title'], 'authors=', row['Attribution'])
debug_print("KoboTouch:books - title='{}'".format(row['Title']), 'authors=', row['Attribution'])
debug_print('KoboTouch:books - row=', row)
if not hasattr(row['ContentID'], 'startswith') or row['ContentID'].lower().startswith(
'file:///usr/local/kobo/help/') or row['ContentID'].lower().startswith('/usr/local/kobo/help/'):
@ -2103,7 +2099,7 @@ class KOBOTOUCH(KOBO):
externalId = None if row['ExternalId'] and len(row['ExternalId']) == 0 else row['ExternalId']
path = self.path_from_contentid(row['ContentID'], row['ContentType'], row['MimeType'], oncard, externalId)
if show_debug:
debug_print("KoboTouch:books - path='%s'"%path, " ContentID='%s'"%row['ContentID'], ' externalId=%s' % externalId)
debug_print(f"KoboTouch:books - path='{path}'", " ContentID='{}'".format(row['ContentID']), f' externalId={externalId}')

bookshelves = get_bookshelvesforbook(connection, row['ContentID'])

@ -2142,7 +2138,7 @@ class KOBOTOUCH(KOBO):
need_sync = True
del bl[idx]
else:
debug_print("KoboTouch:books - Book in mtadata.calibre, on file system but not database - bl[idx].title:'%s'"%bl[idx].title)
debug_print(f"KoboTouch:books - Book in mtadata.calibre, on file system but not database - bl[idx].title:'{bl[idx].title}'")

# print('count found in cache: %d, count of files in metadata: %d, need_sync: %s' % \
# (len(bl_cache), len(bl), need_sync))
@ -2159,12 +2155,12 @@ class KOBOTOUCH(KOBO):
debug_print('KoboTouch:books - have done sync_booklists')

self.report_progress(1.0, _('Getting list of books on device...'))
debug_print("KoboTouch:books - end - oncard='%s'"%oncard)
debug_print(f"KoboTouch:books - end - oncard='{oncard}'")
return bl

@classmethod
def book_from_path(cls, prefix, lpath, title, authors, mime, date, ContentType, ImageID):
debug_print('KoboTouch:book_from_path - title=%s'%title)
debug_print(f'KoboTouch:book_from_path - title={title}')
book = super().book_from_path(prefix, lpath, title, authors, mime, date, ContentType, ImageID)

# Kobo Audiobooks are directories with files in them.
@ -2222,11 +2218,11 @@ class KOBOTOUCH(KOBO):
fpath = path + ending
if os.path.exists(fpath):
if show_debug:
debug_print('KoboTouch:imagefilename_from_imageID - have cover image fpath=%s' % (fpath))
debug_print(f'KoboTouch:imagefilename_from_imageID - have cover image fpath={fpath}')
return fpath

if show_debug:
debug_print('KoboTouch:imagefilename_from_imageID - no cover image found - ImageID=%s' % (ImageID))
debug_print(f'KoboTouch:imagefilename_from_imageID - no cover image found - ImageID={ImageID}')
return None

def get_extra_css(self):
@ -2313,7 +2309,7 @@ class KOBOTOUCH(KOBO):

cursor.close()
except Exception as e:
debug_print('KoboTouch:upload_books - Exception: %s'%str(e))
debug_print(f'KoboTouch:upload_books - Exception: {e!s}')

return result

@ -2419,7 +2415,7 @@ class KOBOTOUCH(KOBO):
imageId = super().delete_via_sql(ContentID, ContentType)

if self.dbversion >= 53:
debug_print('KoboTouch:delete_via_sql: ContentID="%s"'%ContentID, 'ContentType="%s"'%ContentType)
debug_print(f'KoboTouch:delete_via_sql: ContentID="{ContentID}"', f'ContentType="{ContentType}"')
try:
with closing(self.device_database_connection()) as connection:
debug_print('KoboTouch:delete_via_sql: have database connection')
@ -2457,9 +2453,9 @@ class KOBOTOUCH(KOBO):
debug_print('KoboTouch:delete_via_sql: finished SQL')
debug_print('KoboTouch:delete_via_sql: After SQL, no exception')
except Exception as e:
debug_print('KoboTouch:delete_via_sql - Database Exception: %s'%str(e))
debug_print(f'KoboTouch:delete_via_sql - Database Exception: {e!s}')

debug_print('KoboTouch:delete_via_sql: imageId="%s"'%imageId)
debug_print(f'KoboTouch:delete_via_sql: imageId="{imageId}"')
if imageId is None:
imageId = self.imageid_from_contentid(ContentID)

@ -2469,12 +2465,12 @@ class KOBOTOUCH(KOBO):
debug_print('KoboTouch:delete_images - ImageID=', ImageID)
if ImageID is not None:
path = self.images_path(book_path, ImageID)
debug_print('KoboTouch:delete_images - path=%s' % path)
debug_print(f'KoboTouch:delete_images - path={path}')

for ending in self.cover_file_endings().keys():
fpath = path + ending
fpath = self.normalize_path(fpath)
debug_print('KoboTouch:delete_images - fpath=%s' % fpath)
debug_print(f'KoboTouch:delete_images - fpath={fpath}')

if os.path.exists(fpath):
debug_print('KoboTouch:delete_images - Image File Exists')
@ -2488,8 +2484,8 @@ class KOBOTOUCH(KOBO):
def contentid_from_path(self, path, ContentType):
show_debug = self.is_debugging_title(path) and True
if show_debug:
debug_print("KoboTouch:contentid_from_path - path='%s'"%path, "ContentType='%s'"%ContentType)
debug_print("KoboTouch:contentid_from_path - self._main_prefix='%s'"%self._main_prefix, "self._card_a_prefix='%s'"%self._card_a_prefix)
debug_print(f"KoboTouch:contentid_from_path - path='{path}'", f"ContentType='{ContentType}'")
debug_print(f"KoboTouch:contentid_from_path - self._main_prefix='{self._main_prefix}'", f"self._card_a_prefix='{self._card_a_prefix}'")
if ContentType == 6:
extension = os.path.splitext(path)[1]
if extension == '.kobo':
@ -2504,19 +2500,19 @@ class KOBOTOUCH(KOBO):
ContentID = ContentID.replace(self._main_prefix, 'file:///mnt/onboard/')

if show_debug:
debug_print("KoboTouch:contentid_from_path - 1 ContentID='%s'"%ContentID)
debug_print(f"KoboTouch:contentid_from_path - 1 ContentID='{ContentID}'")

if self._card_a_prefix is not None:
ContentID = ContentID.replace(self._card_a_prefix, 'file:///mnt/sd/')
else: # ContentType = 16
debug_print("KoboTouch:contentid_from_path ContentType other than 6 - ContentType='%d'"%ContentType, "path='%s'"%path)
debug_print("KoboTouch:contentid_from_path ContentType other than 6 - ContentType='%d'"%ContentType, f"path='{path}'")
ContentID = path
ContentID = ContentID.replace(self._main_prefix, 'file:///mnt/onboard/')
if self._card_a_prefix is not None:
ContentID = ContentID.replace(self._card_a_prefix, 'file:///mnt/sd/')
ContentID = ContentID.replace('\\', '/')
if show_debug:
debug_print("KoboTouch:contentid_from_path - end - ContentID='%s'"%ContentID)
debug_print(f"KoboTouch:contentid_from_path - end - ContentID='{ContentID}'")
return ContentID

def get_content_type_from_path(self, path):
@ -2538,8 +2534,8 @@ class KOBOTOUCH(KOBO):
self.plugboard_func = pb_func

def update_device_database_collections(self, booklists, collections_attributes, oncard):
debug_print("KoboTouch:update_device_database_collections - oncard='%s'"%oncard)
debug_print("KoboTouch:update_device_database_collections - device='%s'" % self)
debug_print(f"KoboTouch:update_device_database_collections - oncard='{oncard}'")
debug_print(f"KoboTouch:update_device_database_collections - device='{self}'")
if self.modify_database_check('update_device_database_collections') is False:
return

@ -2573,7 +2569,7 @@ class KOBOTOUCH(KOBO):
update_core_metadata = self.update_core_metadata
update_purchased_kepubs = self.update_purchased_kepubs
debugging_title = self.get_debugging_title()
debug_print("KoboTouch:update_device_database_collections - set_debugging_title to '%s'" % debugging_title)
debug_print(f"KoboTouch:update_device_database_collections - set_debugging_title to '{debugging_title}'")
booklists.set_debugging_title(debugging_title)
booklists.set_device_managed_collections(self.ignore_collections_names)

@ -2623,11 +2619,11 @@ class KOBOTOUCH(KOBO):
# debug_print(' Title:', book.title, 'category: ', category)
show_debug = self.is_debugging_title(book.title)
if show_debug:
debug_print(' Title="%s"'%book.title, 'category="%s"'%category)
debug_print(f' Title="{book.title}"', f'category="{category}"')
# debug_print(book)
debug_print(' class=%s'%book.__class__)
debug_print(' book.contentID="%s"'%book.contentID)
debug_print(' book.application_id="%s"'%book.application_id)
debug_print(f' class={book.__class__}')
debug_print(f' book.contentID="{book.contentID}"')
debug_print(f' book.application_id="{book.application_id}"')

if book.application_id is None:
continue
@ -2635,13 +2631,13 @@ class KOBOTOUCH(KOBO):
category_added = False

if book.contentID is None:
debug_print(' Do not know ContentID - Title="%s", Authors="%s", path="%s"'%(book.title, book.author, book.path))
debug_print(f' Do not know ContentID - Title="{book.title}", Authors="{book.author}", path="{book.path}"')
extension = os.path.splitext(book.path)[1]
ContentType = self.get_content_type_from_extension(extension) if extension else self.get_content_type_from_path(book.path)
book.contentID = self.contentid_from_path(book.path, ContentType)

if category in self.ignore_collections_names:
debug_print(' Ignoring collection=%s' % category)
debug_print(f' Ignoring collection={category}')
category_added = True
elif category in self.bookshelvelist and self.supports_bookshelves:
if show_debug:
@ -2652,18 +2648,18 @@ class KOBOTOUCH(KOBO):
self.set_bookshelf(connection, book, category)
category_added = True
elif category in readstatuslist:
debug_print("KoboTouch:update_device_database_collections - about to set_readstatus - category='%s'"%(category, ))
debug_print(f"KoboTouch:update_device_database_collections - about to set_readstatus - category='{category}'")
# Manage ReadStatus
self.set_readstatus(connection, book.contentID, readstatuslist.get(category))
category_added = True

elif category == 'Shortlist' and self.dbversion >= 14:
if show_debug:
debug_print(' Have an older version shortlist - %s'%book.title)
debug_print(f' Have an older version shortlist - {book.title}')
# Manage FavouritesIndex/Shortlist
if not self.supports_bookshelves:
if show_debug:
debug_print(' and about to set it - %s'%book.title)
debug_print(f' and about to set it - {book.title}')
self.set_favouritesindex(connection, book.contentID)
category_added = True
elif category in accessibilitylist:
@ -2677,7 +2673,7 @@ class KOBOTOUCH(KOBO):
else:
if show_debug:
debug_print(' category not added to book.device_collections', book.device_collections)
debug_print("KoboTouch:update_device_database_collections - end for category='%s'"%category)
debug_print(f"KoboTouch:update_device_database_collections - end for category='{category}'")

elif have_bookshelf_attributes: # No collections but have set the shelf option
# Since no collections exist the ReadStatus needs to be reset to 0 (Unread)
@ -2702,11 +2698,10 @@ class KOBOTOUCH(KOBO):
books_in_library += 1
show_debug = self.is_debugging_title(book.title)
if show_debug:
debug_print('KoboTouch:update_device_database_collections - book.title=%s' % book.title)
debug_print(f'KoboTouch:update_device_database_collections - book.title={book.title}')
debug_print(
'KoboTouch:update_device_database_collections - contentId=%s,'
'update_core_metadata=%s,update_purchased_kepubs=%s, book.is_sideloaded=%s' % (
book.contentID, update_core_metadata, update_purchased_kepubs, book.is_sideloaded))
f'KoboTouch:update_device_database_collections - contentId={book.contentID},'
f'update_core_metadata={update_core_metadata},update_purchased_kepubs={update_purchased_kepubs}, book.is_sideloaded={book.is_sideloaded}')
if update_core_metadata and (update_purchased_kepubs or book.is_sideloaded):
if show_debug:
debug_print('KoboTouch:update_device_database_collections - calling set_core_metadata')
@ -2717,7 +2712,7 @@ class KOBOTOUCH(KOBO):
self.set_core_metadata(connection, book, series_only=True)
if self.manage_collections and have_bookshelf_attributes:
if show_debug:
debug_print('KoboTouch:update_device_database_collections - about to remove a book from shelves book.title=%s' % book.title)
debug_print(f'KoboTouch:update_device_database_collections - about to remove a book from shelves book.title={book.title}')
self.remove_book_from_device_bookshelves(connection, book)
book.device_collections.extend(book.kobo_collections)
if not prefs['manage_device_metadata'] == 'manual' and delete_empty_collections:
@ -2749,8 +2744,8 @@ class KOBOTOUCH(KOBO):
:param filepath: The full path to the ebook file

'''
debug_print("KoboTouch:upload_cover - path='%s' filename='%s' "%(path, filename))
debug_print(" filepath='%s' "%(filepath))
debug_print(f"KoboTouch:upload_cover - path='{path}' filename='{filename}' ")
debug_print(f" filepath='{filepath}' ")

if not self.upload_covers:
# Building thumbnails disabled
@ -2769,7 +2764,7 @@ class KOBOTOUCH(KOBO):
self.keep_cover_aspect, self.letterbox_fs_covers, self.png_covers,
letterbox_color=self.letterbox_fs_covers_color)
except Exception as e:
debug_print('KoboTouch: FAILED to upload cover=%s Exception=%s'%(filepath, str(e)))
debug_print(f'KoboTouch: FAILED to upload cover={filepath} Exception={e!s}')

def imageid_from_contentid(self, ContentID):
ImageID = ContentID.replace('/', '_')
@ -2793,7 +2788,7 @@ class KOBOTOUCH(KOBO):
hash1 = qhash(imageId)
dir1 = hash1 & (0xff * 1)
dir2 = (hash1 & (0xff00 * 1)) >> 8
path = os.path.join(path, '%s' % dir1, '%s' % dir2)
path = os.path.join(path, f'{dir1}', f'{dir2}')

if imageId:
path = os.path.join(path, imageId)
@ -2864,15 +2859,15 @@ class KOBOTOUCH(KOBO):
):
from calibre.utils.img import optimize_png
from calibre.utils.imghdr import identify
debug_print("KoboTouch:_upload_cover - filename='%s' upload_grayscale='%s' dithered_covers='%s' "%(filename, upload_grayscale, dithered_covers))
debug_print(f"KoboTouch:_upload_cover - filename='{filename}' upload_grayscale='{upload_grayscale}' dithered_covers='{dithered_covers}' ")

if not metadata.cover:
return

show_debug = self.is_debugging_title(filename)
if show_debug:
debug_print("KoboTouch:_upload_cover - path='%s'"%path, "filename='%s'"%filename)
debug_print(" filepath='%s'"%filepath)
debug_print(f"KoboTouch:_upload_cover - path='{path}'", f"filename='{filename}'")
debug_print(f" filepath='{filepath}'")
cover = self.normalize_path(metadata.cover.replace('/', os.sep))

if not os.path.exists(cover):
@ -2895,7 +2890,7 @@ class KOBOTOUCH(KOBO):
ImageID = result[0]
except StopIteration:
ImageID = self.imageid_from_contentid(ContentID)
debug_print("KoboTouch:_upload_cover - No rows exist in the database - generated ImageID='%s'" % ImageID)
debug_print(f"KoboTouch:_upload_cover - No rows exist in the database - generated ImageID='{ImageID}'")

cursor.close()

@ -2907,7 +2902,7 @@ class KOBOTOUCH(KOBO):

image_dir = os.path.dirname(os.path.abspath(path))
if not os.path.exists(image_dir):
debug_print("KoboTouch:_upload_cover - Image folder does not exist. Creating path='%s'" % (image_dir))
debug_print(f"KoboTouch:_upload_cover - Image folder does not exist. Creating path='{image_dir}'")
os.makedirs(image_dir)

with open(cover, 'rb') as f:
@ -2924,7 +2919,7 @@ class KOBOTOUCH(KOBO):

if self.dbversion >= min_dbversion and self.dbversion <= max_dbversion:
if show_debug:
debug_print("KoboTouch:_upload_cover - creating cover for ending='%s'"%ending) # , "library_cover_size'%s'"%library_cover_size)
debug_print(f"KoboTouch:_upload_cover - creating cover for ending='{ending}'") # , "library_cover_size'%s'"%library_cover_size)
fpath = path + ending
fpath = self.normalize_path(fpath.replace('/', os.sep))

@ -2943,9 +2938,8 @@ class KOBOTOUCH(KOBO):
resize_to, expand_to = self._calculate_kobo_cover_size(library_cover_size, kobo_size, not is_full_size, keep_cover_aspect, letterbox)
if show_debug:
debug_print(
'KoboTouch:_calculate_kobo_cover_size - expand_to=%s'
' (vs. kobo_size=%s) & resize_to=%s, keep_cover_aspect=%s & letterbox_fs_covers=%s, png_covers=%s' % (
expand_to, kobo_size, resize_to, keep_cover_aspect, letterbox_fs_covers, png_covers))
f'KoboTouch:_calculate_kobo_cover_size - expand_to={expand_to}'
f' (vs. kobo_size={kobo_size}) & resize_to={resize_to}, keep_cover_aspect={keep_cover_aspect} & letterbox_fs_covers={letterbox_fs_covers}, png_covers={png_covers}')

# NOTE: To speed things up, we enforce a lower
# compression level for png_covers, as the final
@ -2983,7 +2977,7 @@ class KOBOTOUCH(KOBO):
fsync(f)
except Exception as e:
err = str(e)
debug_print('KoboTouch:_upload_cover - Exception string: %s'%err)
debug_print(f'KoboTouch:_upload_cover - Exception string: {err}')
raise

def remove_book_from_device_bookshelves(self, connection, book):
@ -2993,8 +2987,8 @@ class KOBOTOUCH(KOBO):
remove_shelf_list = remove_shelf_list - set(self.ignore_collections_names)

if show_debug:
debug_print('KoboTouch:remove_book_from_device_bookshelves - book.application_id="%s"'%book.application_id)
debug_print('KoboTouch:remove_book_from_device_bookshelves - book.contentID="%s"'%book.contentID)
debug_print(f'KoboTouch:remove_book_from_device_bookshelves - book.application_id="{book.application_id}"')
debug_print(f'KoboTouch:remove_book_from_device_bookshelves - book.contentID="{book.contentID}"')
debug_print('KoboTouch:remove_book_from_device_bookshelves - book.device_collections=', book.device_collections)
debug_print('KoboTouch:remove_book_from_device_bookshelves - book.current_shelves=', book.current_shelves)
debug_print('KoboTouch:remove_book_from_device_bookshelves - remove_shelf_list=', remove_shelf_list)
@ -3009,12 +3003,12 @@ class KOBOTOUCH(KOBO):
if book.device_collections:
placeholder = '?'
placeholders = ','.join(placeholder for unused in book.device_collections)
query += ' and ShelfName not in (%s)' % placeholders
query += f' and ShelfName not in ({placeholders})'
values.extend(book.device_collections)

if show_debug:
debug_print('KoboTouch:remove_book_from_device_bookshelves query="%s"'%query)
debug_print('KoboTouch:remove_book_from_device_bookshelves values="%s"'%values)
debug_print(f'KoboTouch:remove_book_from_device_bookshelves query="{query}"')
debug_print(f'KoboTouch:remove_book_from_device_bookshelves values="{values}"')

cursor = connection.cursor()
cursor.execute(query, values)
@ -3023,7 +3017,7 @@ class KOBOTOUCH(KOBO):
def set_filesize_in_device_database(self, connection, contentID, fpath):
show_debug = self.is_debugging_title(fpath)
if show_debug:
debug_print('KoboTouch:set_filesize_in_device_database contentID="%s"'%contentID)
debug_print(f'KoboTouch:set_filesize_in_device_database contentID="{contentID}"')

test_query = ('SELECT ___FileSize '
'FROM content '
@ -3136,8 +3130,8 @@ class KOBOTOUCH(KOBO):
def set_bookshelf(self, connection, book, shelfName):
show_debug = self.is_debugging_title(book.title)
if show_debug:
debug_print('KoboTouch:set_bookshelf book.ContentID="%s"'%book.contentID)
debug_print('KoboTouch:set_bookshelf book.current_shelves="%s"'%book.current_shelves)
debug_print(f'KoboTouch:set_bookshelf book.ContentID="{book.contentID}"')
debug_print(f'KoboTouch:set_bookshelf book.current_shelves="{book.current_shelves}"')

if shelfName in book.current_shelves:
if show_debug:
@ -3175,7 +3169,7 @@ class KOBOTOUCH(KOBO):
def check_for_bookshelf(self, connection, bookshelf_name):
show_debug = self.is_debugging_title(bookshelf_name)
if show_debug:
debug_print('KoboTouch:check_for_bookshelf bookshelf_name="%s"'%bookshelf_name)
debug_print(f'KoboTouch:check_for_bookshelf bookshelf_name="{bookshelf_name}"')
test_query = 'SELECT InternalName, Name, _IsDeleted FROM Shelf WHERE Name = ?'
test_values = (bookshelf_name, )
addquery = 'INSERT INTO "main"."Shelf"'
@ -3220,7 +3214,7 @@ class KOBOTOUCH(KOBO):

if result is None:
if show_debug:
debug_print(' Did not find a record - adding shelf "%s"' % bookshelf_name)
debug_print(f' Did not find a record - adding shelf "{bookshelf_name}"')
cursor.execute(addquery, add_values)
elif self.is_true_value(result['_IsDeleted']):
debug_print("KoboTouch:check_for_bookshelf - Shelf '{}' is deleted - undeleting. result['_IsDeleted']='{}'".format(
@ -3253,7 +3247,7 @@ class KOBOTOUCH(KOBO):
if bookshelves:
placeholder = '?'
placeholders = ','.join(placeholder for unused in bookshelves)
query += ' and ShelfName in (%s)' % placeholders
query += f' and ShelfName in ({placeholders})'
values.append(bookshelves)
debug_print('KoboTouch:remove_from_bookshelf query=', query)
debug_print('KoboTouch:remove_from_bookshelf values=', values)
@ -3267,8 +3261,8 @@ class KOBOTOUCH(KOBO):
def set_series(self, connection, book):
show_debug = self.is_debugging_title(book.title)
if show_debug:
debug_print('KoboTouch:set_series book.kobo_series="%s"'%book.kobo_series)
debug_print('KoboTouch:set_series book.series="%s"'%book.series)
debug_print(f'KoboTouch:set_series book.kobo_series="{book.kobo_series}"')
debug_print(f'KoboTouch:set_series book.series="{book.series}"')
debug_print('KoboTouch:set_series book.series_index=', book.series_index)

if book.series == book.kobo_series:
@ -3289,7 +3283,7 @@ class KOBOTOUCH(KOBO):
elif book.series_index is None: # This should never happen, but...
update_values = (book.series, None, book.contentID, )
else:
update_values = (book.series, '%g'%book.series_index, book.contentID, )
update_values = (book.series, f'{book.series_index:g}', book.contentID, )

cursor = connection.cursor()
try:
@ -3320,7 +3314,7 @@ class KOBOTOUCH(KOBO):
else:
new_value = new_value if len(new_value.strip()) else None
if new_value is not None and new_value.startswith('PLUGBOARD TEMPLATE ERROR'):
debug_print("KoboTouch:generate_update_from_template template error - template='%s'" % template)
debug_print(f"KoboTouch:generate_update_from_template template error - template='{template}'")
debug_print('KoboTouch:generate_update_from_template - new_value=', new_value)

# debug_print(
@ -3366,7 +3360,7 @@ class KOBOTOUCH(KOBO):
if newmi.series is not None:
new_series = newmi.series
try:
new_series_number = '%g' % newmi.series_index
new_series_number = f'{newmi.series_index:g}'
except:
new_series_number = None
else:
@ -3463,7 +3457,7 @@ class KOBOTOUCH(KOBO):
else:
new_subtitle = book.subtitle if len(book.subtitle.strip()) else None
if new_subtitle is not None and new_subtitle.startswith('PLUGBOARD TEMPLATE ERROR'):
debug_print("KoboTouch:set_core_metadata subtitle template error - self.subtitle_template='%s'" % self.subtitle_template)
debug_print(f"KoboTouch:set_core_metadata subtitle template error - self.subtitle_template='{self.subtitle_template}'")
debug_print('KoboTouch:set_core_metadata - new_subtitle=', new_subtitle)

if (new_subtitle is not None and (book.kobo_subtitle is None or book.subtitle != book.kobo_subtitle)) or \
@ -3509,9 +3503,9 @@ class KOBOTOUCH(KOBO):
update_query += ', '.join([col_name + ' = ?' for col_name in set_clause])
changes_found = True
if show_debug:
debug_print('KoboTouch:set_core_metadata set_clause="%s"' % set_clause)
debug_print('KoboTouch:set_core_metadata update_values="%s"' % update_values)
debug_print('KoboTouch:set_core_metadata update_values="%s"' % update_query)
debug_print(f'KoboTouch:set_core_metadata set_clause="{set_clause}"')
debug_print(f'KoboTouch:set_core_metadata update_values="{update_values}"')
debug_print(f'KoboTouch:set_core_metadata update_values="{update_query}"')
if changes_found:
update_query += ' WHERE ContentID = ? AND BookID IS NULL'
update_values.append(book.contentID)
@ -4087,9 +4081,9 @@ class KOBOTOUCH(KOBO):
' Kobo forum at MobileRead. This is at %s.'
) % 'https://www.mobileread.com/forums/forumdisplay.php?f=223' + '\n' +
(
'\nDevice database version: %s.'
'\nDevice firmware version: %s'
) % (self.dbversion, self.display_fwversion),
f'\nDevice database version: {self.dbversion}.'
f'\nDevice firmware version: {self.display_fwversion}'
),
UserFeedback.WARN
)

@ -4206,7 +4200,7 @@ class KOBOTOUCH(KOBO):
try:
is_debugging = (len(self.debugging_title) > 0 and title.lower().find(self.debugging_title.lower()) >= 0) or len(title) == 0
except:
debug_print(("KoboTouch::is_debugging_title - Exception checking debugging title for title '{}'.").format(title))
debug_print(f"KoboTouch::is_debugging_title - Exception checking debugging title for title '{title}'.")
is_debugging = False

return is_debugging
|
@ -98,7 +98,7 @@ class PDNOVEL(USBMS):
def upload_cover(self, path, filename, metadata, filepath):
coverdata = getattr(metadata, 'thumbnail', None)
if coverdata and coverdata[2]:
with open('%s.jpg' % os.path.join(path, filename), 'wb') as coverfile:
with open(f'{os.path.join(path, filename)}.jpg', 'wb') as coverfile:
coverfile.write(coverdata[2])
fsync(coverfile)

|
@ -35,7 +35,7 @@ DEFAULT_THUMBNAIL_HEIGHT = 320
class MTPInvalidSendPathError(PathError):

def __init__(self, folder):
PathError.__init__(self, 'Trying to send to ignored folder: %s'%folder)
PathError.__init__(self, f'Trying to send to ignored folder: {folder}')
self.folder = folder


@ -405,7 +405,7 @@ class MTP_DEVICE(BASE):
except Exception as e:
ans.append((path, e, traceback.format_exc()))
continue
base = os.path.join(tdir, '%s'%f.object_id)
base = os.path.join(tdir, f'{f.object_id}')
os.mkdir(base)
name = f.name
if iswindows:
@ -628,8 +628,7 @@ class MTP_DEVICE(BASE):
|
||||
try:
|
||||
self.recursive_delete(parent)
|
||||
except:
|
||||
prints('Failed to delete parent: %s, ignoring'%(
|
||||
'/'.join(parent.full_path)))
|
||||
prints('Failed to delete parent: {}, ignoring'.format('/'.join(parent.full_path)))
|
||||
|
||||
def delete_books(self, paths, end_session=True):
|
||||
self.report_progress(0, _('Deleting books from device...'))
|
||||
@ -673,7 +672,7 @@ class MTP_DEVICE(BASE):
|
||||
If that is not found looks for a device default and if that is not
|
||||
found uses the global default.'''
|
||||
dd = self.current_device_defaults if self.is_mtp_device_connected else {}
|
||||
dev_settings = self.prefs.get('device-%s'%self.current_serial_num, {})
|
||||
dev_settings = self.prefs.get(f'device-{self.current_serial_num}', {})
|
||||
default_value = dd.get(key, self.prefs[key])
|
||||
return dev_settings.get(key, default_value)
|
||||
|
||||
|
@ -69,8 +69,7 @@ class FileOrFolder:
|
||||
self.last_modified = as_utc(self.last_modified)
|
||||
|
||||
if self.storage_id not in fs_cache.all_storage_ids:
|
||||
raise ValueError('Storage id %s not valid for %s, valid values: %s'%(self.storage_id,
|
||||
entry, fs_cache.all_storage_ids))
|
||||
raise ValueError(f'Storage id {self.storage_id} not valid for {entry}, valid values: {fs_cache.all_storage_ids}')
|
||||
|
||||
self.is_hidden = entry.get('is_hidden', False)
|
||||
self.is_system = entry.get('is_system', False)
|
||||
@ -92,7 +91,7 @@ class FileOrFolder:
|
||||
self.deleted = False
|
||||
|
||||
if self.is_storage:
|
||||
self.storage_prefix = 'mtp:::%s:::'%self.persistent_id
|
||||
self.storage_prefix = f'mtp:::{self.persistent_id}:::'
|
||||
|
||||
# Ignore non ebook files and AppleDouble files
|
||||
self.is_ebook = (not self.is_folder and not self.is_storage and
|
||||
@ -107,11 +106,10 @@ class FileOrFolder:
|
||||
path = str(self.full_path)
|
||||
except Exception:
|
||||
path = ''
|
||||
datum = 'size=%s'%(self.size)
|
||||
datum = f'size={self.size}'
|
||||
if self.is_folder or self.is_storage:
|
||||
datum = 'children=%s'%(len(self.files) + len(self.folders))
|
||||
return '%s(id=%s, storage_id=%s, %s, path=%s, modified=%s)'%(name, self.object_id,
|
||||
self.storage_id, datum, path, self.last_mod_string)
|
||||
return f'{name}(id={self.object_id}, storage_id={self.storage_id}, {datum}, path={path}, modified={self.last_mod_string})'
|
||||
|
||||
__str__ = __repr__
|
||||
__unicode__ = __repr__
|
||||
@ -171,10 +169,10 @@ class FileOrFolder:
|
||||
|
||||
def dump(self, prefix='', out=sys.stdout):
|
||||
c = '+' if self.is_folder else '-'
|
||||
data = ('%s children'%(sum(map(len, (self.files, self.folders))))
|
||||
data = (f'{sum(map(len, (self.files, self.folders)))} children'
|
||||
if self.is_folder else human_readable(self.size))
|
||||
data += ' modified=%s'%self.last_mod_string
|
||||
line = '%s%s %s [id:%s %s]'%(prefix, c, self.name, self.object_id, data)
|
||||
data += f' modified={self.last_mod_string}'
|
||||
line = f'{prefix}{c} {self.name} [id:{self.object_id} {data}]'
|
||||
prints(line, file=out)
|
||||
for c in (self.folders, self.files):
|
||||
for e in sorted(c, key=lambda x: sort_key(x.name)):
|
||||
@ -290,14 +288,14 @@ class FilesystemCache:
|
||||
|
||||
def resolve_mtp_id_path(self, path):
|
||||
if not path.startswith('mtp:::'):
|
||||
raise ValueError('%s is not a valid MTP path'%path)
|
||||
raise ValueError(f'{path} is not a valid MTP path')
|
||||
parts = path.split(':::', 2)
|
||||
if len(parts) < 3:
|
||||
raise ValueError('%s is not a valid MTP path'%path)
|
||||
raise ValueError(f'{path} is not a valid MTP path')
|
||||
try:
|
||||
object_id = json.loads(parts[1])
|
||||
except Exception:
|
||||
raise ValueError('%s is not a valid MTP path'%path)
|
||||
raise ValueError(f'{path} is not a valid MTP path')
|
||||
id_map = {}
|
||||
path = parts[2]
|
||||
storage_name = path.partition('/')[0]
|
||||
@ -308,4 +306,4 @@ class FilesystemCache:
|
||||
try:
|
||||
return id_map[object_id]
|
||||
except KeyError:
|
||||
raise ValueError('No object found with MTP path: %s'%path)
|
||||
raise ValueError(f'No object found with MTP path: {path}')
|
||||
|
@ -182,7 +182,7 @@ class TestDeviceInteraction(unittest.TestCase):
|
||||
return end_mem - start_mem
|
||||
|
||||
def check_memory(self, once, many, msg, factor=2):
|
||||
msg += ' for once: %g for many: %g'%(once, many)
|
||||
msg += f' for once: {once:g} for many: {many:g}'
|
||||
if once > 0:
|
||||
self.assertTrue(many <= once*factor, msg=msg)
|
||||
else:
|
||||
|
@ -228,8 +228,7 @@ class MTP_DEVICE(MTPDeviceBase):
|
||||
self.dev = self.create_device(connected_device)
|
||||
except Exception as e:
|
||||
self.blacklisted_devices.add(connected_device)
|
||||
raise OpenFailed('Failed to open %s: Error: %s'%(
|
||||
connected_device, as_unicode(e)))
|
||||
raise OpenFailed(f'Failed to open {connected_device}: Error: {as_unicode(e)}')
|
||||
|
||||
try:
|
||||
storage = sorted_storage(self.dev.storage_info)
|
||||
@ -259,13 +258,13 @@ class MTP_DEVICE(MTPDeviceBase):
|
||||
storage = [x for x in storage if x.get('rw', False)]
|
||||
if not storage:
|
||||
self.blacklisted_devices.add(connected_device)
|
||||
raise OpenFailed('No storage found for device %s'%(connected_device,))
|
||||
raise OpenFailed(f'No storage found for device {connected_device}')
|
||||
snum = self.dev.serial_number
|
||||
if snum in self.prefs.get('blacklist', []):
|
||||
self.blacklisted_devices.add(connected_device)
|
||||
self.dev = None
|
||||
raise BlacklistedDevice(
|
||||
'The %s device has been blacklisted by the user'%(connected_device,))
|
||||
f'The {connected_device} device has been blacklisted by the user')
|
||||
self._main_id = storage[0]['id']
|
||||
self._carda_id = self._cardb_id = None
|
||||
if len(storage) > 1:
|
||||
@ -281,11 +280,11 @@ class MTP_DEVICE(MTPDeviceBase):
|
||||
@synchronous
|
||||
def device_debug_info(self):
|
||||
ans = self.get_gui_name()
|
||||
ans += '\nSerial number: %s'%self.current_serial_num
|
||||
ans += '\nManufacturer: %s'%self.dev.manufacturer_name
|
||||
ans += '\nModel: %s'%self.dev.model_name
|
||||
ans += '\nids: %s'%(self.dev.ids,)
|
||||
ans += '\nDevice version: %s'%self.dev.device_version
|
||||
ans += f'\nSerial number: {self.current_serial_num}'
|
||||
ans += f'\nManufacturer: {self.dev.manufacturer_name}'
|
||||
ans += f'\nModel: {self.dev.model_name}'
|
||||
ans += f'\nids: {self.dev.ids}'
|
||||
ans += f'\nDevice version: {self.dev.device_version}'
|
||||
ans += '\nStorage:\n'
|
||||
storage = sorted_storage(self.dev.storage_info)
|
||||
ans += pprint.pformat(storage)
|
||||
@ -306,7 +305,7 @@ class MTP_DEVICE(MTPDeviceBase):
|
||||
path = tuple(reversed(path))
|
||||
ok = not self.is_folder_ignored(self._currently_getting_sid, path)
|
||||
if not ok:
|
||||
debug('Ignored object: %s' % '/'.join(path))
|
||||
debug('Ignored object: {}'.format('/'.join(path)))
|
||||
return ok
|
||||
|
||||
@property
|
||||
@ -335,14 +334,10 @@ class MTP_DEVICE(MTPDeviceBase):
|
||||
all_items.extend(items), all_errs.extend(errs)
|
||||
if not all_items and all_errs:
|
||||
raise DeviceError(
|
||||
'Failed to read filesystem from %s with errors: %s'
|
||||
%(self.current_friendly_name,
|
||||
self.format_errorstack(all_errs)))
|
||||
f'Failed to read filesystem from {self.current_friendly_name} with errors: {self.format_errorstack(all_errs)}')
|
||||
if all_errs:
|
||||
prints('There were some errors while getting the '
|
||||
' filesystem from %s: %s'%(
|
||||
self.current_friendly_name,
|
||||
self.format_errorstack(all_errs)))
|
||||
f' filesystem from {self.current_friendly_name}: {self.format_errorstack(all_errs)}')
|
||||
self._filesystem_cache = FilesystemCache(storage, all_items)
|
||||
debug('Filesystem metadata loaded in %g seconds (%d objects)'%(
|
||||
time.time()-st, len(self._filesystem_cache)))
|
||||
@ -377,7 +372,7 @@ class MTP_DEVICE(MTPDeviceBase):
|
||||
@synchronous
|
||||
def create_folder(self, parent, name):
|
||||
if not parent.is_folder:
|
||||
raise ValueError('%s is not a folder'%(parent.full_path,))
|
||||
raise ValueError(f'{parent.full_path} is not a folder')
|
||||
e = parent.folder_named(name)
|
||||
if e is not None:
|
||||
return e
|
||||
@ -387,21 +382,18 @@ class MTP_DEVICE(MTPDeviceBase):
|
||||
ans, errs = self.dev.create_folder(sid, pid, name)
|
||||
if ans is None:
|
||||
raise DeviceError(
|
||||
'Failed to create folder named %s in %s with error: %s'%
|
||||
(name, parent.full_path, self.format_errorstack(errs)))
|
||||
f'Failed to create folder named {name} in {parent.full_path} with error: {self.format_errorstack(errs)}')
|
||||
return parent.add_child(ans)
|
||||
|
||||
@synchronous
|
||||
def put_file(self, parent, name, stream, size, callback=None, replace=True):
|
||||
e = parent.folder_named(name)
|
||||
if e is not None:
|
||||
raise ValueError('Cannot upload file, %s already has a folder named: %s'%(
|
||||
parent.full_path, e.name))
|
||||
raise ValueError(f'Cannot upload file, {parent.full_path} already has a folder named: {e.name}')
|
||||
e = parent.file_named(name)
|
||||
if e is not None:
|
||||
if not replace:
|
||||
raise ValueError('Cannot upload file %s, it already exists'%(
|
||||
e.full_path,))
|
||||
raise ValueError(f'Cannot upload file {e.full_path}, it already exists')
|
||||
self.delete_file_or_folder(e)
|
||||
sid, pid = parent.storage_id, parent.object_id
|
||||
if pid == sid:
|
||||
@ -409,21 +401,19 @@ class MTP_DEVICE(MTPDeviceBase):
|
||||
|
||||
ans, errs = self.dev.put_file(sid, pid, name, stream, size, callback)
|
||||
if ans is None:
|
||||
raise DeviceError('Failed to upload file named: %s to %s: %s'
|
||||
%(name, parent.full_path, self.format_errorstack(errs)))
|
||||
raise DeviceError(f'Failed to upload file named: {name} to {parent.full_path}: {self.format_errorstack(errs)}')
|
||||
return parent.add_child(ans)
|
||||
|
||||
@synchronous
|
||||
def get_mtp_file(self, f, stream=None, callback=None):
|
||||
if f.is_folder:
|
||||
raise ValueError('%s if a folder'%(f.full_path,))
|
||||
raise ValueError(f'{f.full_path} if a folder')
|
||||
set_name = stream is None
|
||||
if stream is None:
|
||||
stream = SpooledTemporaryFile(5*1024*1024, '_wpd_receive_file.dat')
|
||||
ok, errs = self.dev.get_file(f.object_id, stream, callback)
|
||||
if not ok:
|
||||
raise DeviceError('Failed to get file: %s with errors: %s'%(
|
||||
f.full_path, self.format_errorstack(errs)))
|
||||
raise DeviceError(f'Failed to get file: {f.full_path} with errors: {self.format_errorstack(errs)}')
|
||||
stream.seek(0)
|
||||
if set_name:
|
||||
stream.name = f.name
|
||||
@ -476,18 +466,14 @@ class MTP_DEVICE(MTPDeviceBase):
|
||||
if obj.deleted:
|
||||
return
|
||||
if not obj.can_delete:
|
||||
raise ValueError('Cannot delete %s as deletion not allowed'%
|
||||
(obj.full_path,))
|
||||
raise ValueError(f'Cannot delete {obj.full_path} as deletion not allowed')
|
||||
if obj.is_system:
|
||||
raise ValueError('Cannot delete %s as it is a system object'%
|
||||
(obj.full_path,))
|
||||
raise ValueError(f'Cannot delete {obj.full_path} as it is a system object')
|
||||
if obj.files or obj.folders:
|
||||
raise ValueError('Cannot delete %s as it is not empty'%
|
||||
(obj.full_path,))
|
||||
raise ValueError(f'Cannot delete {obj.full_path} as it is not empty')
|
||||
parent = obj.parent
|
||||
ok, errs = self.dev.delete_object(obj.object_id)
|
||||
if not ok:
|
||||
raise DeviceError('Failed to delete %s with error: %s'%
|
||||
(obj.full_path, self.format_errorstack(errs)))
|
||||
raise DeviceError(f'Failed to delete {obj.full_path} with error: {self.format_errorstack(errs)}')
|
||||
parent.remove_child(obj)
|
||||
return parent
|
||||
|
@ -34,7 +34,7 @@ class MTPDetect:
|
||||
except OSError:
|
||||
pass
|
||||
|
||||
ipath = os.path.join(self.base, '{0}-*/{0}-*/interface'.format(dev.busnum))
|
||||
ipath = os.path.join(self.base, f'{dev.busnum}-*/{dev.busnum}-*/interface')
|
||||
for x in glob.glob(ipath):
|
||||
raw = read(x)
|
||||
if not raw or raw.strip() != b'MTP':
|
||||
@ -44,8 +44,8 @@ class MTPDetect:
|
||||
try:
|
||||
if raw and int(raw) == dev.devnum:
|
||||
if debug is not None:
|
||||
debug('Unknown device {} claims to be an MTP device'
|
||||
.format(dev))
|
||||
debug(f'Unknown device {dev} claims to be an MTP device'
|
||||
)
|
||||
return True
|
||||
except (ValueError, TypeError):
|
||||
continue
|
||||
|
@ -258,7 +258,7 @@ class MTP_DEVICE(MTPDeviceBase):
path = tuple(reversed(path))
ok = not self.is_folder_ignored(self._currently_getting_sid, path)
if not ok:
debug('Ignored object: %s' % '/'.join(path))
debug('Ignored object: {}'.format('/'.join(path)))
return ok

@property
@ -330,19 +330,18 @@ class MTP_DEVICE(MTPDeviceBase):
self.dev = self.wpd.Device(connected_device)
except self.wpd.WPDError as e:
self.blacklisted_devices.add(connected_device)
raise OpenFailed('Failed to open %s with error: %s'%(
connected_device, as_unicode(e)))
raise OpenFailed(f'Failed to open {connected_device} with error: {as_unicode(e)}')
devdata = self.dev.data
storage = [s for s in devdata.get('storage', []) if s.get('rw', False)]
if not storage:
self.blacklisted_devices.add(connected_device)
raise OpenFailed('No storage found for device %s'%(connected_device,))
raise OpenFailed(f'No storage found for device {connected_device}')
snum = devdata.get('serial_number', None)
if snum in self.prefs.get('blacklist', []):
self.blacklisted_devices.add(connected_device)
self.dev = None
raise BlacklistedDevice(
'The %s device has been blacklisted by the user'%(connected_device,))
f'The {connected_device} device has been blacklisted by the user')

storage = sorted_storage(storage)

@ -435,7 +434,7 @@ class MTP_DEVICE(MTPDeviceBase):
@same_thread
def get_mtp_file(self, f, stream=None, callback=None):
if f.is_folder:
raise ValueError('%s if a folder'%(f.full_path,))
raise ValueError(f'{f.full_path} if a folder')
set_name = stream is None
if stream is None:
stream = SpooledTemporaryFile(5*1024*1024, '_wpd_receive_file.dat')
@ -446,8 +445,7 @@ class MTP_DEVICE(MTPDeviceBase):
time.sleep(2)
self.dev.get_file(f.object_id, stream, callback)
except Exception as e:
raise DeviceError('Failed to fetch the file %s with error: %s'%
(f.full_path, as_unicode(e)))
raise DeviceError(f'Failed to fetch the file {f.full_path} with error: {as_unicode(e)}')
stream.seek(0)
if set_name:
stream.name = f.name
@ -456,7 +454,7 @@ class MTP_DEVICE(MTPDeviceBase):
@same_thread
def create_folder(self, parent, name):
if not parent.is_folder:
raise ValueError('%s is not a folder'%(parent.full_path,))
raise ValueError(f'{parent.full_path} is not a folder')
e = parent.folder_named(name)
if e is not None:
return e
@ -472,14 +470,11 @@ class MTP_DEVICE(MTPDeviceBase):
if obj.deleted:
return
if not obj.can_delete:
raise ValueError('Cannot delete %s as deletion not allowed'%
(obj.full_path,))
raise ValueError(f'Cannot delete {obj.full_path} as deletion not allowed')
if obj.is_system:
raise ValueError('Cannot delete %s as it is a system object'%
(obj.full_path,))
raise ValueError(f'Cannot delete {obj.full_path} as it is a system object')
if obj.files or obj.folders:
raise ValueError('Cannot delete %s as it is not empty'%
(obj.full_path,))
raise ValueError(f'Cannot delete {obj.full_path} as it is not empty')
parent = obj.parent
self.dev.delete_object(obj.object_id)
parent.remove_child(obj)
@ -489,13 +484,11 @@ class MTP_DEVICE(MTPDeviceBase):
def put_file(self, parent, name, stream, size, callback=None, replace=True):
e = parent.folder_named(name)
if e is not None:
raise ValueError('Cannot upload file, %s already has a folder named: %s'%(
parent.full_path, e.name))
raise ValueError(f'Cannot upload file, {parent.full_path} already has a folder named: {e.name}')
e = parent.file_named(name)
if e is not None:
if not replace:
raise ValueError('Cannot upload file %s, it already exists'%(
e.full_path,))
raise ValueError(f'Cannot upload file {e.full_path}, it already exists')
self.delete_file_or_folder(e)
sid, pid = parent.storage_id, parent.object_id
ans = self.dev.put_file(pid, name, stream, size, callback)

@ -70,7 +70,7 @@ class NOOK(USBMS):
cover.save(data, 'JPEG')
coverdata = data.getvalue()

with open('%s.jpg' % os.path.join(path, filename), 'wb') as coverfile:
with open(f'{os.path.join(path, filename)}.jpg', 'wb') as coverfile:
coverfile.write(coverdata)
fsync(coverfile)

@ -214,11 +214,11 @@ class PALADIN(USBMS):
import traceback
tb = traceback.format_exc()
raise DeviceError((('The Paladin database is corrupted. '
' Delete the file %s on your reader and then disconnect '
f' Delete the file {dbpath} on your reader and then disconnect '
' reconnect it. If you are using an SD card, you '
' should delete the file on the card as well. Note that '
' deleting this file will cause your reader to forget '
' any notes/highlights, etc.')%dbpath)+' Underlying error:'
' any notes/highlights, etc.'))+' Underlying error:'
'\n'+tb)

def get_database_min_id(self, source_id):
@ -261,11 +261,11 @@ class PALADIN(USBMS):
import traceback
tb = traceback.format_exc()
raise DeviceError((('The Paladin database is corrupted. '
' Delete the file %s on your reader and then disconnect '
f' Delete the file {dbpath} on your reader and then disconnect '
' reconnect it. If you are using an SD card, you '
' should delete the file on the card as well. Note that '
' deleting this file will cause your reader to forget '
' any notes/highlights, etc.')%dbpath)+' Underlying error:'
' any notes/highlights, etc.'))+' Underlying error:'
'\n'+tb)

# Get the books themselves, but keep track of any that are less than the minimum.
@ -398,11 +398,11 @@ class PALADIN(USBMS):
import traceback
tb = traceback.format_exc()
raise DeviceError((('The Paladin database is corrupted. '
' Delete the file %s on your reader and then disconnect '
f' Delete the file {dbpath} on your reader and then disconnect '
' reconnect it. If you are using an SD card, you '
' should delete the file on the card as well. Note that '
' deleting this file will cause your reader to forget '
' any notes/highlights, etc.')%dbpath)+' Underlying error:'
' any notes/highlights, etc.'))+' Underlying error:'
'\n'+tb)

db_collections = {}

@ -170,7 +170,7 @@ class PRS505(USBMS):
def filename_callback(self, fname, mi):
if getattr(mi, 'application_id', None) is not None:
base = fname.rpartition('.')[0]
suffix = '_%s'%mi.application_id
suffix = f'_{mi.application_id}'
if not base.endswith(suffix):
fname = base + suffix + '.' + fname.rpartition('.')[-1]
return fname
@ -183,7 +183,7 @@ class PRS505(USBMS):
('card_a', CACHE_XML, CACHE_EXT, 1),
('card_b', CACHE_XML, CACHE_EXT, 2)
]:
prefix = getattr(self, '_%s_prefix'%prefix)
prefix = getattr(self, f'_{prefix}_prefix')
if prefix is not None and os.path.exists(prefix):
paths[source_id] = os.path.join(prefix, *(path.split('/')))
ext_paths[source_id] = os.path.join(prefix, *(ext_path.split('/')))
@ -298,4 +298,4 @@ class PRS505(USBMS):
cpath = os.path.join(thumbnail_dir, 'main_thumbnail.jpg')
with open(cpath, 'wb') as f:
f.write(metadata.thumbnail[-1])
debug_print('Cover uploaded to: %r'%cpath)
debug_print(f'Cover uploaded to: {cpath!r}')

@ -103,8 +103,8 @@ class XMLCache:
for source_id, path in paths.items():
if source_id == 0:
if not os.path.exists(path):
raise DeviceError(('The SONY XML cache %r does not exist. Try'
' disconnecting and reconnecting your reader.')%repr(path))
raise DeviceError(f'The SONY XML cache {repr(path)!r} does not exist. Try'
' disconnecting and reconnecting your reader.')
with open(path, 'rb') as f:
raw = f.read()
else:
@ -117,8 +117,8 @@ class XMLCache:
xml_to_unicode(raw, strip_encoding_pats=True, assume_utf8=True, verbose=DEBUG)[0]
)
if self.roots[source_id] is None:
raise Exception(('The SONY database at %r is corrupted. Try '
' disconnecting and reconnecting your reader.')%path)
raise Exception(f'The SONY database at {path!r} is corrupted. Try '
' disconnecting and reconnecting your reader.')

self.ext_paths, self.ext_roots = {}, {}
for source_id, path in ext_paths.items():
@ -265,7 +265,7 @@ class XMLCache:
if title in self._playlist_to_playlist_id_map[bl_idx]:
return self._playlist_to_playlist_id_map[bl_idx][title]
debug_print('Creating playlist:', title)
ans = root.makeelement('{%s}playlist'%self.namespaces[bl_idx],
ans = root.makeelement(f'{{{self.namespaces[bl_idx]}}}playlist',
nsmap=root.nsmap, attrib={
'uuid' : uuid(),
'title': title,
@ -303,11 +303,11 @@ class XMLCache:
if id_ in idmap:
item.set('id', idmap[id_])
if DEBUG:
debug_print('Remapping id %s to %s'%(id_, idmap[id_]))
debug_print(f'Remapping id {id_} to {idmap[id_]}')

def ensure_media_xml_base_ids(root):
for num, tag in enumerate(('library', 'watchSpecial')):
for x in root.xpath('//*[local-name()="%s"]'%tag):
for x in root.xpath(f'//*[local-name()="{tag}"]'):
x.set('id', str(num))

def rebase_ids(root, base, sourceid, pl_sourceid):
@ -538,7 +538,7 @@ class XMLCache:
# add the ids that get_collections didn't know about.
for id_ in ids + extra_ids:
item = playlist.makeelement(
'{%s}item'%self.namespaces[bl_index],
f'{{{self.namespaces[bl_index]}}}item',
nsmap=playlist.nsmap, attrib={'id':id_})
playlist.append(item)

@ -569,14 +569,14 @@ class XMLCache:
attrib = {
'page':'0', 'part':'0','pageOffset':'0','scale':'0',
'id':str(id_), 'sourceid':'1', 'path':lpath}
ans = root.makeelement('{%s}text'%namespace, attrib=attrib, nsmap=root.nsmap)
ans = root.makeelement(f'{{{namespace}}}text', attrib=attrib, nsmap=root.nsmap)
root.append(ans)
return ans

def create_ext_text_record(self, root, bl_id, lpath, thumbnail):
namespace = root.nsmap[None]
attrib = {'path': lpath}
ans = root.makeelement('{%s}text'%namespace, attrib=attrib,
ans = root.makeelement(f'{{{namespace}}}text', attrib=attrib,
nsmap=root.nsmap)
ans.tail = '\n'
if len(root) > 0:
@ -586,7 +586,7 @@ class XMLCache:
root.append(ans)
if thumbnail and thumbnail[-1]:
ans.text = '\n' + '\t\t'
t = root.makeelement('{%s}thumbnail'%namespace,
t = root.makeelement(f'{{{namespace}}}thumbnail',
attrib={'width':str(thumbnail[0]), 'height':str(thumbnail[1])},
nsmap=root.nsmap)
t.text = 'main_thumbnail.jpg'
@ -757,7 +757,7 @@ class XMLCache:
return m

def book_by_lpath(self, lpath, root):
matches = root.xpath('//*[local-name()="text" and @path="%s"]'%lpath)
matches = root.xpath(f'//*[local-name()="text" and @path="{lpath}"]')
if matches:
return matches[0]

@ -782,7 +782,7 @@ class XMLCache:
for i in self.roots:
for c in ('library', 'text', 'image', 'playlist', 'thumbnail',
'watchSpecial'):
matches = self.record_roots[i].xpath('//*[local-name()="%s"]'%c)
matches = self.record_roots[i].xpath(f'//*[local-name()="{c}"]')
if matches:
e = matches[0]
self.namespaces[i] = e.nsmap[e.prefix]

@ -316,11 +316,11 @@ class PRST1(USBMS):
import traceback
tb = traceback.format_exc()
raise DeviceError((('The SONY database is corrupted. '
' Delete the file %s on your reader and then disconnect '
f' Delete the file {dbpath} on your reader and then disconnect '
' reconnect it. If you are using an SD card, you '
' should delete the file on the card as well. Note that '
' deleting this file will cause your reader to forget '
' any notes/highlights, etc.')%dbpath)+' Underlying error:'
' any notes/highlights, etc.'))+' Underlying error:'
'\n'+tb)

def get_lastrowid(self, cursor):
@ -374,11 +374,11 @@ class PRST1(USBMS):
import traceback
tb = traceback.format_exc()
raise DeviceError((('The SONY database is corrupted. '
' Delete the file %s on your reader and then disconnect '
f' Delete the file {dbpath} on your reader and then disconnect '
' reconnect it. If you are using an SD card, you '
' should delete the file on the card as well. Note that '
' deleting this file will cause your reader to forget '
' any notes/highlights, etc.')%dbpath)+' Underlying error:'
' any notes/highlights, etc.'))+' Underlying error:'
'\n'+tb)

# Get the books themselves, but keep track of any that are less than the minimum.
@ -546,11 +546,11 @@ class PRST1(USBMS):
import traceback
tb = traceback.format_exc()
raise DeviceError((('The SONY database is corrupted. '
' Delete the file %s on your reader and then disconnect '
f' Delete the file {dbpath} on your reader and then disconnect '
' reconnect it. If you are using an SD card, you '
' should delete the file on the card as well. Note that '
' deleting this file will cause your reader to forget '
' any notes/highlights, etc.')%dbpath)+' Underlying error:'
' any notes/highlights, etc.'))+' Underlying error:'
'\n'+tb)

db_collections = {}

@ -45,11 +45,9 @@ class USBDevice(_USBDevice):
return self

def __repr__(self):
return ('USBDevice(busnum=%s, devnum=%s, '
'vendor_id=0x%04x, product_id=0x%04x, bcd=0x%04x, '
'manufacturer=%s, product=%s, serial=%s)')%(
self.busnum, self.devnum, self.vendor_id, self.product_id,
self.bcd, self.manufacturer, self.product, self.serial)
return (f'USBDevice(busnum={self.busnum}, devnum={self.devnum}, '
f'vendor_id=0x{self.vendor_id:04x}, product_id=0x{self.product_id:04x}, bcd=0x{self.bcd:04x}, '
f'manufacturer={self.manufacturer}, product={self.product}, serial={self.serial})')

__str__ = __repr__
__unicode__ = __repr__

@ -402,8 +402,7 @@ class SMART_DEVICE_APP(DeviceConfig, DevicePlugin):
return
total_elapsed = time.time() - self.debug_start_time
elapsed = time.time() - self.debug_time
print('SMART_DEV (%7.2f:%7.3f) %s'%(total_elapsed, elapsed,
inspect.stack()[1][3]), end='')
print(f'SMART_DEV ({total_elapsed:7.2f}:{elapsed:7.3f}) {inspect.stack()[1][3]}', end='')
for a in args:
try:
if isinstance(a, dict):
@ -712,7 +711,7 @@ class SMART_DEVICE_APP(DeviceConfig, DevicePlugin):
wait_for_response=self.can_send_ok_to_sendbook)
if self.can_send_ok_to_sendbook:
if opcode == 'ERROR':
raise UserFeedback(msg='Sending book %s to device failed' % lpath,
raise UserFeedback(msg=f'Sending book {lpath} to device failed',
details=result.get('message', ''),
level=UserFeedback.ERROR)
return
@ -1493,7 +1492,7 @@ class SMART_DEVICE_APP(DeviceConfig, DevicePlugin):
book = SDBook(self.PREFIX, lpath, other=mdata)
length, lpath = self._put_file(infile, lpath, book, i, len(files))
if length < 0:
raise ControlError(desc='Sending book %s to device failed' % lpath)
raise ControlError(desc=f'Sending book {lpath} to device failed')
paths.append((lpath, length))
# No need to deal with covers. The client will get the thumbnails
# in the mi structure

@ -256,7 +256,7 @@ class Device(DeviceConfig, DevicePlugin):
if dl in dlmap['readonly_drives']:
filtered.add(dl)
if debug:
prints('Ignoring the drive %s as it is readonly' % dl)
prints(f'Ignoring the drive {dl} as it is readonly')
elif self.windows_filter_pnp_id(pnp_id):
filtered.add(dl)
if debug:
@ -264,7 +264,7 @@ class Device(DeviceConfig, DevicePlugin):
elif not drive_is_ok(dl, debug=debug):
filtered.add(dl)
if debug:
prints('Ignoring the drive %s because failed to get free space for it' % dl)
prints(f'Ignoring the drive {dl} because failed to get free space for it')
dlmap['drive_letters'] = [dl for dl in dlmap['drive_letters'] if dl not in filtered]

if not dlmap['drive_letters']:

@ -57,7 +57,7 @@ class DeviceConfig:
@classmethod
def _config(cls):
name = cls._config_base_name()
c = Config('device_drivers_%s' % name, _('settings for device drivers'))
c = Config(f'device_drivers_{name}', _('settings for device drivers'))
c.add_opt('format_map', default=cls.FORMATS,
help=_('Ordered list of formats the device will accept'))
c.add_opt('use_subdirs', default=cls.SUPPORTS_SUB_DIRS_DEFAULT,

@ -47,7 +47,7 @@ def safe_walk(top, topdown=True, onerror=None, followlinks=False, maxdepth=128):
try:
name = name.decode(filesystem_encoding)
except UnicodeDecodeError:
debug_print('Skipping undecodeable file: %r' % name)
debug_print(f'Skipping undecodeable file: {name!r}')
continue
if isdir(join(top, name)):
dirs.append(name)

@ -60,7 +60,7 @@ def build_template_regexp(template):
template = template.rpartition('/')[2]
return re.compile(re.sub(r'{([^}]*)}', f, template) + r'([_\d]*$)')
except:
prints('Failed to parse template: %r'%template)
prints(f'Failed to parse template: {template!r}')
template = '{title} - {authors}'
return re.compile(re.sub(r'{([^}]*)}', f, template) + r'([_\d]*$)')

@ -69,8 +69,8 @@ class GUID(Structure):
self.data1,
self.data2,
self.data3,
''.join(['%02x' % d for d in self.data4[:2]]),
''.join(['%02x' % d for d in self.data4[2:]]),
''.join([f'{d:02x}' for d in self.data4[:2]]),
''.join([f'{d:02x}' for d in self.data4[2:]]),
)


@ -394,7 +394,7 @@ def cwrap(name, restype, *argtypes, **kw):
lib = cfgmgr if name.startswith('CM') else setupapi
func = prototype((name, kw.pop('lib', lib)))
if kw:
raise TypeError('Unknown keyword arguments: %r' % kw)
raise TypeError(f'Unknown keyword arguments: {kw!r}')
if errcheck is not None:
func.errcheck = errcheck
return func
@ -414,7 +414,7 @@ def bool_err_check(result, func, args):

def config_err_check(result, func, args):
if result != CR_CODES['CR_SUCCESS']:
raise WinError(result, 'The cfgmgr32 function failed with err: %s' % CR_CODE_NAMES.get(result, result))
raise WinError(result, f'The cfgmgr32 function failed with err: {CR_CODE_NAMES.get(result, result)}')
return args


@ -575,7 +575,7 @@ def get_device_id(devinst, buf=None):
buf = create_unicode_buffer(devid_size.value)
continue
if ret != CR_CODES['CR_SUCCESS']:
raise WinError(ret, 'The cfgmgr32 function failed with err: %s' % CR_CODE_NAMES.get(ret, ret))
raise WinError(ret, f'The cfgmgr32 function failed with err: {CR_CODE_NAMES.get(ret, ret)}')
break
return wstring_at(buf), buf

@ -610,7 +610,7 @@ def convert_registry_data(raw, size, dtype):
if size == 0:
return 0
return cast(raw, POINTER(QWORD)).contents.value
raise ValueError('Unsupported data type: %r' % dtype)
raise ValueError(f'Unsupported data type: {dtype!r}')


def get_device_registry_property(dev_list, p_devinfo, property_type=SPDRP_HARDWAREID, buf=None):
@ -712,9 +712,8 @@ class USBDevice(_USBDevice):
def r(x):
if x is None:
return 'None'
return '0x%x' % x
return 'USBDevice(vendor_id={} product_id={} bcd={} devid={} devinst={})'.format(
r(self.vendor_id), r(self.product_id), r(self.bcd), self.devid, self.devinst)
return f'0x{x:x}'
return f'USBDevice(vendor_id={r(self.vendor_id)} product_id={r(self.product_id)} bcd={r(self.bcd)} devid={self.devid} devinst={self.devinst})'


def parse_hex(x):
@ -976,7 +975,7 @@ def get_device_string(hub_handle, device_port, index, buf=None, lang=0x409):
data = cast(buf, PUSB_DESCRIPTOR_REQUEST).contents.Data
sz, dtype = data.bLength, data.bType
if dtype != 0x03:
raise OSError(errno.EINVAL, 'Invalid datatype for string descriptor: 0x%x' % dtype)
raise OSError(errno.EINVAL, f'Invalid datatype for string descriptor: 0x{dtype:x}')
return buf, wstring_at(addressof(data.String), sz // 2).rstrip('\0')


@ -996,7 +995,7 @@ def get_device_languages(hub_handle, device_port, buf=None):
data = cast(buf, PUSB_DESCRIPTOR_REQUEST).contents.Data
sz, dtype = data.bLength, data.bType
if dtype != 0x03:
raise OSError(errno.EINVAL, 'Invalid datatype for string descriptor: 0x%x' % dtype)
raise OSError(errno.EINVAL, f'Invalid datatype for string descriptor: 0x{dtype:x}')
data = cast(data.String, POINTER(USHORT*(sz//2)))
return buf, list(filter(None, data.contents))

@ -245,7 +245,7 @@ def escape_xpath_attr(value):
if x:
q = "'" if '"' in x else '"'
ans.append(q + x + q)
return 'concat(%s)' % ', '.join(ans)
return 'concat({})'.format(', '.join(ans))
else:
return "'%s'" % value
return '"%s"' % value
return f"'{value}'"
return f'"{value}"'

@ -55,7 +55,7 @@ class CHMReader(CHMFile):
t.write(open(input, 'rb').read())
input = t.name
if not self.LoadCHM(input):
raise CHMError("Unable to open CHM file '%s'"%(input,))
raise CHMError(f"Unable to open CHM file '{input}'")
self.log = log
self.input_encoding = input_encoding
self._sourcechm = input
@ -188,7 +188,7 @@ class CHMReader(CHMFile):
try:
data = self.GetFile(path)
except:
self.log.exception('Failed to extract %s from CHM, ignoring'%path)
self.log.exception(f'Failed to extract {path} from CHM, ignoring')
continue
if lpath.find(';') != -1:
# fix file names with ";<junk>" at the end, see _reformat()
@ -203,7 +203,7 @@ class CHMReader(CHMFile):
pass
except:
if iswindows and len(lpath) > 250:
self.log.warn('%r filename too long, skipping'%path)
self.log.warn(f'{path!r} filename too long, skipping')
continue
raise

@ -119,7 +119,7 @@ def decompress(stream):
txt = []
stream.seek(0)
if stream.read(9) != b'!!8-Bit!!':
raise ValueError('File %s contains an invalid TCR header.' % stream.name)
raise ValueError(f'File {stream.name} contains an invalid TCR header.')

# Codes that the file contents are broken down into.
entries = []

@ -371,8 +371,7 @@ def read_sr_patterns(path, log=None):
try:
re.compile(line)
except:
msg = 'Invalid regular expression: %r from file: %r'%(
line, path)
msg = f'Invalid regular expression: {line!r} from file: {path!r}'
if log is not None:
log.error(msg)
raise SystemExit(1)

@ -23,7 +23,7 @@ class CHMInput(InputFormatPlugin):
from calibre.ebooks.chm.reader import CHMReader
log.debug('Opening CHM file')
rdr = CHMReader(chm_path, log, input_encoding=self.opts.input_encoding)
log.debug('Extracting CHM to %s' % output_dir)
log.debug(f'Extracting CHM to {output_dir}')
rdr.extract_content(output_dir, debug_dump=debug_dump)
self._chm_reader = rdr
return rdr.hhc_path
@ -46,8 +46,8 @@ class CHMInput(InputFormatPlugin):

# closing stream so CHM can be opened by external library
stream.close()
log.debug('tdir=%s' % tdir)
log.debug('stream.name=%s' % stream.name)
log.debug(f'tdir={tdir}')
log.debug(f'stream.name={stream.name}')
debug_dump = False
odi = options.debug_pipeline
if odi:

@ -99,10 +99,9 @@ class ComicInput(InputFormatPlugin):
comics = []
with CurrentDir(tdir):
if not os.path.exists('comics.txt'):
raise ValueError((
'%s is not a valid comic collection'
raise ValueError(
f'{stream.name} is not a valid comic collection'
' no comics.txt was found in the file')
%stream.name)
with open('comics.txt', 'rb') as f:
raw = f.read()
if raw.startswith(codecs.BOM_UTF16_BE):
@ -125,7 +124,7 @@ class ComicInput(InputFormatPlugin):
if os.access(fname, os.R_OK):
comics.append([title, fname])
if not comics:
raise ValueError('%s has no comics'%stream.name)
raise ValueError(f'{stream.name} has no comics')
return comics

def get_pages(self, comic, tdir2):
@ -135,12 +134,11 @@ class ComicInput(InputFormatPlugin):
verbose=self.opts.verbose)
thumbnail = None
if not new_pages:
raise ValueError('Could not find any pages in the comic: %s'
%comic)
raise ValueError(f'Could not find any pages in the comic: {comic}')
if self.opts.no_process:
n2 = []
for i, page in enumerate(new_pages):
n2.append(os.path.join(tdir2, '{} - {}' .format(i, os.path.basename(page))))
n2.append(os.path.join(tdir2, f'{i} - {os.path.basename(page)}'))
shutil.copyfile(page, n2[-1])
new_pages = n2
else:
@ -152,8 +150,7 @@ class ComicInput(InputFormatPlugin):
for f in failures:
self.log.warning('\t', f)
if not new_pages:
raise ValueError('Could not find any valid pages in comic: %s'
% comic)
raise ValueError(f'Could not find any valid pages in comic: {comic}')
thumbnail = os.path.join(tdir2,
'thumbnail.'+self.opts.output_format.lower())
if not os.access(thumbnail, os.R_OK):
@ -193,7 +190,7 @@ class ComicInput(InputFormatPlugin):
comics.append((title, pages, wrappers))

if not comics:
raise ValueError('No comic pages found in %s'%stream.name)
raise ValueError(f'No comic pages found in {stream.name}')

mi = MetaInformation(os.path.basename(stream.name).rpartition('.')[0],
[_('Unknown')])
@ -299,8 +296,8 @@ class ComicInput(InputFormatPlugin):

pages = '\n'.join(page(i, src) for i, src in enumerate(pages))
base = os.path.dirname(pages[0])
wrapper = '''
<html xmlns="{}">
wrapper = f'''
<html xmlns="{XHTML_NS}">
<head>
<meta charset="utf-8"/>
<style type="text/css">
@ -317,10 +314,10 @@ class ComicInput(InputFormatPlugin):
</style>
</head>
<body>
{}
{pages}
</body>
</html>
'''.format(XHTML_NS, pages)
'''
path = os.path.join(base, cdir, 'wrapper.xhtml')
with open(path, 'wb') as f:
f.write(wrapper.encode('utf-8'))

@ -281,7 +281,7 @@ class EPUBInput(InputFormatPlugin):
path = getattr(stream, 'name', 'stream')

if opf is None:
raise ValueError('%s is not a valid EPUB file (could not find opf)'%path)
raise ValueError(f'{path} is not a valid EPUB file (could not find opf)')

opf = os.path.relpath(opf, os.getcwd())
parts = os.path.split(opf)
@ -369,7 +369,7 @@ class EPUBInput(InputFormatPlugin):
root = parse(raw, log=log)
ncx = safe_xml_fromstring('<ncx xmlns="http://www.daisy.org/z3986/2005/ncx/" version="2005-1" xml:lang="eng"><navMap/></ncx>')
navmap = ncx[0]
et = '{%s}type' % EPUB_NS
et = f'{{{EPUB_NS}}}type'
bn = os.path.basename(nav_path)

def add_from_li(li, parent):

@ -335,7 +335,7 @@ class EPUBOutput(OutputFormatPlugin):

key = re.sub(r'[^a-fA-F0-9]', '', uuid)
if len(key) < 16:
raise ValueError('UUID identifier %r is invalid'%uuid)
raise ValueError(f'UUID identifier {uuid!r} is invalid')
key = bytearray(from_hex_bytes((key + key)[:32]))
paths = []
with CurrentDir(tdir):
@ -362,10 +362,10 @@ class EPUBOutput(OutputFormatPlugin):
<enc:EncryptedData>
<enc:EncryptionMethod Algorithm="http://ns.adobe.com/pdf/enc#RC"/>
<enc:CipherData>
<enc:CipherReference URI="%s"/>
<enc:CipherReference URI="{}"/>
</enc:CipherData>
</enc:EncryptedData>
'''%(uri.replace('"', '\\"')))
'''.format(uri.replace('"', '\\"')))
if fonts:
ans = '''<encryption
xmlns="urn:oasis:names:tc:opendocument:xmlns:container"
@ -409,7 +409,7 @@ class EPUBOutput(OutputFormatPlugin):
frag = urlunquote(frag)
if frag and frag_pat.match(frag) is None:
self.log.warn(
'Removing fragment identifier %r from TOC as Adobe Digital Editions cannot handle it'%frag)
f'Removing fragment identifier {frag!r} from TOC as Adobe Digital Editions cannot handle it')
node.href = base

for x in self.oeb.spine:
@ -540,7 +540,7 @@ class EPUBOutput(OutputFormatPlugin):
from calibre.ebooks.oeb.polish.toc import item_at_top

def frag_is_at_top(root, frag):
elem = XPath('//*[@id="%s" or @name="%s"]'%(frag, frag))(root)
elem = XPath(f'//*[@id="{frag}" or @name="{frag}"]')(root)
if elem:
elem = elem[0]
else:

@ -77,7 +77,7 @@ class FB2Input(InputFormatPlugin):
parser = css_parser.CSSParser(fetcher=None,
log=logging.getLogger('calibre.css'))

XHTML_CSS_NAMESPACE = '@namespace "%s";\n' % XHTML_NS
XHTML_CSS_NAMESPACE = f'@namespace "{XHTML_NS}";\n'
text = XHTML_CSS_NAMESPACE + css
log.debug('Parsing stylesheet...')
stylesheet = parser.parseString(text)
@ -115,7 +115,7 @@ class FB2Input(InputFormatPlugin):
if not note.get('id', None):
note.set('id', 'cite%d' % c)
all_ids.add(note.get('id'))
a.set('href', '#%s' % note.get('id'))
a.set('href', '#{}'.format(note.get('id')))
for x in result.xpath('//*[@link_note or @link_cite]'):
x.attrib.pop('link_note', None)
x.attrib.pop('link_cite', None)
@ -148,7 +148,7 @@ class FB2Input(InputFormatPlugin):
cpath = os.path.abspath('fb2_cover_calibre_mi.jpg')
else:
for img in doc.xpath('//f:coverpage/f:image', namespaces=NAMESPACES):
href = img.get('{%s}href'%XLINK_NS, img.get('href', None))
href = img.get(f'{{{XLINK_NS}}}href', img.get('href', None))
if href is not None:
if href.startswith('#'):
href = href[1:]
@ -182,8 +182,7 @@ class FB2Input(InputFormatPlugin):
try:
data = base64_decode(raw)
except TypeError:
self.log.exception('Binary data with id=%s is corrupted, ignoring'%(
elem.get('id')))
self.log.exception('Binary data with id={} is corrupted, ignoring'.format(elem.get('id')))
else:
with open(fname, 'wb') as f:
f.write(data)

@ -254,7 +254,7 @@ class HTMLInput(InputFormatPlugin):
try:
link_ = link_.decode('utf-8', 'error')
except:
self.log.warn('Failed to decode link %r. Ignoring'%link_)
self.log.warn(f'Failed to decode link {link_!r}. Ignoring')
return None, None
if self.root_dir_for_absolute_links and link_.startswith('/'):
link_ = link_.lstrip('/')
@ -262,7 +262,7 @@ class HTMLInput(InputFormatPlugin):
try:
l = Link(link_, base if base else os.getcwd())
except:
self.log.exception('Failed to process link: %r'%link_)
self.log.exception(f'Failed to process link: {link_!r}')
return None, None
if l.path is None:
# Not a local resource
@ -311,7 +311,7 @@ class HTMLInput(InputFormatPlugin):
bhref = os.path.basename(link)
id, href = self.oeb.manifest.generate(id='added', href=sanitize_file_name(bhref))
if media_type == 'text/plain':
self.log.warn('Ignoring link to text file %r'%link_)
self.log.warn(f'Ignoring link to text file {link_!r}')
return None
if media_type == self.BINARY_MIME:
# Check for the common case, images

@ -47,7 +47,7 @@ class LITInput(InputFormatPlugin):
self.log('LIT file with all text in single <pre> tag detected')
html = separate_paragraphs_single_line(pre.text)
html = convert_basic(html).replace('<html>',
'<html xmlns="%s">'%XHTML_NS)
f'<html xmlns="{XHTML_NS}">')
html = xml_to_unicode(html, strip_encoding_pats=True,
resolve_entities=True)[0]
if opts.smarten_punctuation:

@ -39,19 +39,18 @@ class LRFInput(InputFormatPlugin):
char_button_map = {}
for x in doc.xpath('//CharButton[@refobj]'):
ro = x.get('refobj')
jump_button = doc.xpath('//*[@objid="%s"]'%ro)
jump_button = doc.xpath(f'//*[@objid="{ro}"]')
if jump_button:
jump_to = jump_button[0].xpath('descendant::JumpTo[@refpage and @refobj]')
if jump_to:
char_button_map[ro] = '%s.xhtml#%s'%(jump_to[0].get('refpage'),
char_button_map[ro] = '{}.xhtml#{}'.format(jump_to[0].get('refpage'),
jump_to[0].get('refobj'))
plot_map = {}
for x in doc.xpath('//Plot[@refobj]'):
ro = x.get('refobj')
image = doc.xpath('//Image[@objid="%s" and @refstream]'%ro)
image = doc.xpath(f'//Image[@objid="{ro}" and @refstream]')
if image:
imgstr = doc.xpath('//ImageStream[@objid="%s" and @file]'%
image[0].get('refstream'))
imgstr = doc.xpath('//ImageStream[@objid="{}" and @file]'.format(image[0].get('refstream')))
if imgstr:
plot_map[ro] = imgstr[0].get('file')

@ -153,7 +153,7 @@ class LRFOutput(OutputFormatPlugin):
ps['textheight'] = height
book = Book(title=opts.title, author=opts.author,
bookid=uuid4().hex,
publisher='%s %s'%(__appname__, __version__),
publisher=f'{__appname__} {__version__}',
category=_('Comic'), pagestyledefault=ps,
booksetting=BookSetting(screenwidth=width, screenheight=height))
for page in pages:

@ -37,7 +37,7 @@ class MOBIInput(InputFormatPlugin):
mr.extract_content('.', parse_cache)

if mr.kf8_type is not None:
log('Found KF8 MOBI of type %r'%mr.kf8_type)
log(f'Found KF8 MOBI of type {mr.kf8_type!r}')
if mr.kf8_type == 'joint':
self.mobi_is_joint = True
from calibre.ebooks.mobi.reader.mobi8 import Mobi8Reader

@ -83,7 +83,7 @@ class OEBOutput(OutputFormatPlugin):

def manifest_items_with_id(id_):
return root.xpath('//*[local-name() = "manifest"]/*[local-name() = "item" '
' and @id="%s"]'%id_)
f' and @id="{id_}"]')

if len(cov) == 1:
cov = cov[0]

@ -24,8 +24,7 @@ class PDBInput(InputFormatPlugin):
Reader = get_reader(header.ident)

if Reader is None:
raise PDBError('No reader available for format within container.\n Identity is %s. Book type is %s' %
(header.ident, IDENTITY_TO_NAME.get(header.ident, _('Unknown'))))
raise PDBError('No reader available for format within container.\n Identity is {}. Book type is {}'.format(header.ident, IDENTITY_TO_NAME.get(header.ident, _('Unknown'))))

log.debug(f'Detected ebook format as: {IDENTITY_TO_NAME[header.ident]} with identity: {header.ident}')

@ -44,7 +44,7 @@ class PDBOutput(OutputFormatPlugin):
Writer = get_writer(opts.format)

if Writer is None:
raise PDBError('No writer available for format %s.' % format)
raise PDBError(f'No writer available for format {format}.')

setattr(opts, 'max_line_length', 0)
setattr(opts, 'force_max_line_length', False)

@ -47,7 +47,7 @@ class PMLInput(InputFormatPlugin):
self.log.debug('Converting PML to HTML...')
hizer = PML_HTMLizer()
html = hizer.parse_pml(pml_stream.read().decode(ienc), html_path)
html = '<html><head><title></title></head><body>%s</body></html>'%html
html = f'<html><head><title></title></head><body>{html}</body></html>'
html_stream.write(html.encode('utf-8', 'replace'))

if pclose:
@ -106,7 +106,7 @@ class PMLInput(InputFormatPlugin):
html_path = os.path.join(os.getcwd(), html_name)

pages.append(html_name)
log.debug('Processing PML item %s...' % pml)
log.debug(f'Processing PML item {pml}...')
ttoc = self.process_pml(pml, html_path)
toc += ttoc
images = self.get_images(stream, tdir, True)

@ -111,8 +111,7 @@ class RecipeInput(InputFormatPlugin):
self.recipe_source = raw
if recipe.requires_version > numeric_version:
log.warn(
'Downloaded recipe needs calibre version at least: %s' %
('.'.join(recipe.requires_version)))
'Downloaded recipe needs calibre version at least: {}'.format('.'.join(recipe.requires_version)))
builtin = True
except:
log.exception('Failed to compile downloaded recipe. Falling '
@ -130,8 +129,7 @@ class RecipeInput(InputFormatPlugin):
log('Using downloaded builtin recipe')

if recipe is None:
raise ValueError('%r is not a valid recipe file or builtin recipe' %
recipe_or_file)
raise ValueError(f'{recipe_or_file!r} is not a valid recipe file or builtin recipe')

disabled = getattr(recipe, 'recipe_disabled', None)
if disabled is not None:

@ -164,7 +164,7 @@ class RTFInput(InputFormatPlugin):
try:
return self.rasterize_wmf(name)
except Exception:
self.log.exception('Failed to convert WMF image %r'%name)
self.log.exception(f'Failed to convert WMF image {name!r}')
return self.replace_wmf(name)

def replace_wmf(self, name):
@ -217,7 +217,7 @@ class RTFInput(InputFormatPlugin):
css += '\n' +'\n'.join(color_classes)

for cls, val in iteritems(border_styles):
css += '\n\n.%s {\n%s\n}'%(cls, val)
css += f'\n\n.{cls} {{\n{val}\n}}'

with open('styles.css', 'ab') as f:
f.write(css.encode('utf-8'))
@ -229,16 +229,16 @@ class RTFInput(InputFormatPlugin):
style = ['border-style: hidden', 'border-width: 1px',
'border-color: black']
for x in ('bottom', 'top', 'left', 'right'):
bs = elem.get('border-cell-%s-style'%x, None)
bs = elem.get(f'border-cell-{x}-style', None)
if bs:
cbs = border_style_map.get(bs, 'solid')
style.append('border-%s-style: %s'%(x, cbs))
bw = elem.get('border-cell-%s-line-width'%x, None)
style.append(f'border-{x}-style: {cbs}')
bw = elem.get(f'border-cell-{x}-line-width', None)
if bw:
style.append('border-%s-width: %spt'%(x, bw))
bc = elem.get('border-cell-%s-color'%x, None)
style.append(f'border-{x}-width: {bw}pt')
bc = elem.get(f'border-cell-{x}-color', None)
if bc:
style.append('border-%s-color: %s'%(x, bc))
style.append(f'border-{x}-color: {bc}')
style = ';\n'.join(style)
if style not in border_styles:
border_styles.append(style)

@ -98,9 +98,9 @@ class SNBInput(InputFormatPlugin):
lines = []
for line in snbc.find('.//body'):
if line.tag == 'text':
lines.append('<p>%s</p>' % html_encode(line.text))
lines.append(f'<p>{html_encode(line.text)}</p>')
elif line.tag == 'img':
lines.append('<p><img src="%s" /></p>' % html_encode(line.text))
lines.append(f'<p><img src="{html_encode(line.text)}" /></p>')
with open(os.path.join(tdir, fname), 'wb') as f:
f.write((HTML_TEMPLATE % (chapterName, '\n'.join(lines))).encode('utf-8', 'replace'))
oeb.toc.add(ch.text, fname)

@ -141,7 +141,7 @@ class SNBOutput(OutputFormatPlugin):
if tocitem.href.find('#') != -1:
item = tocitem.href.split('#')
if len(item) != 2:
log.error('Error in TOC item: %s' % tocitem)
log.error(f'Error in TOC item: {tocitem}')
else:
if item[0] in outputFiles:
outputFiles[item[0]].append((item[1], tocitem.title))
@ -176,16 +176,16 @@ class SNBOutput(OutputFormatPlugin):
from calibre.ebooks.oeb.base import OEB_DOCS, OEB_IMAGES
if m.hrefs[item.href].media_type in OEB_DOCS:
if item.href not in outputFiles:
log.debug('File %s is unused in TOC. Continue in last chapter' % item.href)
log.debug(f'File {item.href} is unused in TOC. Continue in last chapter')
mergeLast = True
else:
if oldTree is not None and mergeLast:
log.debug('Output the modified chapter again: %s' % lastName)
log.debug(f'Output the modified chapter again: {lastName}')
with open(os.path.join(snbcDir, lastName), 'wb') as f:
f.write(etree.tostring(oldTree, pretty_print=True, encoding='utf-8'))
mergeLast = False

log.debug('Converting %s to snbc...' % item.href)
log.debug(f'Converting {item.href} to snbc...')
snbwriter = SNBMLizer(log)
snbcTrees = None
if not mergeLast:
@ -199,11 +199,11 @@ class SNBOutput(OutputFormatPlugin):
with open(os.path.join(snbcDir, lastName), 'wb') as f:
f.write(etree.tostring(oldTree, pretty_print=True, encoding='utf-8'))
else:
log.debug('Merge %s with last TOC item...' % item.href)
log.debug(f'Merge {item.href} with last TOC item...')
snbwriter.merge_content(oldTree, oeb_book, item, [('', _('Start'))], opts)

# Output the last one if needed
log.debug('Output the last modified chapter again: %s' % lastName)
log.debug(f'Output the last modified chapter again: {lastName}')
if oldTree is not None and mergeLast:
with open(os.path.join(snbcDir, lastName), 'wb') as f:
f.write(etree.tostring(oldTree, pretty_print=True, encoding='utf-8'))
@ -211,7 +211,7 @@ class SNBOutput(OutputFormatPlugin):

for item in m:
if m.hrefs[item.href].media_type in OEB_IMAGES:
log.debug('Converting image: %s ...' % item.href)
log.debug(f'Converting image: {item.href} ...')
content = m.hrefs[item.href].data
# Convert & Resize image
self.HandleImage(content, os.path.join(snbiDir, ProcessFileName(item.href)))

@ -198,13 +198,13 @@ class TXTInput(InputFormatPlugin):
if file_ext in {'md', 'textile', 'markdown'}:
options.formatting_type = {'md': 'markdown'}.get(file_ext, file_ext)
log.info('File extension indicates particular formatting. '
'Forcing formatting type to: %s'%options.formatting_type)
f'Forcing formatting type to: {options.formatting_type}')
options.paragraph_type = 'off'

# Get the encoding of the document.
if options.input_encoding:
ienc = options.input_encoding
log.debug('Using user specified input encoding of %s' % ienc)
log.debug(f'Using user specified input encoding of {ienc}')
else:
det_encoding = detect(txt[:4096])
det_encoding, confidence = det_encoding['encoding'], det_encoding['confidence']
@ -218,7 +218,7 @@ class TXTInput(InputFormatPlugin):
log.debug(f'Detected input encoding as {ienc} with a confidence of {confidence * 100}%')
if not ienc:
ienc = 'utf-8'
log.debug('No input encoding specified and could not auto detect using %s' % ienc)
log.debug(f'No input encoding specified and could not auto detect using {ienc}')
# Remove BOM from start of txt as its presence can confuse markdown
import codecs
for bom in (codecs.BOM_UTF16_LE, codecs.BOM_UTF16_BE, codecs.BOM_UTF8, codecs.BOM_UTF32_LE, codecs.BOM_UTF32_BE):
@ -240,12 +240,12 @@ class TXTInput(InputFormatPlugin):
log.debug('Could not reliably determine paragraph type using block')
options.paragraph_type = 'block'
else:
log.debug('Auto detected paragraph type as %s' % options.paragraph_type)
log.debug(f'Auto detected paragraph type as {options.paragraph_type}')

# Detect formatting
if options.formatting_type == 'auto':
options.formatting_type = detect_formatting_type(txt)
log.debug('Auto detected formatting as %s' % options.formatting_type)
log.debug(f'Auto detected formatting as {options.formatting_type}')

if options.formatting_type == 'heuristic':
setattr(options, 'enable_heuristics', True)

@ -945,7 +945,7 @@ OptionRecommendation(name='search_replace',

from calibre import browser
from calibre.ptempfile import PersistentTemporaryFile
self.log('Downloading cover from %r'%url)
self.log(f'Downloading cover from {url!r}')
br = browser()
raw = br.open_novisit(url).read()
buf = io.BytesIO(raw)
@ -999,7 +999,7 @@ OptionRecommendation(name='search_replace',
setattr(self.opts, attr, x)
return
self.log.warn(
'Profile (%s) %r is no longer available, using default'%(which, sval))
f'Profile ({which}) {sval!r} is no longer available, using default')
for x in profiles():
if x.short_name == 'default':
setattr(self.opts, attr, x)
@ -1017,7 +1017,7 @@ OptionRecommendation(name='search_replace',
self.log('Conversion options changed from defaults:')
for rec in self.changed_options:
if rec.option.name not in ('username', 'password'):
self.log(' ', '%s:' % rec.option.name, repr(rec.recommended_value))
self.log(' ', f'{rec.option.name}:', repr(rec.recommended_value))
if self.opts.verbose > 1:
self.log.debug('Resolved conversion options')
try:
@ -1204,7 +1204,7 @@ OptionRecommendation(name='search_replace',
try:
fkey = list(map(float, fkey.split(',')))
except Exception:
self.log.error('Invalid font size key: %r ignoring'%fkey)
self.log.error(f'Invalid font size key: {fkey!r} ignoring')
fkey = self.opts.dest.fkey

from calibre.ebooks.oeb.transforms.jacket import Jacket
@ -1298,7 +1298,7 @@ OptionRecommendation(name='search_replace',
self.dump_oeb(self.oeb, out_dir)
self.log('Processed HTML written to:', out_dir)

self.log.info('Creating %s...'%self.output_plugin.name)
self.log.info(f'Creating {self.output_plugin.name}...')
our = CompositeProgressReporter(0.67, 1., self.ui_reporter)
self.output_plugin.report_progress = our
our(0., _('Running %s plugin')%self.output_plugin.name)

@ -41,7 +41,7 @@ _ligpat = re.compile('|'.join(LIGATURES))
|
||||
def sanitize_head(match):
|
||||
x = match.group(1).strip()
|
||||
x = _span_pat.sub('', x)
|
||||
return '<head>\n%s\n</head>' % x
|
||||
return f'<head>\n{x}\n</head>'
|
||||
|
||||
|
||||
def chap_head(match):
|
||||
@ -200,12 +200,12 @@ class Dehyphenator:
|
||||
"((ed)?ly|'?e?s||a?(t|s)?ion(s|al(ly)?)?|ings?|er|(i)?ous|"
|
||||
"(i|a)ty|(it)?ies|ive|gence|istic(ally)?|(e|a)nce|m?ents?|ism|ated|"
|
||||
"(e|u)ct(ed)?|ed|(i|ed)?ness|(e|a)ncy|ble|ier|al|ex|ian)$")
|
||||
self.suffixes = re.compile(r'^%s' % self.suffix_string, re.IGNORECASE)
|
||||
self.removesuffixes = re.compile(r'%s' % self.suffix_string, re.IGNORECASE)
|
||||
self.suffixes = re.compile(rf'^{self.suffix_string}', re.IGNORECASE)
|
||||
self.removesuffixes = re.compile(rf'{self.suffix_string}', re.IGNORECASE)
|
||||
# remove prefixes if the prefix was not already the point of hyphenation
|
||||
self.prefix_string = '^(dis|re|un|in|ex)'
|
||||
self.prefixes = re.compile(r'%s$' % self.prefix_string, re.IGNORECASE)
|
||||
self.removeprefix = re.compile(r'%s' % self.prefix_string, re.IGNORECASE)
|
||||
self.prefixes = re.compile(rf'{self.prefix_string}$', re.IGNORECASE)
|
||||
self.removeprefix = re.compile(rf'{self.prefix_string}', re.IGNORECASE)
|
||||
|
||||
def dehyphenate(self, match):
|
||||
firsthalf = match.group('firstpart')
|
||||
@ -295,10 +295,10 @@ class CSSPreProcessor:
|
||||
# Remove some of the broken CSS Microsoft products
|
||||
# create
|
||||
MS_PAT = re.compile(r'''
|
||||
(?P<start>^|;|\{)\s* # The end of the previous rule or block start
|
||||
(%s).+? # The invalid selectors
|
||||
(?P<end>$|;|\}) # The end of the declaration
|
||||
'''%'mso-|panose-|text-underline|tab-interval',
|
||||
(?P<start>^|;|\{{)\s* # The end of the previous rule or block start
|
||||
({}).+? # The invalid selectors
|
||||
(?P<end>$|;|\}}) # The end of the declaration
|
||||
'''.format('mso-|panose-|text-underline|tab-interval'),
|
||||
re.MULTILINE|re.IGNORECASE|re.VERBOSE)
|
||||
|
||||
def ms_sub(self, match):
|
||||
@ -433,13 +433,13 @@ def book_designer_rules():
            lambda match : '<span style="page-break-after:always"> </span>'),
        # Create header tags
        (re.compile(r'<h2[^><]*?id=BookTitle[^><]*?(align=)*(?(1)(\w+))*[^><]*?>[^><]*?</h2>', re.IGNORECASE),
            lambda match : '<h1 id="BookTitle" align="%s">%s</h1>'%(match.group(2) if match.group(2) else 'center', match.group(3))),
            lambda match : '<h1 id="BookTitle" align="{}">{}</h1>'.format(match.group(2) if match.group(2) else 'center', match.group(3))),
        (re.compile(r'<h2[^><]*?id=BookAuthor[^><]*?(align=)*(?(1)(\w+))*[^><]*?>[^><]*?</h2>', re.IGNORECASE),
            lambda match : '<h2 id="BookAuthor" align="%s">%s</h2>'%(match.group(2) if match.group(2) else 'center', match.group(3))),
            lambda match : '<h2 id="BookAuthor" align="{}">{}</h2>'.format(match.group(2) if match.group(2) else 'center', match.group(3))),
        (re.compile(r'<span[^><]*?id=title[^><]*?>(.*?)</span>', re.IGNORECASE|re.DOTALL),
            lambda match : '<h2 class="title">%s</h2>'%(match.group(1),)),
            lambda match : f'<h2 class="title">{match.group(1)}</h2>'),
        (re.compile(r'<span[^><]*?id=subtitle[^><]*?>(.*?)</span>', re.IGNORECASE|re.DOTALL),
            lambda match : '<h3 class="subtitle">%s</h3>'%(match.group(1),)),
            lambda match : f'<h3 class="subtitle">{match.group(1)}</h3>'),
    ]
    return ans

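In the book-designer hunk ruff chose .format() where the argument is a conditional expression and an f-string where it is a bare match.group() call; either way the generated markup is unchanged. A sketch with made-up values:

    align, text = None, 'My Title'
    old = '<h1 align="%s">%s</h1>' % (align if align else 'center', text)
    new = '<h1 align="{}">{}</h1>'.format(align if align else 'center', text)
    assert old == new == '<h1 align="center">My Title</h1>'
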
@ -494,8 +494,7 @@ class HTMLPreProcessor:
                    rules.insert(0, (search_re, replace_txt))
                    user_sr_rules[(search_re, replace_txt)] = search_pattern
                except Exception as e:
                    self.log.error('Failed to parse %r regexp because %s' %
                            (search, as_unicode(e)))
                    self.log.error(f'Failed to parse {search!r} regexp because {as_unicode(e)}')

            # search / replace using the sr?_search / sr?_replace options
            for i in range(1, 4):
@ -572,9 +571,8 @@ class HTMLPreProcessor:
                except Exception as e:
                    if rule in user_sr_rules:
                        self.log.error(
                            'User supplied search & replace rule: %s -> %s '
                            'failed with error: %s, ignoring.'%(
                                user_sr_rules[rule], rule[1], e))
                            f'User supplied search & replace rule: {user_sr_rules[rule]} -> {rule[1]} '
                            f'failed with error: {e}, ignoring.')
                    else:
                        raise

@ -595,10 +593,10 @@ class HTMLPreProcessor:
        # Handle broken XHTML w/ SVG (ugh)
        if 'svg:' in html and SVG_NS not in html:
            html = html.replace(
                '<html', '<html xmlns:svg="%s"' % SVG_NS, 1)
                '<html', f'<html xmlns:svg="{SVG_NS}"', 1)
        if 'xlink:' in html and XLINK_NS not in html:
            html = html.replace(
                '<html', '<html xmlns:xlink="%s"' % XLINK_NS, 1)
                '<html', f'<html xmlns:xlink="{XLINK_NS}"', 1)

        html = XMLDECL_RE.sub('', html)

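The user-supplied-rule hunk shows the multi-line case: Python concatenates adjacent string literals at compile time, and that applies to f-strings too, so a % expression wrapped across lines becomes two f-string fragments. A minimal sketch with placeholder values:

    pattern, repl, err = r'\d+', 'N', ValueError('bad group')
    msg = (f'User supplied search & replace rule: {pattern} -> {repl} '
           f'failed with error: {err}, ignoring.')
    assert msg.endswith('failed with error: bad group, ignoring.')
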
@ -174,7 +174,7 @@ class HeuristicProcessor:
        ]

        for word in ITALICIZE_WORDS:
            html = re.sub(r'(?<=\s|>)' + re.escape(word) + r'(?=\s|<)', '<i>%s</i>' % word, html)
            html = re.sub(r'(?<=\s|>)' + re.escape(word) + r'(?=\s|<)', f'<i>{word}</i>', html)

        search_text = re.sub(r'(?s)<head[^>]*>.*?</head>', '', html)
        search_text = re.sub(r'<[^>]*>', '', search_text)
@ -183,7 +183,7 @@ class HeuristicProcessor:
            ital_string = str(match.group('words'))
            # self.log.debug("italicising "+str(match.group(0))+" with <i>"+ital_string+"</i>")
            try:
                html = re.sub(re.escape(str(match.group(0))), '<i>%s</i>' % ital_string, html)
                html = re.sub(re.escape(str(match.group(0))), f'<i>{ital_string}</i>', html)
            except OverflowError:
                # match.group(0) was too large to be compiled into a regex
                continue
@ -305,7 +305,7 @@ class HeuristicProcessor:

        chapter_marker = arg_ignorecase+init_lookahead+full_chapter_line+blank_lines+lp_n_lookahead_open+n_lookahead+lp_n_lookahead_close+ \
            lp_opt_title_open+title_line_open+title_header_open+lp_title+title_header_close+title_line_close+lp_opt_title_close
        chapdetect = re.compile(r'%s' % chapter_marker)
        chapdetect = re.compile(rf'{chapter_marker}')

        if analyze:
            hits = len(chapdetect.findall(html))
@ -383,9 +383,9 @@ class HeuristicProcessor:
        em_en_unwrap_regex = em_en_lookahead+line_ending+blanklines+line_opening
        shy_unwrap_regex = soft_hyphen+line_ending+blanklines+line_opening

        unwrap = re.compile('%s' % unwrap_regex, re.UNICODE)
        em_en_unwrap = re.compile('%s' % em_en_unwrap_regex, re.UNICODE)
        shy_unwrap = re.compile('%s' % shy_unwrap_regex, re.UNICODE)
        unwrap = re.compile(f'{unwrap_regex}', re.UNICODE)
        em_en_unwrap = re.compile(f'{em_en_unwrap_regex}', re.UNICODE)
        shy_unwrap = re.compile(f'{shy_unwrap_regex}', re.UNICODE)

        if format == 'txt':
            content = unwrap.sub(' ', content)
@ -449,7 +449,7 @@ class HeuristicProcessor:
        for i in range(2):
            html = re.sub(r'\s*<span[^>]*>\s*(<span[^>]*>\s*</span>){0,2}\s*</span>\s*', ' ', html)
            html = re.sub(
                r'\s*{open}\s*({open}\s*{close}\s*){{0,2}}\s*{close}'.format(open=open_fmt_pat, close=close_fmt_pat), ' ', html)
                rf'\s*{open_fmt_pat}\s*({open_fmt_pat}\s*{close_fmt_pat}\s*){{0,2}}\s*{close_fmt_pat}', ' ', html)
        # delete surrounding divs from empty paragraphs
        html = re.sub(r'<div[^>]*>\s*<p[^>]*>\s*</p>\s*</div>', '<p> </p>', html)
        # Empty heading tags
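The {{0,2}} above is the same escaping rule again, now inside an rf-string: the regex quantifier's braces must be doubled to survive as a literal {0,2} while {open_fmt_pat} and friends interpolate. With stand-in fragments:

    open_fmt_pat, close_fmt_pat = '<span[^>]*>', '</span>'  # illustrative values
    pat = rf'\s*{open_fmt_pat}\s*({open_fmt_pat}\s*{close_fmt_pat}\s*){{0,2}}\s*{close_fmt_pat}'
    assert '{0,2}' in pat  # doubled braces emit the literal quantifier
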
@ -560,7 +560,7 @@ class HeuristicProcessor:
        line_two = '(?P<line_two>'+re.sub(r'(ou|in|cha)', 'linetwo_', self.line_open)+ \
            r'\s*(?P<line_two_content>.*?)'+re.sub(r'(ou|in|cha)', 'linetwo_', self.line_close)+')'
        div_break_candidate_pattern = line+r'\s*<div[^>]*>\s*</div>\s*'+line_two
        div_break_candidate = re.compile(r'%s' % div_break_candidate_pattern, re.IGNORECASE|re.UNICODE)
        div_break_candidate = re.compile(rf'{div_break_candidate_pattern}', re.IGNORECASE|re.UNICODE)

        def convert_div_softbreaks(match):
            init_is_paragraph = self.check_paragraph(match.group('init_content'))
@ -583,7 +583,7 @@ class HeuristicProcessor:
    def detect_scene_breaks(self, html):
        scene_break_regex = self.line_open+'(?!('+self.common_in_text_beginnings+'|.*?'+self.common_in_text_endings+ \
            r'<))(?P<break>((?P<break_char>((?!\s)\W))\s*(?P=break_char)?){1,10})\s*'+self.line_close
        scene_breaks = re.compile(r'%s' % scene_break_regex, re.IGNORECASE|re.UNICODE)
        scene_breaks = re.compile(rf'{scene_break_regex}', re.IGNORECASE|re.UNICODE)
        html = scene_breaks.sub(self.scene_break_open+r'\g<break></p>', html)
        return html

@ -762,7 +762,7 @@ def test(scale=0.25):
    for r, color in enumerate(sorted(default_color_themes)):
        for c, style in enumerate(sorted(all_styles())):
            mi.series_index = c + 1
            mi.title = 'An algorithmic cover [%s]' % color
            mi.title = f'An algorithmic cover [{color}]'
            prefs = override_prefs(cprefs, override_color_theme=color, override_style=style)
            scale_cover(prefs, scale)
            img = generate_cover(mi, prefs=prefs, as_qimage=True)

@ -85,7 +85,7 @@ class BZZDecoderError(Exception):
        self.msg = msg

    def __str__(self):
        return 'BZZDecoderError: %s' % (self.msg)
        return f'BZZDecoderError: {self.msg}'


# This table has been designed for the ZPCoder

@ -39,7 +39,7 @@ inherit = Inherit()


def binary_property(parent, name, XPath, get):
    vals = XPath('./w:%s' % name)(parent)
    vals = XPath(f'./w:{name}')(parent)
    if not vals:
        return inherit
    val = get(vals[0], 'w:val', 'on')
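Since the interpolated value here is just an element local-name, the % and f-string forms of the XPath selector are byte-identical:

    name = 'framePr'  # illustrative element name
    assert './w:%s' % name == f'./w:{name}' == './w:framePr'
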
@ -108,7 +108,7 @@ border_edges = ('left', 'top', 'right', 'bottom', 'between')

def read_single_border(parent, edge, XPath, get):
    color = style = width = padding = None
    for elem in XPath('./w:%s' % edge)(parent):
    for elem in XPath(f'./w:{edge}')(parent):
        c = get(elem, 'w:color')
        if c is not None:
            color = simple_color(c)
@ -145,20 +145,20 @@ def read_border(parent, dest, XPath, get, border_edges=border_edges, name='pBdr'


def border_to_css(edge, style, css):
    bs = getattr(style, 'border_%s_style' % edge)
    bc = getattr(style, 'border_%s_color' % edge)
    bw = getattr(style, 'border_%s_width' % edge)
    bs = getattr(style, f'border_{edge}_style')
    bc = getattr(style, f'border_{edge}_color')
    bw = getattr(style, f'border_{edge}_width')
    if isinstance(bw, numbers.Number):
        # WebKit needs at least 1pt to render borders and 3pt to render double borders
        bw = max(bw, (3 if bs == 'double' else 1))
    if bs is not inherit and bs is not None:
        css['border-%s-style' % edge] = bs
        css[f'border-{edge}-style'] = bs
    if bc is not inherit and bc is not None:
        css['border-%s-color' % edge] = bc
        css[f'border-{edge}-color'] = bc
    if bw is not inherit and bw is not None:
        if isinstance(bw, numbers.Number):
            bw = '%.3gpt' % bw
        css['border-%s-width' % edge] = bw
            bw = f'{bw:.3g}pt'
        css[f'border-{edge}-width'] = bw


def read_indent(parent, dest, XPath, get):
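The width conversion in border_to_css relies on the printf specifier mapping directly onto the format-spec mini-language: %.3g becomes :.3g after the colon, so CSS lengths round-trip identically:

    bw = 1.33333
    assert '%.3gpt' % bw == f'{bw:.3g}pt' == '1.33pt'
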
@ -305,12 +305,12 @@ class Frame:
        else:
            if self.h_rule != 'auto':
                t = 'min-height' if self.h_rule == 'atLeast' else 'height'
                ans[t] = '%.3gpt' % self.h
                ans[t] = f'{self.h:.3g}pt'
        if self.w is not None:
            ans['width'] = '%.3gpt' % self.w
        ans['padding-top'] = ans['padding-bottom'] = '%.3gpt' % self.v_space
            ans['width'] = f'{self.w:.3g}pt'
        ans['padding-top'] = ans['padding-bottom'] = f'{self.v_space:.3g}pt'
        if self.wrap not in {None, 'none'}:
            ans['padding-left'] = ans['padding-right'] = '%.3gpt' % self.h_space
            ans['padding-left'] = ans['padding-right'] = f'{self.h_space:.3g}pt'
        if self.x_align is None:
            fl = 'left' if self.x/page.width < 0.5 else 'right'
        else:
@ -412,12 +412,12 @@ class ParagraphStyle:
            c['page-break-after'] = 'avoid'
        for edge in ('left', 'top', 'right', 'bottom'):
            border_to_css(edge, self, c)
            val = getattr(self, 'padding_%s' % edge)
            val = getattr(self, f'padding_{edge}')
            if val is not inherit:
                c['padding-%s' % edge] = '%.3gpt' % val
                c[f'padding-{edge}'] = f'{val:.3g}pt'
            val = getattr(self, 'margin_%s' % edge)
            val = getattr(self, f'margin_{edge}')
            if val is not inherit:
                c['margin-%s' % edge] = val
                c[f'margin-{edge}'] = val

        if self.line_height not in {inherit, '1'}:
            c['line-height'] = self.line_height
@ -426,7 +426,7 @@ class ParagraphStyle:
            val = getattr(self, x)
            if val is not inherit:
                if x == 'font_size':
                    val = '%.3gpt' % val
                    val = f'{val:.3g}pt'
                c[x.replace('_', '-')] = val
        ta = self.text_align
        if ta is not inherit:
@ -465,11 +465,11 @@ class ParagraphStyle:

    def apply_between_border(self):
        for prop in ('width', 'color', 'style'):
            setattr(self, 'border_bottom_%s' % prop, getattr(self, 'border_between_%s' % prop))
            setattr(self, f'border_bottom_{prop}', getattr(self, f'border_between_{prop}'))

    def has_visible_border(self):
        for edge in border_edges[:-1]:
            bw, bs = getattr(self, 'border_%s_width' % edge), getattr(self, 'border_%s_style' % edge)
            bw, bs = getattr(self, f'border_{edge}_width'), getattr(self, f'border_{edge}_style')
            if bw is not inherit and bw and bs is not inherit and bs != 'none':
                return True
        return False

@ -149,7 +149,7 @@ def read_font(parent, dest, XPath, get):
    for col in XPath('./w:rFonts')(parent):
        val = get(col, 'w:asciiTheme')
        if val:
            val = '|%s|' % val
            val = f'|{val}|'
        else:
            val = get(col, 'w:ascii')
            if val:
@ -168,7 +168,7 @@ def read_font_cs(parent, dest, XPath, get):
    for col in XPath('./w:rFonts')(parent):
        val = get(col, 'w:csTheme')
        if val:
            val = '|%s|' % val
            val = f'|{val}|'
        else:
            val = get(col, 'w:cs')
            if val:
@ -248,9 +248,9 @@ class RunStyle:
        for x in ('color', 'style', 'width'):
            val = getattr(self, 'border_'+x)
            if x == 'width' and val is not inherit:
                val = '%.3gpt' % val
                val = f'{val:.3g}pt'
            if val is not inherit:
                ans['border-%s' % x] = val
                ans[f'border-{x}'] = val

    def clear_border_css(self):
        for x in ('color', 'style', 'width'):
@ -282,7 +282,7 @@ class RunStyle:

        self.get_border_css(c)
        if self.padding is not inherit:
            c['padding'] = '%.3gpt' % self.padding
            c['padding'] = f'{self.padding:.3g}pt'

        for x in ('color', 'background_color'):
            val = getattr(self, x)
@ -292,10 +292,10 @@ class RunStyle:
        for x in ('letter_spacing', 'font_size'):
            val = getattr(self, x)
            if val is not inherit:
                c[x.replace('_', '-')] = '%.3gpt' % val
                c[x.replace('_', '-')] = f'{val:.3g}pt'

        if self.position is not inherit:
            c['vertical-align'] = '%.3gpt' % self.position
            c['vertical-align'] = f'{self.position:.3g}pt'

        if self.highlight is not inherit and self.highlight != 'transparent':
            c['background-color'] = self.highlight

@ -127,7 +127,7 @@ def cleanup_markup(log, root, styles, dest_dir, detect_cover, XPath, uuid):
            span[-1].tail = '\xa0'

    # Move <hr>s outside paragraphs, if possible.
    pancestor = XPath('|'.join('ancestor::%s[1]' % x for x in ('p', 'h1', 'h2', 'h3', 'h4', 'h5', 'h6')))
    pancestor = XPath('|'.join(f'ancestor::{x}[1]' for x in ('p', 'h1', 'h2', 'h3', 'h4', 'h5', 'h6')))
    for hr in root.xpath('//span/hr'):
        p = pancestor(hr)
        if p:
@ -156,7 +156,7 @@ def cleanup_markup(log, root, styles, dest_dir, detect_cover, XPath, uuid):
    # Process dir attributes
    class_map = dict(itervalues(styles.classes))
    parents = ('p', 'div') + tuple('h%d' % i for i in range(1, 7))
    for parent in root.xpath('//*[(%s)]' % ' or '.join('name()="%s"' % t for t in parents)):
    for parent in root.xpath('//*[({})]'.format(' or '.join(f'name()="{t}"' for t in parents))):
        # Ensure that children of rtl parents that are not rtl have an
        # explicit dir set. Also, remove dir from children if it is the same as
        # that of the parent.
@ -172,7 +172,7 @@ def cleanup_markup(log, root, styles, dest_dir, detect_cover, XPath, uuid):

    # Remove unnecessary span tags that are the only child of a parent block
    # element
    for parent in root.xpath('//*[(%s) and count(span)=1]' % ' or '.join('name()="%s"' % t for t in parents)):
    for parent in root.xpath('//*[({}) and count(span)=1]'.format(' or '.join(f'name()="{t}"' for t in parents))):
        if len(parent) == 1 and not parent.text and not parent[0].tail and not parent[0].get('id', None):
            # We have a block whose contents are entirely enclosed in a <span>
            span = parent[0]
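These cleanup_markup hunks show a mixed strategy: the inner pieces become f'name()="{t}"' while the outer wrapper keeps .format(), plausibly because an f-string expression could not reuse the enclosing quote character before Python 3.12. The result is unchanged either way:

    parents = ('p', 'div')
    old = '//*[(%s)]' % ' or '.join('name()="%s"' % t for t in parents)
    new = '//*[({})]'.format(' or '.join(f'name()="{t}"' for t in parents))
    assert old == new == '//*[(name()="p" or name()="div")]'
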
@ -137,7 +137,7 @@ class DOCX:
        try:
            raw = self.read('[Content_Types].xml')
        except KeyError:
            raise InvalidDOCX('The file %s docx file has no [Content_Types].xml' % self.name)
            raise InvalidDOCX(f'The file {self.name} docx file has no [Content_Types].xml')
        root = fromstring(raw)
        self.content_types = {}
        self.default_content_types = {}
@ -159,7 +159,7 @@ class DOCX:
        try:
            raw = self.read('_rels/.rels')
        except KeyError:
            raise InvalidDOCX('The file %s docx file has no _rels/.rels' % self.name)
            raise InvalidDOCX(f'The file {self.name} docx file has no _rels/.rels')
        root = fromstring(raw)
        self.relationships = {}
        self.relationships_rmap = {}
@ -177,7 +177,7 @@ class DOCX:
        if name is None:
            names = tuple(n for n in self.names if n == 'document.xml' or n.endswith('/document.xml'))
            if not names:
                raise InvalidDOCX('The file %s docx file has no main document' % self.name)
                raise InvalidDOCX(f'The file {self.name} docx file has no main document')
            name = names[0]
        return name

@ -145,11 +145,11 @@ class Fields:
        field_types = ('hyperlink', 'xe', 'index', 'ref', 'noteref')
        parsers = {x.upper():getattr(self, 'parse_'+x) for x in field_types}
        parsers.update({x:getattr(self, 'parse_'+x) for x in field_types})
        field_parsers = {f.upper():globals()['parse_%s' % f] for f in field_types}
        field_parsers.update({f:globals()['parse_%s' % f] for f in field_types})
        field_parsers = {f.upper():globals()[f'parse_{f}'] for f in field_types}
        field_parsers.update({f:globals()[f'parse_{f}'] for f in field_types})

        for f in field_types:
            setattr(self, '%s_fields' % f, [])
            setattr(self, f'{f}_fields', [])
        unknown_fields = {'TOC', 'toc', 'PAGEREF', 'pageref'}  # The TOC and PAGEREF fields are handled separately

        for field in self.fields:
@ -159,7 +159,7 @@ class Fields:
            if func is not None:
                func(field, field_parsers[field.name], log)
            elif field.name not in unknown_fields:
                log.warn('Encountered unknown field: %s, ignoring it.' % field.name)
                log.warn(f'Encountered unknown field: {field.name}, ignoring it.')
                unknown_fields.add(field.name)

    def get_runs(self, field):
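The Fields hunk builds function names dynamically; f'parse_{f}' works even though the loop variable is itself named f, since the string prefix and the variable live in different namespaces. A self-contained sketch (parse_hyperlink is a stand-in, not the real parser):

    def parse_hyperlink(field):
        return 'hyperlink'

    field_types = ('hyperlink',)
    field_parsers = {f.upper(): globals()[f'parse_{f}'] for f in field_types}
    assert field_parsers['HYPERLINK'] is parse_hyperlink
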
@ -64,7 +64,7 @@ class Family:

        self.embedded = {}
        for x in ('Regular', 'Bold', 'Italic', 'BoldItalic'):
            for y in XPath('./w:embed%s[@r:id]' % x)(elem):
            for y in XPath(f'./w:embed{x}[@r:id]')(elem):
                rid = get(y, 'r:id')
                key = get(y, 'w:fontKey')
                subsetted = get(y, 'w:subsetted') in {'1', 'true', 'on'}
@ -166,14 +166,14 @@ class Fonts:
            os.mkdir(dest_dir)
        fname = self.write(name, dest_dir, docx, variant)
        if fname is not None:
            d = {'font-family':'"%s"' % name.replace('"', ''), 'src': 'url("fonts/%s")' % fname}
            d = {'font-family':'"{}"'.format(name.replace('"', '')), 'src': f'url("fonts/{fname}")'}
            if 'Bold' in variant:
                d['font-weight'] = 'bold'
            if 'Italic' in variant:
                d['font-style'] = 'italic'
            d = [f'{k}: {v}' for k, v in iteritems(d)]
            d = ';\n\t'.join(d)
            defs.append('@font-face {\n\t%s\n}\n' % d)
            defs.append(f'@font-face {{\n\t{d}\n}}\n')
        return '\n'.join(defs)

    def write(self, name, dest_dir, docx, variant):

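The font-family line is the clearest case for keeping .format(): its argument contains both quote characters, which an f-string expression could not nest before Python 3.12, while the @font-face wrapper does move to an f-string with doubled braces. A sketch with a made-up family name (using dict.items() in place of calibre's iteritems shim):

    name, fname = 'Libre "Caslon"', 'LibreCaslon.ttf'
    d = {'font-family': '"{}"'.format(name.replace('"', '')), 'src': f'url("fonts/{fname}")'}
    body = ';\n\t'.join(f'{k}: {v}' for k, v in d.items())
    css = f'@font-face {{\n\t{body}\n}}\n'
    assert css.startswith('@font-face {') and css.rstrip().endswith('}')
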
Some files were not shown because too many files have changed in this diff