Mirror of https://github.com/kovidgoyal/calibre.git, synced 2025-07-09 03:04:10 -04:00
use f-string instead of format call (extra-edit)
ruff 'UP030,UP032' --extend-exclude "src/calibre/*" !partial
This commit is contained in:
parent 02854d8b8c
commit 19994000c9
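For context, the rewrite applied throughout this diff is the one ruff's UP030 (drop explicit positional indices in format strings) and UP032 (convert str.format() calls to f-strings) rules perform. A minimal sketch, mirroring the first hunk below; the standalone variable here is illustrative and not taken from the calibre sources:

# Minimal sketch of the UP030/UP032 rewrite; `sz` is a hypothetical value used only for illustration.
sz = 128

# Before: explicit positional indices inside a .format() call
iname_old = 'icon_{0}x{0}.png'.format(sz)

# After: the equivalent f-string produced by ruff's autofix
iname_new = f'icon_{sz}x{sz}.png'

assert iname_old == iname_new == 'icon_128x128.png'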
@@ -26,7 +26,7 @@ for name, src in sources.items():
 os.chdir(iconset)
 try:
 for sz in (16, 32, 128, 256, 512, 1024):
-iname = 'icon_{0}x{0}.png'.format(sz)
+iname = f'icon_{sz}x{sz}.png'
 iname2x = 'icon_{0}x{0}@2x.png'.format(sz // 2)
 if src.endswith('.svg'):
 subprocess.check_call(['rsvg-convert', src, '-w', str(sz), '-h', str(sz), '-o', iname])
@@ -35,7 +35,7 @@ for name, src in sources.items():
 if sz == 512:
 shutil.copy2(src, iname)
 else:
-subprocess.check_call(['convert', src, '-resize', '{0}x{0}'.format(sz), iname])
+subprocess.check_call(['convert', src, '-resize', f'{sz}x{sz}', iname])
 if sz > 16:
 shutil.copy2(iname, iname2x)
 if sz > 512:
@@ -23,7 +23,7 @@ for name, src in sources.items():
 try:
 names = []
 for sz in (16, 24, 32, 48, 64, 256):
-iname = os.path.join('ico_temp', '{0}x{0}.png'.format(sz))
+iname = os.path.join('ico_temp', f'{sz}x{sz}.png')
 subprocess.check_call(['rsvg-convert', src, '-w', str(sz), '-h', str(sz), '-o', iname])
 subprocess.check_call(['optipng', '-o7', '-strip', 'all', iname])
 if sz >= 128:
@@ -26,7 +26,7 @@ def clone_node(node, parent):
 def merge():
 base = os.path.dirname(os.path.abspath(__file__))
 ans = etree.fromstring(
-'<svg xmlns="{}" xmlns:xlink="{}"/>'.format(SVG_NS, XLINK_NS),
+f'<svg xmlns="{SVG_NS}" xmlns:xlink="{XLINK_NS}"/>',
 parser=etree.XMLParser(
 recover=True, no_network=True, resolve_entities=False
 )
@@ -97,7 +97,7 @@ today_fmt = '%B %d, %Y'
 unused_docs = ['global', 'cli/global']

 locale_dirs = ['locale/']
-title = '{} User Manual'.format(__appname__)
+title = f'{__appname__} User Manual'
 needs_localization = language not in {'en', 'eng'}
 if needs_localization:
 import gettext
@@ -193,7 +193,7 @@ del sort_languages, get_language

 epub_author = 'Kovid Goyal'
 epub_publisher = 'Kovid Goyal'
-epub_copyright = '© {} Kovid Goyal'.format(date.today().year)
+epub_copyright = f'© {date.today().year} Kovid Goyal'
 epub_description = 'Comprehensive documentation for calibre'
 epub_identifier = 'https://manual.calibre-ebook.com'
 epub_scheme = 'url'
@@ -195,13 +195,13 @@ details and examples.
 lines = []
 for cmd in COMMANDS:
 parser = option_parser_for(cmd)()
-lines += ['.. _calibredb-{}-{}:'.format(language, cmd), '']
+lines += [f'.. _calibredb-{language}-{cmd}:', '']
 lines += [cmd, '~'*20, '']
 usage = parser.usage.strip()
 usage = [i for i in usage.replace('%prog', 'calibredb').splitlines()]
 cmdline = ' '+usage[0]
 usage = usage[1:]
-usage = [re.sub(r'({})([^a-zA-Z0-9])'.format(cmd), r':command:`\1`\2', i) for i in usage]
+usage = [re.sub(rf'({cmd})([^a-zA-Z0-9])', r':command:`\1`\2', i) for i in usage]
 lines += ['.. code-block:: none', '', cmdline, '']
 lines += usage
 groups = [(None, None, parser.option_list)]
@@ -257,7 +257,7 @@ def generate_ebook_convert_help(preamble, app):
 def update_cli_doc(name, raw, language):
 if isinstance(raw, bytes):
 raw = raw.decode('utf-8')
-path = 'generated/{}/{}.rst'.format(language, name)
+path = f'generated/{language}/{name}.rst'
 old_raw = open(path, encoding='utf-8').read() if os.path.exists(path) else ''
 if not os.path.exists(path) or old_raw != raw:
 import difflib
@@ -352,7 +352,7 @@ def cli_docs(language):
 usage = [mark_options(i) for i in parser.usage.replace('%prog', cmd).splitlines()]
 cmdline = usage[0]
 usage = usage[1:]
-usage = [i.replace(cmd, ':command:`{}`'.format(cmd)) for i in usage]
+usage = [i.replace(cmd, f':command:`{cmd}`') for i in usage]
 usage = '\n'.join(usage)
 preamble = CLI_PREAMBLE.format(cmd=cmd, cmdref=cmd + '-' + language, cmdline=cmdline, usage=usage)
 if cmd == 'ebook-convert':
@@ -382,7 +382,7 @@ def template_docs(language):

 def localized_path(app, langcode, pagename):
 href = app.builder.get_target_uri(pagename)
-href = re.sub(r'generated/[a-z]+/', 'generated/{}/'.format(langcode), href)
+href = re.sub(r'generated/[a-z]+/', f'generated/{langcode}/', href)
 prefix = '/'
 if langcode != 'en':
 prefix += langcode + '/'
@@ -405,7 +405,7 @@ def setup_man_pages(app):
 documented_cmds = get_cli_docs()[0]
 man_pages = []
 for cmd, option_parser in documented_cmds:
-path = 'generated/{}/{}'.format(app.config.language, cmd)
+path = f'generated/{app.config.language}/{cmd}'
 man_pages.append((
 path, cmd, cmd, 'Kovid Goyal', 1
 ))
@@ -49,8 +49,8 @@ class EPUBHelpBuilder(EpubBuilder):
 imgname = container.href_to_name(img.get('src'), name)
 fmt, width, height = identify(container.raw_data(imgname))
 if width == -1:
-raise ValueError('Failed to read size of: {}'.format(imgname))
-img.set('style', 'width: {}px; height: {}px'.format(width, height))
+raise ValueError(f'Failed to read size of: {imgname}')
+img.set('style', f'width: {width}px; height: {height}px')

 def fix_opf(self, container):
 spine_names = {n for n, l in container.spine_names}
@@ -75,7 +75,7 @@ class EPUBHelpBuilder(EpubBuilder):

 # Ensure that the cover-image property is set
 cover_id = rmap['_static/' + self.config.epub_cover[0]]
-for item in container.opf_xpath('//opf:item[@id="{}"]'.format(cover_id)):
+for item in container.opf_xpath(f'//opf:item[@id="{cover_id}"]'):
 item.set('properties', 'cover-image')
 for item in container.opf_xpath('//opf:item[@href="epub-cover.xhtml"]'):
 item.set('properties', 'svg calibre:title-page')
@@ -54,8 +54,8 @@ class checkbox(nodes.Element):
 def visit_checkbox(self, node):
 cid = node['ids'][0]
 node['classes'] = []
-self.body.append('<input id="{0}" type="checkbox" />'
-'<label for="{0}"> </label>'.format(cid))
+self.body.append(f'<input id="{cid}" type="checkbox" />'
+f'<label for="{cid}"> </label>')


 def modify_li(li):
@@ -66,7 +66,7 @@ def modify_li(li):
 li['classes'].append('leaf-node')
 else:
 c = checkbox()
-c['ids'] = ['collapse-checkbox-{}'.format(next(id_counter))]
+c['ids'] = [f'collapse-checkbox-{next(id_counter)}']
 li.insert(0, c)

setup.py
@@ -75,7 +75,7 @@ def main(args=sys.argv):
 print('\nWhere command is one of:')
 print()
 for x in sorted(commands.__all__):
-print('{:20} -'.format(x), end=' ')
+print(f'{x:20} -', end=' ')
 c = getattr(commands, x)
 desc = getattr(c, 'short_description', c.description)
 print(desc)
@@ -95,7 +95,7 @@ def main(args=sys.argv):
 parser = option_parser()
 command.add_all_options(parser)
 parser.set_usage(
-'Usage: python setup.py {} [options]\n\n'.format(args[1]) + command.description)
+f'Usage: python setup.py {args[1]} [options]\n\n' + command.description)

 opts, args = parser.parse_args(args)
 opts.cli_args = args[2:]
@@ -83,7 +83,7 @@ def lazy_load(name):
 try:
 return getattr(build_environment, name)
 except AttributeError:
-raise ImportError('The setup.build_environment module has no symbol named: {}'.format(name))
+raise ImportError(f'The setup.build_environment module has no symbol named: {name}')


 def expand_file_list(items, is_paths=True, cross_compile_for='native'):
@@ -539,7 +539,7 @@ class Build(Command):
 extern_decl = 'extern "C"' if ext.needs_cxx else ''
 cflags = [
 '-DCALIBRE_MODINIT_FUNC='
-'{} __attribute__ ((visibility ("default"))) {}'.format(extern_decl, return_type)]
+f'{extern_decl} __attribute__ ((visibility ("default"))) {return_type}']
 if ext.needs_cxx and ext.needs_cxx_std:
 if env.cc_output_flag.startswith('/') and ext.needs_cxx == '11':
 ext.needs_cxx = '14'
@@ -617,8 +617,8 @@ class Build(Command):
 try:
 subprocess.check_call(*args, **kwargs)
 except:
-cmdline = ' '.join(['"{}"'.format(arg) if ' ' in arg else arg for arg in args[0]])
-print('Error while executing: {}\n'.format(cmdline))
+cmdline = ' '.join([f'"{arg}"' if ' ' in arg else arg for arg in args[0]])
+print(f'Error while executing: {cmdline}\n')
 raise

 def build_headless(self):
@@ -113,7 +113,7 @@ qraw = subprocess.check_output([QMAKE, '-query']).decode('utf-8')


 def readvar(name):
-return re.search('^{}:(.+)$'.format(name), qraw, flags=re.M).group(1).strip()
+return re.search(f'^{name}:(.+)$', qraw, flags=re.M).group(1).strip()


 qt = {x:readvar(y) for x, y in {'libs':'QT_INSTALL_LIBS', 'plugins':'QT_INSTALL_PLUGINS'}.items()}
@@ -56,11 +56,11 @@ class Bug:
 if int(bug) > 100000 and action != 'See':
 self.close_bug(bug, action)
 return match.group() + f' [{summary}]({LAUNCHPAD_BUG.format(bug)})'
-return match.group() + ' ({})'.format(summary)
+return match.group() + f' ({summary})'
 return match.group()

 def close_bug(self, bug, action):
-print('Closing bug #{}'.format(bug))
+print(f'Closing bug #{bug}')
 suffix = (
 'The fix will be in the next release. '
 'calibre is usually released every alternate Friday.'
@@ -24,7 +24,7 @@ class GitVersion(Command):
 nv = nv.replace('-', '.')
 except subprocess.CalledProcessError:
 raise SystemExit('Error: not a git checkout')
-newsrc = re.sub(r'(git_version = ).*', r'\1{}'.format(repr(nv)), src)
+newsrc = re.sub(r'(git_version = ).*', rf'\1{nv!r}', src)
 self.info('new version is:', nv)

 with open(constants_file, 'wb') as f:
@@ -61,7 +61,7 @@ class GUI(Command):
 if self.newer(self.QRC, sources):
 self.info('Creating images.qrc')
 for s in sources:
-files.append('<file>{}</file>'.format(s))
+files.append(f'<file>{s}</file>')
 manifest = '<RCC>\n<qresource prefix="/">\n{}\n</qresource>\n</RCC>'.format('\n'.join(sorted(files)))
 if not isinstance(manifest, bytes):
 manifest = manifest.encode('utf-8')
@@ -153,20 +153,16 @@ class GitHub(Base): # {{{
 existing_assets = self.existing_assets(release['id'])
 for path, desc in self.files.items():
 self.info('')
-url = self.API + 'repos/{}/{}/releases/assets/{{}}'.format(
-self.username, self.reponame
-)
+url = self.API + f'repos/{self.username}/{self.reponame}/releases/assets/{{}}'
 fname = os.path.basename(path)
 if fname in existing_assets:
-self.info(
-'Deleting {} from GitHub with id: {}'.format(fname, existing_assets[fname])
-)
+self.info(f'Deleting {fname} from GitHub with id: {existing_assets[fname]}')
 r = self.requests.delete(url.format(existing_assets[fname]))
 if r.status_code != 204:
-self.fail(r, 'Failed to delete {} from GitHub'.format(fname))
+self.fail(r, f'Failed to delete {fname} from GitHub')
 r = self.do_upload(upload_url, path, desc, fname)
 if r.status_code != 201:
-self.fail(r, 'Failed to upload file: {}'.format(fname))
+self.fail(r, f'Failed to upload file: {fname}')
 try:
 r = self.requests.patch(
 url.format(r.json()['id']),
@@ -185,7 +181,7 @@ class GitHub(Base): # {{{
 })
 )
 if r.status_code != 200:
-self.fail(r, 'Failed to set label for {}'.format(fname))
+self.fail(r, f'Failed to set label for {fname}')

 def clean_older_releases(self, releases):
 for release in releases:
@@ -218,7 +214,7 @@ class GitHub(Base): # {{{
 )

 def fail(self, r, msg):
-print(msg, ' Status Code: {}'.format(r.status_code), file=sys.stderr)
+print(msg, f' Status Code: {r.status_code}', file=sys.stderr)
 print('JSON from response:', file=sys.stderr)
 pprint(dict(r.json()), stream=sys.stderr)
 raise SystemExit(1)
@@ -228,9 +224,7 @@ class GitHub(Base): # {{{
 return error_code == 'already_exists'

 def existing_assets(self, release_id):
-url = self.API + 'repos/{}/{}/releases/{}/assets'.format(
-self.username, self.reponame, release_id
-)
+url = self.API + f'repos/{self.username}/{self.reponame}/releases/{release_id}/assets'
 r = self.requests.get(url)
 if r.status_code != 200:
 self.fail('Failed to get assets for release')
@@ -255,14 +249,14 @@ class GitHub(Base): # {{{
 data=json.dumps({
 'tag_name': self.current_tag_name,
 'target_commitish': 'master',
-'name': 'version {}'.format(self.version),
-'body': 'Release version {}'.format(self.version),
+'name': f'version {self.version}',
+'body': f'Release version {self.version}',
 'draft': False,
 'prerelease': False
 })
 )
 if r.status_code != 201:
-self.fail(r, 'Failed to create release for version: {}'.format(self.version))
+self.fail(r, f'Failed to create release for version: {self.version}')
 return r.json()

@@ -320,12 +314,12 @@ def generate_index(): # {{{
 ]
 body = '<ul class="release-list">{}</ul>'.format(' '.join(body))
 index = template.format(
-title='Previous calibre releases ({}.x)'.format(sname),
+title=f'Previous calibre releases ({sname}.x)',
 style=style,
 msg='Choose a calibre release',
 body=body
 )
-with open('{}.html'.format(sname), 'wb') as f:
+with open(f'{sname}.html', 'wb') as f:
 f.write(index.encode('utf-8'))

 for r in releases:
@@ -385,7 +379,7 @@ def generate_index(): # {{{

 body = '<dl>{}</dl>'.format(''.join(body))
 index = template.format(
-title='calibre release ({})'.format(rname),
+title=f'calibre release ({rname})',
 style=style,
 msg='',
 body=body
@@ -84,7 +84,7 @@ class Hyphenation(ReVendor):
 NAME = 'hyphenation'
 TAR_NAME = 'hyphenation dictionaries'
 VERSION = 'master'
-DOWNLOAD_URL = 'https://github.com/LibreOffice/dictionaries/archive/{}.tar.gz'.format(VERSION)
+DOWNLOAD_URL = f'https://github.com/LibreOffice/dictionaries/archive/{VERSION}.tar.gz'
 CAN_USE_SYSTEM_VERSION = False

 def run(self, opts):
@@ -288,14 +288,14 @@ class Install(Develop):
 class Sdist(Command):

 description = 'Create a source distribution'
-DEST = os.path.join('dist', '{}-{}.tar.xz'.format(__appname__, __version__))
+DEST = os.path.join('dist', f'{__appname__}-{__version__}.tar.xz')

 def run(self, opts):
 if not self.e(self.d(self.DEST)):
 os.makedirs(self.d(self.DEST))
 tdir = tempfile.mkdtemp()
 atexit.register(shutil.rmtree, tdir)
-tdir = self.j(tdir, 'calibre-{}'.format(__version__))
+tdir = self.j(tdir, f'calibre-{__version__}')
 self.info('\tRunning git export...')
 os.mkdir(tdir)
 subprocess.check_call('git archive HEAD | tar -x -C ' + tdir, shell=True)
@@ -336,7 +336,7 @@ class Sdist(Command):
 self.info('\tCreating tarfile...')
 dest = self.DEST.rpartition('.')[0]
 shutil.rmtree(os.path.join(tdir, '.github'))
-subprocess.check_call(['tar', '--mtime=now', '-cf', self.a(dest), 'calibre-{}'.format(__version__)], cwd=self.d(tdir))
+subprocess.check_call(['tar', '--mtime=now', '-cf', self.a(dest), f'calibre-{__version__}'], cwd=self.d(tdir))
 self.info('\tCompressing tarfile...')
 if os.path.exists(self.a(self.DEST)):
 os.remove(self.a(self.DEST))
@@ -396,4 +396,4 @@ class Bootstrap(Command):
 subprocess.check_call(clone_cmd, cwd=self.d(self.SRC))

 def run(self, opts):
-self.info('\n\nAll done! You should now be able to run "{} setup.py install" to install calibre'.format(sys.executable))
+self.info(f'\n\nAll done! You should now be able to run "{sys.executable} setup.py install" to install calibre')
@@ -20,7 +20,7 @@ def generate_data():
 ans = []
 for x, limit in (('day', 8), ('mon', 13)):
 for attr in ('ab' + x, x):
-ans.append((attr, tuple(map(nl, (getattr(locale, '{}_{}'.format(attr.upper(), i)) for i in range(1, limit)))))),
+ans.append((attr, tuple(map(nl, (getattr(locale, f'{attr.upper()}_{i}') for i in range(1, limit)))))),
 for x in ('d_t_fmt', 'd_fmt', 't_fmt', 't_fmt_ampm', 'radixchar', 'thousep', 'yesexpr', 'noexpr'):
 ans.append((x, nl(getattr(locale, x.upper()))))
 return ans
@@ -18,7 +18,7 @@ class MathJax(ReVendor):
 NAME = 'mathjax'
 TAR_NAME = 'MathJax'
 VERSION = '3.1.4'
-DOWNLOAD_URL = 'https://github.com/mathjax/MathJax/archive/{}.tar.gz'.format(VERSION)
+DOWNLOAD_URL = f'https://github.com/mathjax/MathJax/archive/{VERSION}.tar.gz'

 def add_file_pre(self, name, raw):
 self.h.update(raw)
@@ -222,7 +222,7 @@ def get_import_data(name, mod, zf, names):
 return module
 raise ValueError(f'Failed to find name: {name!r} in module: {mod!r}')
 else:
-raise ValueError('Failed to find module: {!r}'.format(mod))
+raise ValueError(f'Failed to find module: {mod!r}')


 def parse_metadata(raw, namelist, zf):
@@ -372,7 +372,7 @@ def fetch_plugin(old_index, entry):
 raw = read(entry.url).decode('utf-8', 'replace')
 url, name = parse_plugin_zip_url(raw)
 if url is None:
-raise ValueError('Failed to find zip file URL for entry: {}'.format(repr(entry)))
+raise ValueError(f'Failed to find zip file URL for entry: {entry!r}')
 plugin = lm_map.get(entry.thread_id, None)

 if plugin is not None:
@@ -392,7 +392,7 @@ def fetch_plugin(old_index, entry):
 slm = datetime(*parsedate(info.get('Last-Modified'))[:6])
 plugin = get_plugin_info(raw)
 plugin['last_modified'] = slm.isoformat()
-plugin['file'] = 'staging_{}.zip'.format(entry.thread_id)
+plugin['file'] = f'staging_{entry.thread_id}.zip'
 plugin['size'] = len(raw)
 plugin['original_url'] = url
 update_plugin_from_entry(plugin, entry)
@@ -474,14 +474,14 @@ def plugin_to_index(plugin, count):
 for li in details:
 if li.startswith('calibre:'):
 block.append('<br>')
-block.append('<li>{}</li>'.format(li))
+block.append(f'<li>{li}</li>')
 block = '<ul>{}</ul>'.format('\n'.join(block))
-downloads = ('\xa0<span class="download-count">[{} total downloads]</span>'.format(count)) if count else ''
+downloads = (f'\xa0<span class="download-count">[{count} total downloads]</span>') if count else ''
 zipfile = '<div class="end"><a href={} title="Download plugin" download={}>Download plugin \u2193</a>{}</div>'.format(
 quoteattr(plugin['file']), quoteattr(plugin['name'] + '.zip'), downloads)
 desc = plugin['description'] or ''
 if desc:
-desc = '<p>{}</p>'.format(desc)
+desc = f'<p>{desc}</p>'
 return f'{title}\n{desc}\n{block}\n{zipfile}\n\n'

@@ -67,9 +67,7 @@ class Stage2(Command):
 for installer in installer_names(include_source=False):
 installer = self.j(self.d(self.SRC), installer)
 if not os.path.exists(installer) or os.path.getsize(installer) < 10000:
-raise SystemExit(
-'The installer {} does not exist'.format(os.path.basename(installer))
-)
+raise SystemExit(f'The installer {os.path.basename(installer)} does not exist')


 class Stage3(Command):
@@ -129,7 +127,7 @@ class PublishBetas(Command):
 def run(self, opts):
 dist = self.a(self.j(self.d(self.SRC), 'dist'))
 subprocess.check_call((
-'rsync --partial -rh --info=progress2 --delete-after {}/ download.calibre-ebook.com:/srv/download/betas/'.format(dist)
+f'rsync --partial -rh --info=progress2 --delete-after {dist}/ download.calibre-ebook.com:/srv/download/betas/'
 ).split())

@@ -202,8 +200,8 @@ class Manual(Command):
 jobs.append(create_job([
 sys.executable, self.j(self.d(self.SRC), 'manual', 'build.py'),
 language, self.j(tdir, language)
-], '\n\n**************** Building translations for: {}'.format(language)))
-self.info('Building manual for {} languages'.format(len(jobs)))
+], f'\n\n**************** Building translations for: {language}'))
+self.info(f'Building manual for {len(jobs)} languages')
 subprocess.check_call(jobs[0].cmd)
 if not parallel_build(jobs[1:], self.info):
 raise SystemExit(1)
@@ -298,7 +296,7 @@ class ManPages(Command):
 for l in languages:
 jobs.append(create_job(
 [sys.executable, self.j(base, 'build.py'), '--man-pages', l, dest],
-'\n\n**************** Building translations for: {}'.format(l))
+f'\n\n**************** Building translations for: {l}')
 )
 self.info(f'\tCreating man pages in {dest} for {len(jobs)} languages...')
 subprocess.check_call(jobs[0].cmd)
@@ -16,12 +16,12 @@ class ReVendor(Command):
 CAN_USE_SYSTEM_VERSION = True

 def add_options(self, parser):
-parser.add_option('--path-to-{}'.format(self.NAME), help='Path to the extracted {} source'.format(self.TAR_NAME))
-parser.add_option('--{}-url'.format(self.NAME), default=self.DOWNLOAD_URL,
-help='URL to {} source archive in tar.gz format'.format(self.TAR_NAME))
+parser.add_option(f'--path-to-{self.NAME}', help=f'Path to the extracted {self.TAR_NAME} source')
+parser.add_option(f'--{self.NAME}-url', default=self.DOWNLOAD_URL,
+help=f'URL to {self.TAR_NAME} source archive in tar.gz format')
 if self.CAN_USE_SYSTEM_VERSION:
-parser.add_option('--system-{}'.format(self.NAME), default=False, action='store_true',
-help='Treat {} as system copy and symlink instead of copy'.format(self.TAR_NAME))
+parser.add_option(f'--system-{self.NAME}', default=False, action='store_true',
+help=f'Treat {self.TAR_NAME} as system copy and symlink instead of copy')

 def download_securely(self, url: str) -> bytes:
 num = 5 if is_ci else 1
@@ -35,7 +35,7 @@ class ReVendor(Command):
 time.sleep(2)

 def download_vendor_release(self, tdir, url):
-self.info('Downloading {}:'.format(self.TAR_NAME), url)
+self.info(f'Downloading {self.TAR_NAME}:', url)
 raw = self.download_securely(url)
 with tarfile.open(fileobj=BytesIO(raw)) as tf:
 tf.extractall(tdir)
@@ -93,11 +93,11 @@ class POT(Command): # {{{

 ans = []
 for lineno, msg in msgs:
-ans.append('#: {}:{}'.format(path, lineno))
+ans.append(f'#: {path}:{lineno}')
 slash = codepoint_to_chr(92)
 msg = msg.replace(slash, slash*2).replace('"', r'\"').replace('\n',
 r'\n').replace('\r', r'\r').replace('\t', r'\t')
-ans.append('msgid "{}"'.format(msg))
+ans.append(f'msgid "{msg}"')
 ans.append('msgstr ""')
 ans.append('')

@@ -135,8 +135,8 @@ class POT(Command): # {{{
 lines = f.read().decode('utf-8').splitlines()
 for i in range(len(lines)):
 line = lines[i].strip()
-if line == '[calibre.{}]'.format(slug):
-lines.insert(i+1, 'file_filter = manual/<lang>/{}.po'.format(bname))
+if line == f'[calibre.{slug}]':
+lines.insert(i+1, f'file_filter = manual/<lang>/{bname}.po')
 f.seek(0), f.truncate(), f.write('\n'.join(lines).encode('utf-8'))
 break
 else:
@@ -397,12 +397,12 @@ class Translations(POT): # {{{
 iso_data.extract_po_files('iso_639-3', tdir)
 for f, (locale, dest) in iteritems(fmap):
 iscpo = {'zh_HK':'zh_CN'}.get(locale, locale)
-iso639 = self.j(tdir, '{}.po'.format(iscpo))
+iso639 = self.j(tdir, f'{iscpo}.po')
 if os.path.exists(iso639):
 files.append((iso639, self.j(self.d(dest), 'iso639.mo')))
 else:
 iscpo = iscpo.partition('_')[0]
-iso639 = self.j(tdir, '{}.po'.format(iscpo))
+iso639 = self.j(tdir, f'{iscpo}.po')
 if os.path.exists(iso639):
 files.append((iso639, self.j(self.d(dest), 'iso639.mo')))
 elif locale not in skip_iso:
@@ -72,9 +72,9 @@ def upload_signatures():
 f.write(fingerprint)
 scp.append(sha512)
 for srv in 'code main'.split():
-check_call(scp + ['{0}:/srv/{0}/signatures/'.format(srv)])
+check_call(scp + [f'{srv}:/srv/{srv}/signatures/'])
 check_call(
-['ssh', srv, 'chown', '-R', 'http:http', '/srv/{}/signatures'.format(srv)]
+['ssh', srv, 'chown', '-R', 'http:http', f'/srv/{srv}/signatures']
 )
 finally:
 shutil.rmtree(tdir)
@@ -199,9 +199,7 @@ def upload_to_fosshub():
 entries = []
 for fname in files:
 desc = installer_description(fname)
-url = 'https://download.calibre-ebook.com/{}/{}'.format(
-__version__, os.path.basename(fname)
-)
+url = f'https://download.calibre-ebook.com/{__version__}/{os.path.basename(fname)}'
 entries.append({
 'fileUrl': url,
 'type': desc,
@@ -378,7 +376,7 @@ class UploadDemo(Command): # {{{

 lrf = self.j(self.SRC, 'calibre', 'ebooks', 'lrf', 'html', 'demo')
 check_call(
-'cd {} && zip -j /tmp/html-demo.zip * /tmp/html2lrf.lrf'.format(lrf),
+f'cd {lrf} && zip -j /tmp/html-demo.zip * /tmp/html2lrf.lrf',
 shell=True
 )

@@ -409,7 +407,7 @@ class UploadToServer(Command): # {{{
 ('ssh code /apps/update-calibre-version.py ' + __version__).split()
 )
 check_call((
-'ssh main /usr/local/bin/update-calibre-version.py {} && /usr/local/bin/update-calibre-code.py && /apps/static/generate.py'.format(__version__)
+f'ssh main /usr/local/bin/update-calibre-version.py {__version__} && /usr/local/bin/update-calibre-code.py && /apps/static/generate.py'
 ).split())

@@ -49,9 +49,7 @@ def sanitize_path():
 needed_paths.append(p)
 executables.remove(x)
 sw = os.environ['SW']
-paths = r'{0}\private\python\bin {0}\private\python\Lib\site-packages\pywin32_system32 {0}\bin {0}\qt\bin C:\Windows\System32'.format(
-sw
-).split() + needed_paths
+paths = rf'{sw}\private\python\bin {sw}\private\python\Lib\site-packages\pywin32_system32 {sw}\bin {sw}\qt\bin C:\Windows\System32'.split() + needed_paths
 os.environ['PATH'] = os.pathsep.join(paths)
 print('PATH:', os.environ['PATH'])

@@ -50,7 +50,7 @@ pattern_vector3D = re.compile(r'\([ ]*-?([0-9]+(\.[0-9]*)?|\.[0-9]+)([ ]+-?([0-9

 def make_NCName(arg):
 for c in (':',' '):
-arg = arg.replace(c,'_{:x}_'.format(ord(c)))
+arg = arg.replace(c,f'_{ord(c):x}_')
 return arg

@@ -78,13 +78,13 @@ def cnv_color(attribute, arg, element):
 def cnv_configtype(attribute, arg, element):
 if unicode_type(arg) not in ('boolean', 'short', 'int', 'long',
 'double', 'string', 'datetime', 'base64Binary'):
-raise ValueError("'{}' not allowed".format(unicode_type(arg)))
+raise ValueError(f"'{unicode_type(arg)}' not allowed")
 return unicode_type(arg)


 def cnv_data_source_has_labels(attribute, arg, element):
 if unicode_type(arg) not in ('none','row','column','both'):
-raise ValueError("'{}' not allowed".format(unicode_type(arg)))
+raise ValueError(f"'{unicode_type(arg)}' not allowed")
 return unicode_type(arg)

 # Understand different date formats
@@ -116,7 +116,7 @@ def cnv_family(attribute, arg, element):
 ''' A style family '''
 if unicode_type(arg) not in ('text', 'paragraph', 'section', 'ruby', 'table', 'table-column', 'table-row', 'table-cell',
 'graphic', 'presentation', 'drawing-page', 'chart'):
-raise ValueError("'{}' not allowed".format(unicode_type(arg)))
+raise ValueError(f"'{unicode_type(arg)}' not allowed")
 return unicode_type(arg)

@@ -154,7 +154,7 @@ def cnv_integer(attribute, arg, element):

 def cnv_legend_position(attribute, arg, element):
 if unicode_type(arg) not in ('start', 'end', 'top', 'bottom', 'top-start', 'bottom-start', 'top-end', 'bottom-end'):
-raise ValueError("'{}' not allowed".format(unicode_type(arg)))
+raise ValueError(f"'{unicode_type(arg)}' not allowed")
 return unicode_type(arg)

@@ -167,7 +167,7 @@ def cnv_length(attribute, arg, element):
 '''
 global pattern_length
 if not pattern_length.match(arg):
-raise ValueError("'{}' is not a valid length".format(arg))
+raise ValueError(f"'{arg}' is not a valid length")
 return arg

@@ -182,19 +182,19 @@ def cnv_lengthorpercent(attribute, arg, element):
 except:
 failed = True
 if failed:
-raise ValueError("'{}' is not a valid length or percent".format(arg))
+raise ValueError(f"'{arg}' is not a valid length or percent")
 return arg


 def cnv_metavaluetype(attribute, arg, element):
 if unicode_type(arg) not in ('float', 'date', 'time', 'boolean', 'string'):
-raise ValueError("'{}' not allowed".format(unicode_type(arg)))
+raise ValueError(f"'{unicode_type(arg)}' not allowed")
 return unicode_type(arg)


 def cnv_major_minor(attribute, arg, element):
 if arg not in ('major','minor'):
-raise ValueError("'{}' is not either 'minor' or 'major'".format(arg))
+raise ValueError(f"'{arg}' is not either 'minor' or 'major'")


 pattern_namespacedToken = re.compile(r'[0-9a-zA-Z_]+:[0-9a-zA-Z._\-]+')
@@ -204,7 +204,7 @@ def cnv_namespacedToken(attribute, arg, element):
 global pattern_namespacedToken

 if not pattern_namespacedToken.match(arg):
-raise ValueError("'{}' is not a valid namespaced token".format(arg))
+raise ValueError(f"'{arg}' is not a valid namespaced token")
 return __save_prefix(attribute, arg, element)

@@ -258,7 +258,7 @@ pattern_percent = re.compile(r'-?([0-9]+(\.[0-9]*)?|\.[0-9]+)%')
 def cnv_percent(attribute, arg, element):
 global pattern_percent
 if not pattern_percent.match(arg):
-raise ValueError("'{}' is not a valid length".format(arg))
+raise ValueError(f"'{arg}' is not a valid length")
 return arg

@@ -277,7 +277,7 @@ def cnv_points(attribute, arg, element):
 try:
 strarg = ' '.join(['{},{}'.format(*p) for p in arg])
 except:
-raise ValueError('Points must be string or [(0,0),(1,1)] - not {}'.format(arg))
+raise ValueError(f'Points must be string or [(0,0),(1,1)] - not {arg}')
 return strarg

@@ -291,7 +291,7 @@ def cnv_string(attribute, arg, element):

 def cnv_textnoteclass(attribute, arg, element):
 if unicode_type(arg) not in ('footnote', 'endnote'):
-raise ValueError("'{}' not allowed".format(unicode_type(arg)))
+raise ValueError(f"'{unicode_type(arg)}' not allowed")
 return unicode_type(arg)

 # Understand different time formats
@@ -317,7 +317,7 @@ def cnv_viewbox(attribute, arg, element):

 def cnv_xlinkshow(attribute, arg, element):
 if unicode_type(arg) not in ('new', 'replace', 'embed'):
-raise ValueError("'{}' not allowed".format(unicode_type(arg)))
+raise ValueError(f"'{unicode_type(arg)}' not allowed")
 return unicode_type(arg)

@@ -69,9 +69,9 @@ def _quoteattr(data, entities={}):
 if "'" in data:
 data = '"{}"'.format(data.replace('"', '&quot;'))
 else:
-data = "'{}'".format(data)
+data = f"'{data}'"
 else:
-data = '"{}"'.format(data)
+data = f'"{data}"'
 return data

@@ -383,7 +383,7 @@ class Element(Node):
 Setting check_grammar=False turns off grammar checking
 '''
 if check_grammar and self.qname not in grammar.allows_text:
-raise IllegalText('The <{}> element does not allow text'.format(self.tagName))
+raise IllegalText(f'The <{self.tagName}> element does not allow text')
 else:
 if text != '':
 self.appendChild(Text(text))
@@ -393,7 +393,7 @@ class Element(Node):
 Setting check_grammar=False turns off grammar checking
 '''
 if check_grammar and self.qname not in grammar.allows_text:
-raise IllegalText('The <{}> element does not allow text'.format(self.tagName))
+raise IllegalText(f'The <{self.tagName}> element does not allow text')
 else:
 self.appendChild(CDATASection(cdata))

@@ -75,7 +75,7 @@ class LoadParser(handler.ContentHandler):
 e = Element(qname=tag, qattributes=attrdict, check_grammar=False)
 self.curr = e
 except AttributeError as v:
-print('Error: {}'.format(v))
+print(f'Error: {v}')

 if tag == (OFFICENS, 'automatic-styles'):
 e = self.doc.automaticstyles
@@ -326,16 +326,16 @@ class ODF2MoinMoin:

 link = node.getAttribute('xlink:href')
 if link and link[:2] == './': # Indicates a sub-object, which isn't supported
-return '{}\n'.format(link)
+return f'{link}\n'
 if link and link[:9] == 'Pictures/':
 link = link[9:]
-return '[[Image({})]]\n'.format(link)
+return f'[[Image({link})]]\n'

 def text_a(self, node):
 text = self.textToString(node)
 link = node.getAttribute('xlink:href')
 if link.strip() == text.strip():
-return '[{}] '.format(link.strip())
+return f'[{link.strip()}] '
 else:
 return f'[{link.strip()} {text.strip()}] '

@@ -348,7 +348,7 @@ class ODF2MoinMoin:
 body = (node.getElementsByTagName('text:note-body')[0]
 .childNodes[0])
 self.footnotes.append((cite, self.textToString(body)))
-return '^{}^'.format(cite)
+return f'^{cite}^'

 def text_s(self, node):
 try:
@@ -203,7 +203,7 @@ class StyleToCSS:
 if generic is not None:
 self.save_font(fontstyle, fontstyle, generic)
 family, htmlgeneric = self.fontdict.get(fontstyle, (fontstyle, 'serif'))
-sdict['font-family'] = '{}, {}'.format(family, htmlgeneric)
+sdict['font-family'] = f'{family}, {htmlgeneric}'

 def c_text_position(self, ruleset, sdict, rule, tp):
 ''' Text position. This is used e.g. to make superscript and subscript
@@ -510,7 +510,7 @@ class ODF2XHTML(handler.ContentHandler):
 if media:
 self.metatags.append(f'<link rel="stylesheet" type="text/css" href="{stylefilename}" media="{media}"/>\n')
 else:
-self.metatags.append('<link rel="stylesheet" type="text/css" href="{}"/>\n'.format(stylefilename))
+self.metatags.append(f'<link rel="stylesheet" type="text/css" href="{stylefilename}"/>\n')

 def _resetfootnotes(self):
 # Footnotes and endnotes
@@ -564,7 +564,7 @@ class ODF2XHTML(handler.ContentHandler):
 for key,val in attrs.items():
 a.append(f'''{key}={quoteattr(val)}''')
 if len(a) == 0:
-self.writeout('<{}>'.format(tag))
+self.writeout(f'<{tag}>')
 else:
 self.writeout('<{} {}>'.format(tag, ' '.join(a)))
 if block:
@@ -573,7 +573,7 @@ class ODF2XHTML(handler.ContentHandler):
 def closetag(self, tag, block=True):
 ''' Close an open HTML tag '''
 self.htmlstack.pop()
-self.writeout('</{}>'.format(tag))
+self.writeout(f'</{tag}>')
 if block:
 self.writeout('\n')

@@ -675,14 +675,14 @@ class ODF2XHTML(handler.ContentHandler):
 ''' Set the content language. Identifies the targeted audience
 '''
 self.language = ''.join(self.data)
-self.metatags.append('<meta http-equiv="content-language" content="{}"/>\n'.format(escape(self.language)))
+self.metatags.append(f'<meta http-equiv="content-language" content="{escape(self.language)}"/>\n')
 self.data = []

 def e_dc_creator(self, tag, attrs):
 ''' Set the content creator. Identifies the targeted audience
 '''
 self.creator = ''.join(self.data)
-self.metatags.append('<meta http-equiv="creator" content="{}"/>\n'.format(escape(self.creator)))
+self.metatags.append(f'<meta http-equiv="creator" content="{escape(self.creator)}"/>\n')
 self.data = []

 def s_custom_shape(self, tag, attrs):
@@ -940,13 +940,13 @@ dl.notes dd:last-of-type { page-break-after: avoid }
 for key in range(1,self.currentnote+1):
 note = self.notedict[key]
 # for key,note in self.notedict.items():
-self.opentag('dt', {'id':'footnote-{}'.format(key)})
+self.opentag('dt', {'id':f'footnote-{key}'})
 # self.opentag('sup')
 # self.writeout(escape(note['citation']))
 # self.closetag('sup', False)
 self.writeout('[')
-self.opentag('a', {'href': '#citation-{}'.format(key)})
-self.writeout('←{}'.format(key))
+self.opentag('a', {'href': f'#citation-{key}'})
+self.writeout(f'←{key}')
 self.closetag('a')
 self.writeout(']\xa0')
 self.closetag('dt')
@@ -970,7 +970,7 @@ dl.notes dd:last-of-type { page-break-after: avoid }
 self.emptytag('meta', {'http-equiv':'Content-Type', 'content':'text/html;charset=UTF-8'})
 for metaline in self.metatags:
 self.writeout(metaline)
-self.writeout('<title>{}</title>\n'.format(escape(self.title)))
+self.writeout(f'<title>{escape(self.title)}</title>\n')

 def e_office_document_content(self, tag, attrs):
 ''' Last tag '''
@@ -1172,7 +1172,7 @@ dl.notes dd:last-of-type { page-break-after: avoid }
 c = attrs.get((TABLENS,'style-name'), None)
 if c and self.generate_css:
 c = c.replace('.','_')
-self.opentag('table',{'class': 'T-{}'.format(c)})
+self.opentag('table',{'class': f'T-{c}'})
 else:
 self.opentag('table')
 self.purgedata()
@@ -1280,9 +1280,9 @@ dl.notes dd:last-of-type { page-break-after: avoid }
 self.headinglevels[x] = 0
 special = special_styles.get('P-'+name)
 if special or not self.generate_css:
-self.opentag('h{}'.format(level))
+self.opentag(f'h{level}')
 else:
-self.opentag('h{}'.format(level), {'class':'P-{}'.format(name)})
+self.opentag(f'h{level}', {'class':f'P-{name}'})
 self.purgedata()

 def e_text_h(self, tag, attrs):
@@ -1309,7 +1309,7 @@ dl.notes dd:last-of-type { page-break-after: avoid }
 self.closetag('a', False)
 self.opentag('a', {'id': anchor2})
 self.closetag('a', False)
-self.closetag('h{}'.format(level))
+self.closetag(f'h{level}')
 self.purgedata()

 def s_text_line_break(self, tag, attrs):
@ -1335,7 +1335,7 @@ dl.notes dd:last-of-type { page-break-after: avoid }
|
|||||||
# the list level must return to 1, even though the table or
|
# the list level must return to 1, even though the table or
|
||||||
# textbox itself may be nested within another list.
|
# textbox itself may be nested within another list.
|
||||||
name = self.tagstack.rfindattr((TEXTNS,'style-name'))
|
name = self.tagstack.rfindattr((TEXTNS,'style-name'))
|
||||||
list_class = '{}_{}'.format(name, level)
|
list_class = f'{name}_{level}'
|
||||||
tag_name = self.listtypes.get(list_class,'ul')
|
tag_name = self.listtypes.get(list_class,'ul')
|
||||||
number_class = tag_name + list_class
|
number_class = tag_name + list_class
|
||||||
if list_id:
|
if list_id:
|
||||||
@ -1372,7 +1372,7 @@ dl.notes dd:last-of-type { page-break-after: avoid }
|
|||||||
# the list level must return to 1, even though the table or
|
# the list level must return to 1, even though the table or
|
||||||
# textbox itself may be nested within another list.
|
# textbox itself may be nested within another list.
|
||||||
name = self.tagstack.rfindattr((TEXTNS,'style-name'))
|
name = self.tagstack.rfindattr((TEXTNS,'style-name'))
|
||||||
list_class = '{}_{}'.format(name, level)
|
list_class = f'{name}_{level}'
|
||||||
self.closetag(self.listtypes.get(list_class,'ul'))
|
self.closetag(self.listtypes.get(list_class,'ul'))
|
||||||
self.purgedata()
|
self.purgedata()
|
||||||
|
|
||||||
@ -1473,9 +1473,9 @@ dl.notes dd:last-of-type { page-break-after: avoid }
|
|||||||
self.notedict[self.currentnote]['citation'] = mark
|
self.notedict[self.currentnote]['citation'] = mark
|
||||||
self.opentag('sup')
|
self.opentag('sup')
|
||||||
self.opentag('a', {
|
self.opentag('a', {
|
||||||
'href': '#footnote-{}'.format(self.currentnote),
|
'href': f'#footnote-{self.currentnote}',
|
||||||
'class': 'citation',
|
'class': 'citation',
|
||||||
'id':'citation-{}'.format(self.currentnote)
|
'id':f'citation-{self.currentnote}'
|
||||||
})
|
})
|
||||||
# self.writeout( escape(mark) )
|
# self.writeout( escape(mark) )
|
||||||
# Since HTML only knows about endnotes, there is too much risk that the
|
# Since HTML only knows about endnotes, there is too much risk that the
|
||||||
@ -1496,7 +1496,7 @@ dl.notes dd:last-of-type { page-break-after: avoid }
|
|||||||
if specialtag is None:
|
if specialtag is None:
|
||||||
specialtag = 'p'
|
specialtag = 'p'
|
||||||
if self.generate_css:
|
if self.generate_css:
|
||||||
htmlattrs['class'] = 'P-{}'.format(c)
|
htmlattrs['class'] = f'P-{c}'
|
||||||
self.opentag(specialtag, htmlattrs)
|
self.opentag(specialtag, htmlattrs)
|
||||||
self.purgedata()
|
self.purgedata()
|
||||||
|
|
||||||
@ -1548,7 +1548,7 @@ dl.notes dd:last-of-type { page-break-after: avoid }
|
|||||||
if special is None:
|
if special is None:
|
||||||
special = 'span'
|
special = 'span'
|
||||||
if self.generate_css:
|
if self.generate_css:
|
||||||
htmlattrs['class'] = 'S-{}'.format(c)
|
htmlattrs['class'] = f'S-{c}'
|
||||||
|
|
||||||
self.opentag(special, htmlattrs)
|
self.opentag(special, htmlattrs)
|
||||||
self.purgedata()
|
self.purgedata()
|
||||||
|
@@ -385,10 +385,10 @@ class OpenDocument:
 '''
 self.childobjects.append(document)
 if objectname is None:
-document.folder = '{}/Object {}'.format(self.folder, len(self.childobjects))
+document.folder = f'{self.folder}/Object {len(self.childobjects)}'
 else:
 document.folder = objectname
-return '.{}'.format(document.folder)
+return f'.{document.folder}'

 def _savePictures(self, object, folder):
 for arcname, picturerec in object.Pictures.items():

@@ -407,7 +407,7 @@ class OpenDocument:
 # Look in subobjects
 subobjectnum = 1
 for subobject in object.childobjects:
-self._savePictures(subobject,'{}Object {}/'.format(folder, subobjectnum))
+self._savePictures(subobject,f'{folder}Object {subobjectnum}/')
 subobjectnum += 1

 def __replaceGenerator(self):

@@ -492,23 +492,23 @@ class OpenDocument:
 else:
 self.manifest.addElement(manifest.FileEntry(fullpath=folder, mediatype=object.mimetype))
 # Write styles
-self.manifest.addElement(manifest.FileEntry(fullpath='{}styles.xml'.format(folder), mediatype='text/xml'))
+self.manifest.addElement(manifest.FileEntry(fullpath=f'{folder}styles.xml', mediatype='text/xml'))
-zi = zipfile.ZipInfo('{}styles.xml'.format(folder), self._now)
+zi = zipfile.ZipInfo(f'{folder}styles.xml', self._now)
 zi.compress_type = zipfile.ZIP_DEFLATED
 zi.external_attr = UNIXPERMS
 self._z.writestr(zi, object.stylesxml())

 # Write content
-self.manifest.addElement(manifest.FileEntry(fullpath='{}content.xml'.format(folder), mediatype='text/xml'))
+self.manifest.addElement(manifest.FileEntry(fullpath=f'{folder}content.xml', mediatype='text/xml'))
-zi = zipfile.ZipInfo('{}content.xml'.format(folder), self._now)
+zi = zipfile.ZipInfo(f'{folder}content.xml', self._now)
 zi.compress_type = zipfile.ZIP_DEFLATED
 zi.external_attr = UNIXPERMS
 self._z.writestr(zi, object.contentxml())

 # Write settings
 if object.settings.hasChildNodes():
-self.manifest.addElement(manifest.FileEntry(fullpath='{}settings.xml'.format(folder), mediatype='text/xml'))
+self.manifest.addElement(manifest.FileEntry(fullpath=f'{folder}settings.xml', mediatype='text/xml'))
-zi = zipfile.ZipInfo('{}settings.xml'.format(folder), self._now)
+zi = zipfile.ZipInfo(f'{folder}settings.xml', self._now)
 zi.compress_type = zipfile.ZIP_DEFLATED
 zi.external_attr = UNIXPERMS
 self._z.writestr(zi, object.settingsxml())

@@ -524,7 +524,7 @@ class OpenDocument:
 # Write subobjects
 subobjectnum = 1
 for subobject in object.childobjects:
-self._saveXmlObjects(subobject, '{}Object {}/'.format(folder, subobjectnum))
+self._saveXmlObjects(subobject, f'{folder}Object {subobjectnum}/')
 subobjectnum += 1

 # Document's DOM methods
@@ -65,7 +65,7 @@ class UserFields:
 if isinstance(self.src_file, (bytes, str)):
 # src_file is a filename, check if it is a zip-file
 if not zipfile.is_zipfile(self.src_file):
-raise TypeError('{} is no odt file.'.format(self.src_file))
+raise TypeError(f'{self.src_file} is no odt file.')
 elif self.src_file is None:
 # use stdin if no file given
 self.src_file = sys.stdin
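
Every hunk above applies the same mechanical rewrite: a str.format() call with empty or positional placeholders becomes an f-string that interpolates the same expressions inline. A minimal sketch of the equivalence, using a placeholder drawn from the footnote hunk (the value of key is chosen only for illustration):

    key = 3
    old_style = 'footnote-{}'.format(key)  # placeholder filled by str.format()
    new_style = f'footnote-{key}'          # same result, expression interpolated directly
    assert old_style == new_style == 'footnote-3'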