Mirror of https://github.com/kovidgoyal/calibre.git, synced 2025-07-09 03:04:10 -04:00

Commit ddfdc80d3a (parent e006114716): more misc recommendations (manual)

ruff 'PIE': enable the PIE810 check and replace chains of startswith()/endswith() calls joined with 'or' by single calls that take a tuple of prefixes or suffixes.
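The whole commit applies one transformation, following ruff's PIE810 recommendation: str.startswith() and str.endswith() accept a tuple of strings as well as a single string, so a chain of calls joined with 'or' can collapse into a single call. A minimal sketch of the equivalence (the helper name and URLs below are illustrative, not part of the commit):

def is_supported_url(url: str) -> bool:
    # Form flagged by PIE810: one startswith() call per prefix, joined with 'or'.
    chained = url.startswith('http://') or url.startswith('https://')
    # Preferred form: a single call with a tuple of prefixes.
    tupled = url.startswith(('http://', 'https://'))
    assert chained == tupled  # the two forms always agree
    return tupled


print(is_supported_url('https://www1.folha.uol.com.br/'))  # True
print(is_supported_url('ftp://example.org'))                # False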
@@ -125,7 +125,7 @@ img { background: none !important; float: none; margin: 0px; }
     break
 elif strpost.startswith('<a href'):
     url = post['href']
-    if url.startswith('http://www1.folha.uol.com.br/') or url.startswith('https://www1.folha.uol.com.br/') :
+    if url.startswith(('http://www1.folha.uol.com.br/', 'https://www1.folha.uol.com.br/')) :
         title = self.tag_to_string(post)
         self.log()
         self.log('--> post: ', post)
@@ -22,7 +22,7 @@ quote-style = 'single'
 explicit-preview-rules = true
 ignore = [
     'E402', 'E722', 'E741',
-    'UP012', 'UP030', 'UP032', 'UP038', 'C413', 'C420', 'PIE790', 'PIE794', 'PIE810',
+    'UP012', 'UP030', 'UP032', 'UP038', 'C413', 'C420', 'PIE790', 'PIE794',
     'RUF001', 'RUF002', 'RUF003', 'RUF005', 'RUF012', 'RUF013', 'RUF015', 'RUF031', 'RUF100',
     'F841', # because in preview, unused tuple unpacking variable that not use dummy syntax (prefix '_' underscore)
             # raise error 'unused-variable', sigh (https://github.com/astral-sh/ruff/issues/8884)
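Dropping 'PIE810' from the ignore list is what lets ruff report the check (assuming the PIE rules are already selected elsewhere in this configuration); the code hunks below are the corresponding manual fixes. One detail worth noting, and the reason every fix shows doubled parentheses: startswith() and endswith() accept a tuple of strings, not a list. A short sketch (the sample filename is made up):

name = 'library_ui.py'

# A tuple of suffixes is accepted; the call is true if any suffix matches.
print(name.endswith(('_ui.py', '_ui.pyc')))  # True

# A list is rejected outright.
try:
    name.endswith(['_ui.py', '_ui.pyc'])
except TypeError as err:
    print(err)  # endswith first arg must be str or a tuple of str, not list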
@@ -520,7 +520,7 @@ class Build(Command):
 def get(src: str, env: Environment, for_tooling: bool = False) -> CompileCommand:
     compiler = env.cxx if ext.needs_cxx else env.cc
     obj = self.j(obj_dir, os.path.splitext(self.b(src))[0]+env.obj_suffix)
-    inf = env.cc_input_cpp_flag if src.endswith('.cpp') or src.endswith('.cxx') else env.cc_input_c_flag
+    inf = env.cc_input_cpp_flag if src.endswith(('.cpp', '.cxx')) else env.cc_input_c_flag
     sinc = [inf, src]
     if env.cc_output_flag.startswith('/'):
         if for_tooling: # clangd gets confused by cl.exe style source and output flags
@@ -30,7 +30,7 @@ class GUI(Command):
 path = os.path.abspath(os.path.join(root, name))
 if name.endswith('.ui'):
     forms.append(path)
-elif name.endswith('_ui.py') or name.endswith('_ui.pyc'):
+elif name.endswith(('_ui.py', '_ui.pyc')):
     fname = path.rpartition('_')[0] + '.ui'
     if not os.path.exists(fname):
         os.remove(path)
@@ -349,7 +349,7 @@ def generate_index(): # {{{
         .format(osx[0], 'OS X Disk Image (.dmg)')
     )
 linux = [
-    x for x in files if x.endswith('.txz') or x.endswith('tar.bz2')
+    x for x in files if x.endswith(('.txz', 'tar.bz2'))
 ]
 if linux:
     def ldesc(x):
@@ -366,7 +366,7 @@ def generate_index(): # {{{
         ' '.join(linux)
     )
 )
-source = [x for x in files if x.endswith('.xz') or x.endswith('.gz')]
+source = [x for x in files if x.endswith(('.xz', '.gz'))]
 if source:
     body.append(
         '<dt>Source Code</dt><dd><a href="{0}" title="{1}">{1}</a></dd>'
@@ -637,7 +637,7 @@ class KOBO(USBMS):
 # print('Add book to metadata: ')
 # print('prefix: ' + prefix)
 lpath = path.partition(prefix)[2]
-if lpath.startswith('/') or lpath.startswith('\\'):
+if lpath.startswith(('/', '\\')):
     lpath = lpath[1:]
 # print('path: ' + lpath)
 book = self.book_class(prefix, lpath, info.title, other=info)
@@ -472,7 +472,7 @@ class SMART_DEVICE_APP(DeviceConfig, DevicePlugin):
 special_tag = None
 if mdata.tags:
     for t in mdata.tags:
-        if t.startswith(_('News')) or t.startswith('/'):
+        if t.startswith((_('News'), '/')):
             special_tag = t
             break

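The tuple elements do not have to be string literals: they are ordinary expressions evaluated when the call runs, so the translated _('News') prefix behaves exactly as it did in the or-chain. A small sketch, with a stand-in for the translation function (the real code gets _ from calibre's localisation setup):

def _(msg: str) -> str:
    # Stand-in for the real gettext-style translation lookup.
    return msg


tag = '/Fiction/Thriller'
# The tuple is built, and _() evaluated, each time the check runs.
print(tag.startswith((_('News'), '/')))  # True, via the '/' prefix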
@@ -386,7 +386,7 @@ class USBMS(CLI, Device):
             self._main_prefix)
     continue
 lpath = path.partition(prefix)[2]
-if lpath.startswith('/') or lpath.startswith('\\'):
+if lpath.startswith(('/', '\\')):
     lpath = lpath[1:]
 book = self.book_class(prefix, lpath, other=info)
 if book.size is None:
@@ -80,7 +80,7 @@ def create_upload_path(mdata, fname, template, sanitize,
 special_tag = None
 if mdata.tags:
     for t in mdata.tags:
-        if t.startswith(_('News')) or t.startswith('/'):
+        if t.startswith((_('News'), '/')):
             special_tag = t
             break

@@ -347,7 +347,7 @@ def create_option_parser(args, log):


 def abspath(x):
-    if x.startswith('http:') or x.startswith('https:'):
+    if x.startswith(('http:', 'https:')):
         return x
     return os.path.abspath(os.path.expanduser(x))

@@ -326,8 +326,7 @@ class CSSPreProcessor:
 ans, namespaced = [], False
 for line in data.splitlines():
     ll = line.lstrip()
-    if not (namespaced or ll.startswith('@import') or not ll or
-            ll.startswith('@charset')):
+    if not (namespaced or ll.startswith(('@import', '@charset')) or not ll):
         ans.append(XHTML_CSS_NAMESPACE.strip())
         namespaced = True
     ans.append(line)
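Besides folding the two startswith() calls into one, this hunk reorders the operands: the '@charset' test now sits before the 'not ll' test. All of the operands are side-effect-free boolean expressions, so the reordering can only change which test short-circuits first, never the result. A quick check over a few representative lines (the sample strings are illustrative):

def old_check(namespaced: bool, ll: str) -> bool:
    return not (namespaced or ll.startswith('@import') or not ll or ll.startswith('@charset'))


def new_check(namespaced: bool, ll: str) -> bool:
    return not (namespaced or ll.startswith(('@import', '@charset')) or not ll)


samples = ['', '@import url(x.css)', '@charset "utf-8";', 'p { color: red }']
for namespaced in (False, True):
    for ll in samples:
        assert old_check(namespaced, ll) == new_check(namespaced, ll)
print('old and new conditions agree on all samples')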
@@ -17,7 +17,7 @@ from calibre.utils.zipfile import ZipFile

 def pretty_all_xml_in_dir(path):
     for f in walk(path):
-        if f.endswith('.xml') or f.endswith('.rels'):
+        if f.endswith(('.xml', '.rels')):
             with open(f, 'r+b') as stream:
                 raw = stream.read()
                 if raw:
@@ -145,7 +145,7 @@ def update_flow_links(mobi8_reader, resource_map, log):

 for j in range(1, len(srcpieces), 2):
     tag = srcpieces[j]
-    if tag.startswith('<im') or tag.startswith('<svg:image'):
+    if tag.startswith(('<im', '<svg:image')):
         for m in img_index_pattern.finditer(tag):
             num = int(m.group(1), 32)
             href = resource_map[num-1]
@@ -101,7 +101,7 @@ def read_images_from_folder(path):
 name = os.path.relpath(filepath, path).replace(os.sep, '/')
 ext = name.rpartition('.')[-1]
 bname = os.path.basename(name)
-if bname.startswith('.') or bname.startswith('_'):
+if bname.startswith(('.', '_')):
     continue
 if ext == 'svg':
     render_svg(filepath)
@@ -1750,7 +1750,7 @@ class IdentifiersEdit(QLineEdit, ToMetadataMixin, LineEditIndicators):
 if identifier_found:
     return
 text = str(QApplication.clipboard().text()).strip()
-if text.startswith('http://') or text.startswith('https://'):
+if text.startswith(('http://', 'https://')):
     return self.paste_prefix('url')
 try:
     prefix = gprefs['paste_isbn_prefixes'][0]
@@ -1803,7 +1803,7 @@ class IdentifiersEdit(QLineEdit, ToMetadataMixin, LineEditIndicators):
 try:
     url_pattern = formatter.safe_format(template, vals, '', vals)
     url_pattern = re.escape(url_pattern).replace('__ID_REGEX_PLACEHOLDER__', '(?P<new_id>.+)')
-    if url_pattern.startswith('http:') or url_pattern.startswith('https:'):
+    if url_pattern.startswith(('http:', 'https:')):
         url_pattern = '(?:http|https):' + url_pattern.partition(':')[2]
     new_id = re.compile(url_pattern)
     new_id = new_id.search(text).group('new_id')
@@ -1443,7 +1443,7 @@ class TagsModel(QAbstractItemModel): # {{{
 def key_func(val):
     if order == 'display_name':
         return icu_lower(self.db.field_metadata[val]['name'])
-    return icu_lower(val[1:] if val.startswith('#') or val.startswith('@') else val)
+    return icu_lower(val[1:] if val.startswith(('#', '@')) else val)
 direction = tweaks.get('tag_browser_category_default_sort_direction', 'ascending')
 if direction not in ('ascending', 'descending'):
     print('Tweak tag_browser_category_default_sort_direction is not valid. Ignored')
@@ -203,8 +203,7 @@ def main(args=sys.argv):
 opts, args = parser.parse_args(args)
 oat = opts.open_at
 if oat and not (
-        oat.startswith('toc:') or oat.startswith('toc-href:') or oat.startswith('toc-href-contains:') or
-        oat.startswith('epubcfi(/') or is_float(oat) or oat.startswith('ref:') or oat.startswith('search:') or oat.startswith('regex:')):
+        oat.startswith(('toc:', 'toc-href:', 'toc-href-contains:', 'epubcfi(/', 'ref:', 'search:', 'regex:')) or is_float(oat)):
     raise SystemExit(f'Not a valid --open-at value: {opts.open_at}')

 if not opts.new_instance and get_session_pref('singleinstance', False):
|
@ -112,7 +112,7 @@ def background_image(encoded_fname=''):
|
|||||||
with open(make_long_path_useable(img_path), 'rb') as f:
|
with open(make_long_path_useable(img_path), 'rb') as f:
|
||||||
return mt, f.read()
|
return mt, f.read()
|
||||||
except FileNotFoundError:
|
except FileNotFoundError:
|
||||||
if fname.startswith('https://') or fname.startswith('http://'):
|
if fname.startswith(('https://', 'http://')):
|
||||||
from calibre import browser
|
from calibre import browser
|
||||||
br = browser()
|
br = browser()
|
||||||
try:
|
try:
|
||||||
|
@@ -1012,13 +1012,13 @@ class PythonHighlighter(QSyntaxHighlighter): # {{{
     self.setFormat(0, textLength,
                    self.Formats['normal'])

-if text.startswith('Traceback') or text.startswith('Error: '):
+if text.startswith(('Traceback', 'Error: ')):
     self.setCurrentBlockState(ERROR)
     self.setFormat(0, textLength,
                    self.Formats['error'])
     return
 if prevState == ERROR and \
-        not (text.startswith('>>>') or text.startswith('#')):
+        not (text.startswith(('>>>', '#'))):
     self.setCurrentBlockState(ERROR)
     self.setFormat(0, textLength,
                    self.Formats['error'])
@@ -395,7 +395,7 @@ class ResultCache(SearchQueryParser): # {{{
 elif query in self.local_thismonth:
     qd = now()
     field_count = 2
-elif query.endswith(self.local_daysago) or query.endswith(self.untrans_daysago):
+elif query.endswith((self.local_daysago, self.untrans_daysago)):
     num = query[0:-(self.untrans_daysago_len if query.endswith(self.untrans_daysago) else self.local_daysago_len)]
     try:
         qd = now() - timedelta(int(num))
@@ -677,8 +677,7 @@ class HTTPConnection(HTTPRequest):
 else:
     output = GeneratedOutput(output)
 ct = outheaders.get('Content-Type', '').partition(';')[0]
-compressible = (not ct or ct.startswith('text/') or ct.startswith('image/svg') or
-                ct.partition(';')[0] in COMPRESSIBLE_TYPES)
+compressible = (not ct or ct.startswith(('text/', 'image/svg')) or ct.partition(';')[0] in COMPRESSIBLE_TYPES)
 compressible = (compressible and request.status_code == http_client.OK and
                 (opts.compress_min_size > -1 and output.content_length >= opts.compress_min_size) and
                 acceptable_encoding(request.inheaders.get('Accept-Encoding', '')) and not is_http1)
@@ -65,7 +65,7 @@ def find_tests_in_package(package, excludes=('main.py',)):
 excludes = set(excludes) | {x + 'c' for x in excludes}
 seen = set()
 for x in items:
-    if (x.endswith('.py') or x.endswith('.pyc')) and x not in excludes:
+    if (x.endswith(('.py', '.pyc'))) and x not in excludes:
         q = x.rpartition('.')[0]
         if q in seen:
             continue
@@ -623,7 +623,7 @@ class BasicNewsRecipe(Recipe):
 for key in article.keys():
     if key.endswith('_origlink'):
         url = article[key]
-        if url and (url.startswith('http://') or url.startswith('https://')):
+        if url and (url.startswith(('http://', 'https://'))):
             return url
 ans = article.get('link', None)
 if not ans and getattr(article, 'links', None):