Silence some ResourceWarnings

Kovid Goyal 2021-06-24 08:35:54 +05:30
parent ca8e54cc02
commit f1b3ed07f8
No known key found for this signature in database
GPG Key ID: 06BC317B515ACE7C
3 changed files with 27 additions and 9 deletions
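
For readers unfamiliar with the warning being silenced: CPython emits a ResourceWarning when a file object is garbage collected while still open, but that warning category is ignored by the default filters, so it only surfaces when warnings are enabled (for example with `python -X dev` or `-W error::ResourceWarning`). Below is a minimal stand-alone sketch, not calibre code, that reproduces the kind of warning each change in this commit avoids.

```python
# Minimal sketch, not calibre code: reproduce the kind of ResourceWarning
# this commit silences. The warning is hidden by the default filters, so it
# is enabled explicitly here; running under `python -X dev` has the same effect.
import gc
import warnings

warnings.simplefilter('always', ResourceWarning)

def leaky_read(path):
    # The handle is never closed explicitly; CPython warns when the file
    # object is finalized while still open.
    return open(path, 'rb').read()

leaky_read('/proc/mounts')  # any readable file will do
gc.collect()  # only needed on interpreters without refcounting (e.g. PyPy)
```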

View File

@@ -1528,6 +1528,7 @@ class Cache(object):
         :param run_hooks: If True, file type plugins are run on the format before and after being added.
         :param dbapi: Internal use only.
         '''
+        needs_close = False
         if run_hooks:
             # Run import plugins, the write lock is not held to cater for
             # broken plugins that might spin the event loop by popping up a
@@ -1535,6 +1536,7 @@ class Cache(object):
             npath = run_import_plugins(stream_or_path, fmt)
             fmt = os.path.splitext(npath)[-1].lower().replace('.', '').upper()
             stream_or_path = lopen(npath, 'rb')
+            needs_close = True
             fmt = check_ebook_format(stream_or_path, fmt)

         with self.write_lock:
@@ -1550,8 +1552,17 @@ class Cache(object):
             if name and not replace:
                 return False

-            stream = stream_or_path if hasattr(stream_or_path, 'read') else lopen(stream_or_path, 'rb')
-            size, fname = self._do_add_format(book_id, fmt, stream, name)
+            if hasattr(stream_or_path, 'read'):
+                stream = stream_or_path
+            else:
+                stream = lopen(stream_or_path, 'rb')
+                needs_close = True
+            try:
+                size, fname = self._do_add_format(book_id, fmt, stream, name)
+            finally:
+                if needs_close:
+                    stream.close()
             del stream
             max_size = self.fields['formats'].table.update_fmt(book_id, fmt, fname, size, self.backend)
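
The change above follows a common ownership pattern: the method may receive either an already-open stream or a filesystem path, so it closes only the handle it opened itself, and does so in a finally block so nothing leaks if the format add raises. A simplified sketch of that pattern, with hypothetical names (process, worker) rather than calibre's:

```python
# Simplified sketch of the pattern used above; process() and worker are
# hypothetical names, not calibre API. The handle is closed only if this
# function opened it, and closing happens in finally so an exception in
# worker() cannot leak it.
def process(stream_or_path, worker):
    needs_close = False
    if hasattr(stream_or_path, 'read'):
        stream = stream_or_path              # caller owns this handle
    else:
        stream = open(stream_or_path, 'rb')  # we opened it, so we must close it
        needs_close = True
    try:
        return worker(stream)
    finally:
        if needs_close:
            stream.close()

# Example use: works the same whether given a path or an open file object.
size = process('/proc/mounts', lambda f: len(f.read()))
```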

View File

@@ -20,10 +20,11 @@ def node_mountpoint(node):
         return raw.replace(b'\\040', b' ').replace(b'\\011', b'\t').replace(b'\\012',
                 b'\n').replace(b'\\0134', b'\\').decode('utf-8')

-    for line in open('/proc/mounts', 'rb').readlines():
-        line = line.split()
-        if line[0] == node:
-            return de_mangle(line[1])
+    with open('/proc/mounts', 'rb') as src:
+        for line in src.readlines():
+            line = line.split()
+            if line[0] == node:
+                return de_mangle(line[1])
     return None
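
Here the readlines() loop over /proc/mounts is wrapped in a with statement so the handle is closed even when the function returns from inside the loop, which is the path that previously leaked. A rough stand-alone equivalent, with illustrative names and without the octal-escape de-mangling the real de_mangle helper performs:

```python
# Rough stand-alone equivalent of the change above; names are illustrative
# and the octal-escape de-mangling of the real code is omitted. The with
# block closes /proc/mounts even when we return from inside the loop.
def mountpoint_of(node):
    with open('/proc/mounts', 'rb') as src:
        for line in src:                 # iterate lazily instead of readlines()
            parts = line.split()
            if parts and parts[0] == node:
                return parts[1].decode('utf-8')
    return None

# Example: mountpoint_of(b'/dev/sda1') -> '/boot' (or None if not mounted)
```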

View File

@@ -456,6 +456,12 @@ class Device(DeviceConfig, DevicePlugin):
             q = getattr(detected_device, attr)
             return q == val

+        def getnum(usb_dir):
+            def rc(q):
+                with open(j(usb_dir, q), 'rb') as f:
+                    return raw2num(f.read().decode('utf-8'))
+            return rc
+
         for x, isfile in walk('/sys/devices'):
             if isfile and x.endswith('idVendor'):
                 usb_dir = d(x)
@@ -465,8 +471,7 @@ class Device(DeviceConfig, DevicePlugin):
                     break
             if usb_dir is None:
                 continue
-            e = lambda q : raw2num(open(j(usb_dir, q), 'rb').read().decode('utf-8'))
-            ven, prod, bcd = map(e, ('idVendor', 'idProduct', 'bcdDevice'))
+            ven, prod, bcd = map(getnum(usb_dir), ('idVendor', 'idProduct', 'bcdDevice'))
             if not (test(ven, 'idVendor') and test(prod, 'idProduct') and
                     test(bcd, 'bcdDevice')):
                 usb_dir = None
@@ -487,7 +492,8 @@ class Device(DeviceConfig, DevicePlugin):
                 sz = j(x, 'size')
                 node = parts[idx+1]
                 try:
-                    exists = int(open(sz, 'rb').read().decode('utf-8')) > 0
+                    with open(sz, 'rb') as szf:
+                        exists = int(szf.read().decode('utf-8')) > 0
                     if exists:
                         node = self.find_largest_partition(x)
                         ok[node] = True
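
These last hunks retire a one-line lambda that opened each sysfs attribute file without closing it, replacing it with a small closure (getnum) that reads the attribute inside a with block, and wrap the partition-size read the same way. A minimal sketch of the closure idea, with illustrative names and a plain int(..., 16) standing in for calibre's raw2num helper:

```python
# Minimal sketch of the closure that replaces the file-leaking lambda; the
# names and the plain int(..., 16) conversion are stand-ins for calibre's
# j()/raw2num helpers, since sysfs id files contain bare hex strings.
import os

def make_reader(usb_dir):
    def read_attr(name):
        with open(os.path.join(usb_dir, name), 'rb') as f:
            return int(f.read().decode('utf-8').strip(), 16)
    return read_attr

# Hypothetical usage against a sysfs USB device directory:
# read = make_reader('/sys/bus/usb/devices/1-1')
# vendor, product, bcd = map(read, ('idVendor', 'idProduct', 'bcdDevice'))
```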