Address some pylint issues

This commit is contained in:
Disassembler 2020-05-02 21:37:34 +02:00
parent bb694671f2
commit ede631b8e1
No known key found for this signature in database
GPG Key ID: 524BD33A0EE29499
4 changed files with 18 additions and 15 deletions

View File

@@ -21,7 +21,7 @@ class ActionItem:
time.sleep(0.2)
self.print_progress()
# Get the result of the future and let it raise exception, if there was any
data = future.result()
future.result()
self.print_progress('\n')
def print_progress(self, end='\r'):

View File

@@ -8,8 +8,8 @@ from contextlib import contextmanager
@contextmanager
def lock(lock_file, fail_callback=None):
# Open the lock file in append mode first to ensure its existence but not modify any data if it already exists
with open(lock_file, 'a'):
# Open the lock file in append mode first to ensure its existence but not modify any data if it already exists
pass
# Open the lock file in read + write mode without truncation
with open(lock_file, 'r+') as f:

View File

@@ -28,7 +28,7 @@ def load_leases():
leases = [lease.strip().split(None, 1) for lease in f]
leases = {ip: hostname for ip, hostname in leases}
mtime = file_mtime
except:
except FileNotFoundError:
interface = get_bridge_interface()
leases = {str(interface.ip): 'host'}

View File

@@ -52,18 +52,21 @@ def download_archive(archive_url, archive_path, expected_hash, observer=None):
# If the signature is invalid, redownload the file
pass
if do_download:
# Download archive via http(s) and store in temporary directory
with open(archive_path, 'wb') as f, requests.Session() as session:
resource = session.get(archive_url, stream=True)
resource.raise_for_status()
if observer:
for chunk in resource.iter_content(chunk_size=64*1024):
if chunk:
observer.units_done += f.write(chunk)
else:
for chunk in resource.iter_content(chunk_size=64*1024):
if chunk:
f.write(chunk)
do_download_archive(archive_url, archive_path, expected_hash, observer)
def do_download_archive(archive_url, archive_path, expected_hash, observer=None):
    """Download an archive via http(s) and store it at archive_path.

    :param archive_url: URL of the archive to fetch.
    :param archive_path: local filesystem path the archive is written to.
    :param expected_hash: not used in this function body as shown; presumably
        kept for interface parity with the download_archive caller — TODO confirm.
    :param observer: optional progress tracker; when given, its ``units_done``
        counter is incremented by the number of bytes written.
    :raises requests.HTTPError: if the server responds with an error status.
    """
    with open(archive_path, 'wb') as f, requests.Session() as session:
        resource = session.get(archive_url, stream=True)
        resource.raise_for_status()
        # Single streaming loop; the original duplicated it verbatim for the
        # observer / no-observer cases, differing only in the accounting line.
        for chunk in resource.iter_content(chunk_size=64*1024):
            # iter_content may yield empty keep-alive chunks — skip them.
            if chunk:
                written = f.write(chunk)
                if observer is not None:
                    observer.units_done += written
def unpack_archive(archive_path, destination, expected_hash, observer):
with open(archive_path, 'rb') as f: