
chore: fix linter errors (#25996)

David Sanders, 4 years ago
commit c27e5fdbb6

+ 2 - 1
script/lib/patches.py

@@ -12,7 +12,8 @@ def read_patch(patch_dir, patch_filename):
   patch_path = os.path.join(patch_dir, patch_filename)
   with codecs.open(patch_path, encoding='utf-8') as f:
     for l in f.readlines():
-      if not added_filename_line and (l.startswith('diff -') or l.startswith('---')):
+      line_has_correct_start = l.startswith('diff -') or l.startswith('---')
+      if not added_filename_line and line_has_correct_start:
         ret.append('Patch-Filename: {}\n'.format(patch_filename))
         added_filename_line = True
       ret.append(l)
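
This hunk is purely a line-length fix: the compound startswith() test moves into a named boolean. For context, a minimal sketch of the whole function after the change; the lines outside the hunk, the comment, and the return value are assumptions, not copied from the repo.

import codecs
import os

def read_patch(patch_dir, patch_filename):
  # Tag the patch with its filename before the first 'diff -'/'---' header.
  ret = []
  added_filename_line = False
  patch_path = os.path.join(patch_dir, patch_filename)
  with codecs.open(patch_path, encoding='utf-8') as f:
    for l in f.readlines():
      line_has_correct_start = l.startswith('diff -') or l.startswith('---')
      if not added_filename_line and line_has_correct_start:
        ret.append('Patch-Filename: {}\n'.format(patch_filename))
        added_filename_line = True
      ret.append(l)
  return ''.join(ret)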

+ 3 - 1
script/release/uploaders/upload-symbols.py

@@ -46,7 +46,9 @@ def main():
 
   for symbol_file in files:
     print("Generating Sentry src bundle for: " + symbol_file)
-    subprocess.check_output([NPX_CMD, '@sentry/[email protected]', 'difutil', 'bundle-sources', symbol_file])
+    subprocess.check_output([
+      NPX_CMD, '@sentry/[email protected]', 'difutil', 'bundle-sources',
+      symbol_file])
 
   files += glob.glob(SYMBOLS_DIR + '/*/*/*.src.zip')
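
Same treatment: the npx invocation of sentry-cli's difutil bundle-sources step is wrapped across lines. The pinned package version in the original is not recoverable from this page, so this hedged, self-contained sketch uses the unpinned package name and a hypothetical symbol path.

import subprocess

NPX_CMD = 'npx'  # assumed; the real script defines NPX_CMD elsewhere
symbol_file = 'electron.breakpad.sym'  # hypothetical input file
subprocess.check_output([
  NPX_CMD, '@sentry/cli', 'difutil', 'bundle-sources',
  symbol_file])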
 

+ 19 - 14
script/release/uploaders/upload.py

@@ -36,7 +36,8 @@ SYMBOLS_NAME = get_zip_name(PROJECT_NAME, ELECTRON_VERSION, 'symbols')
 DSYM_NAME = get_zip_name(PROJECT_NAME, ELECTRON_VERSION, 'dsym')
 PDB_NAME = get_zip_name(PROJECT_NAME, ELECTRON_VERSION, 'pdb')
 DEBUG_NAME = get_zip_name(PROJECT_NAME, ELECTRON_VERSION, 'debug')
-TOOLCHAIN_PROFILE_NAME = get_zip_name(PROJECT_NAME, ELECTRON_VERSION, 'toolchain-profile')
+TOOLCHAIN_PROFILE_NAME = get_zip_name(PROJECT_NAME, ELECTRON_VERSION,
+                                      'toolchain-profile')
 
 
 def main():
@@ -59,7 +60,8 @@ def main():
     tag_exists = True
 
   if not args.upload_to_s3:
-    assert release['exists'], 'Release does not exist; cannot upload to GitHub!'
+    assert release['exists'], \
+          'Release does not exist; cannot upload to GitHub!'
     assert tag_exists == args.overwrite, \
           'You have to pass --overwrite to overwrite a published release'
 
@@ -114,7 +116,8 @@ def main():
 
   if PLATFORM == 'linux' and get_target_arch() == 'x64':
     # Upload the hunspell dictionaries only from the linux x64 build
-    hunspell_dictionaries_zip = os.path.join(OUT_DIR, 'hunspell_dictionaries.zip')
+    hunspell_dictionaries_zip = os.path.join(
+      OUT_DIR, 'hunspell_dictionaries.zip')
     upload_electron(release, hunspell_dictionaries_zip, args)
 
   if not tag_exists and not args.upload_to_s3:
@@ -126,7 +129,9 @@ def main():
   if PLATFORM == 'win32':
     toolchain_profile_zip = os.path.join(OUT_DIR, TOOLCHAIN_PROFILE_NAME)
     with ZipFile(toolchain_profile_zip, 'w') as myzip:
-      myzip.write(os.path.join(OUT_DIR, 'windows_toolchain_profile.json'), 'toolchain_profile.json')
+      myzip.write(
+        os.path.join(OUT_DIR, 'windows_toolchain_profile.json'),
+        'toolchain_profile.json')
     upload_electron(release, toolchain_profile_zip, args)
 
 
@@ -170,12 +175,10 @@ class NonZipFileError(ValueError):
 def zero_zip_date_time(fname):
   """ Wrap strip-zip zero_zip_date_time within a file opening operation """
   try:
-    zip = open(fname, 'r+b')
-    _zero_zip_date_time(zip)
+    with open(fname, 'r+b') as f:
+      _zero_zip_date_time(f)
   except:
     raise NonZipFileError(fname)
-  finally:
-    zip.close()
 
 
 def _zero_zip_date_time(zip_):
@@ -208,7 +211,7 @@ def _zero_zip_date_time(zip_):
       _, header_length = values
       extra_struct = Struct("<HH" + "B" * header_length)
       values = list(extra_struct.unpack_from(mm, offset))
-      header_id, header_length, rest = values[0], values[1], values[2:]
+      header_id, header_length = values[:2]
 
       if header_id in (EXTENDED_TIME_DATA, UNIX_EXTRA_DATA):
         values[0] = STRIPZIP_OPTION_HEADER
@@ -218,7 +221,7 @@ def _zero_zip_date_time(zip_):
       if header_id == ZIP64_EXTRA_HEADER:
         assert header_length == 16
         values = list(zip64_extra_struct.unpack_from(mm, offset))
-        header_id, header_length, uncompressed_size, compressed_size = values
+        header_id, header_length, _, compressed_size = values
 
       offset += extra_header_struct.size + header_length
 
@@ -266,7 +269,7 @@ def _zero_zip_date_time(zip_):
     if signature_struct.unpack_from(mm, offset) != (FILE_HEADER_SIGNATURE,):
       break
     values = list(local_file_header_struct.unpack_from(mm, offset))
-    _, _, _, _, _, _, _, compressed_size, _, name_length, extra_field_length = values
+    compressed_size, _, name_length, extra_field_length = values[7:11]
     # reset last_mod_time
     values[4] = 0
     # reset last_mod_date
@@ -274,20 +277,22 @@ def _zero_zip_date_time(zip_):
     local_file_header_struct.pack_into(mm, offset, *values)
     offset += local_file_header_struct.size + name_length
     if extra_field_length != 0:
-      compressed_size = purify_extra_data(mm, offset, extra_field_length, compressed_size)
+      compressed_size = purify_extra_data(mm, offset, extra_field_length,
+                                          compressed_size)
     offset += compressed_size + extra_field_length
 
   while offset < archive_size:
     if signature_struct.unpack_from(mm, offset) != (CENDIR_HEADER_SIGNATURE,):
       break
     values = list(central_directory_header_struct.unpack_from(mm, offset))
-    _, _, _, _, _, _, _, _, _, _, file_name_length, extra_field_length, file_comment_length, _, _, _, _ = values
+    file_name_length, extra_field_length, file_comment_length = values[10:13]
     # reset last_mod_time
     values[5] = 0
     # reset last_mod_date
     values[6] = 0x21
     central_directory_header_struct.pack_into(mm, offset, *values)
-    offset += central_directory_header_struct.size + file_name_length + extra_field_length + file_comment_length
+    offset += central_directory_header_struct.size
+    offset += file_name_length + extra_field_length + file_comment_length
     if extra_field_length != 0:
       purify_extra_data(mm, offset - extra_field_length, extra_field_length)
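
Beyond wrapping (the assert continuation, the hunspell and toolchain-profile paths, and the ZipFile write that stores windows_toolchain_profile.json under the archive name toolchain_profile.json), this file gets two real cleanups. zero_zip_date_time now uses a with block, which also fixes a latent bug: if open() itself raised, the old finally clause hit a NameError on the unbound zip variable. And the long tuple unpacks of struct values become slices that name only the fields actually used. A sketch of the slicing idiom follows; the header layout is a plausible zip local-file-header format, not copied from the script.

from struct import Struct

# signature, version, flags, method, mod_time, mod_date, crc32,
# compressed_size, uncompressed_size, name_length, extra_field_length
local_file_header = Struct('<IHHHHHIIIHH')
data = local_file_header.pack(0x04034b50, 20, 0, 0, 0, 0x21,
                              0, 42, 42, 8, 0)
values = list(local_file_header.unpack_from(data, 0))
# Pick fields 7..10 by slice instead of unpacking all eleven columns.
compressed_size, _, name_length, extra_field_length = values[7:11]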
 

+ 3 - 1
script/strip-binaries.py

@@ -22,7 +22,9 @@ def strip_binary(binary_path, target_cpu):
     strip = 'mips64el-redhat-linux-strip'
   else:
     strip = 'strip'
-  execute([strip, '--discard-all', '--strip-debug', '--preserve-dates', binary_path])
+  execute([
+    strip, '--discard-all', '--strip-debug', '--preserve-dates',
+    binary_path])
 
 def main():
   args = parse_args()
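
Here too, only the call is re-wrapped. execute() is a repo helper imported by this script; a minimal stand-in (assumed, not the actual implementation) makes the snippet below self-contained, and the binary path is hypothetical.

import subprocess

def execute(argv):
  # Assumed equivalent of the repo helper: run and return captured output.
  return subprocess.check_output(argv, stderr=subprocess.STDOUT)

execute([
  'strip', '--discard-all', '--strip-debug', '--preserve-dates',
  'out/Release/electron'])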

+ 7 - 2
script/update-external-binaries.py

@@ -20,7 +20,10 @@ def parse_args():
 
   parser.add_argument('--base-url', required=False,
                       help="Base URL for all downloads")
-  parser.add_argument('--force', action='store_true', default=False, required=False)
+  parser.add_argument('--force',
+                      action='store_true',
+                      default=False,
+                      required=False)
 
   return parser.parse_args()
 
@@ -111,7 +114,9 @@ def download_binary(base_url, sha, binary_name, attempt=3):
 def validate_sha(file_path, sha):
   downloaded_sha = sha256(file_path)
   if downloaded_sha != sha:
-    raise Exception("SHA for external binary file {} does not match expected '{}' != '{}'".format(file_path, downloaded_sha, sha))
+    raise Exception("SHA for external binary file {} does not match "
+                    "expected '{}' != '{}'".format(
+                      file_path, downloaded_sha, sha))
 
 
 def download_to_temp_dir(url, filename, sha):
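
The --force flag gains one keyword argument per line, and validate_sha's long message is reflowed with implicit string concatenation plus a wrapped format() call; behavior is unchanged. The sha256() helper it calls lives elsewhere in the repo; a minimal equivalent (assumed, not the actual implementation) is:

import hashlib

def sha256(file_path):
  # Hash the file in chunks so large binaries aren't read into memory at once.
  h = hashlib.sha256()
  with open(file_path, 'rb') as f:
    for chunk in iter(lambda: f.read(65536), b''):
      h.update(chunk)
  return h.hexdigest()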

+ 3 - 1
script/verify-chromedriver.py

@@ -33,7 +33,9 @@ def main():
 
   returncode = 0
   match = re.search(
-    '^Starting ChromeDriver [0-9]+.[0-9]+.[0-9]+.[0-9]+ .* on port [0-9]+$', output)
+    '^Starting ChromeDriver [0-9]+.[0-9]+.[0-9]+.[0-9]+ .* on port [0-9]+$',
+    output
+  )
 
   if match is None:
     returncode = 1
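
The regex literal and its subject simply move onto separate lines. For reference, a self-contained run of the same pattern against a made-up ChromeDriver banner; real output puts a build hash where the '.*' matches, and the sample line here is an assumption.

import re

output = 'Starting ChromeDriver 87.0.4280.88 (abc123) on port 9515'
match = re.search(
  '^Starting ChromeDriver [0-9]+.[0-9]+.[0-9]+.[0-9]+ .* on port [0-9]+$',
  output
)
# Mirrors the script's returncode logic: 0 when the banner matched.
print(1 if match is None else 0)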