@@ -36,7 +36,8 @@ SYMBOLS_NAME = get_zip_name(PROJECT_NAME, ELECTRON_VERSION, 'symbols')
DSYM_NAME = get_zip_name(PROJECT_NAME, ELECTRON_VERSION, 'dsym')
PDB_NAME = get_zip_name(PROJECT_NAME, ELECTRON_VERSION, 'pdb')
DEBUG_NAME = get_zip_name(PROJECT_NAME, ELECTRON_VERSION, 'debug')
-TOOLCHAIN_PROFILE_NAME = get_zip_name(PROJECT_NAME, ELECTRON_VERSION, 'toolchain-profile')
+TOOLCHAIN_PROFILE_NAME = get_zip_name(PROJECT_NAME, ELECTRON_VERSION,
+ 'toolchain-profile')
 
 
def main():
@@ -59,7 +60,8 @@ def main():
tag_exists = True
 
if not args.upload_to_s3:
- assert release['exists'], 'Release does not exist; cannot upload to GitHub!'
+ assert release['exists'], \
+ 'Release does not exist; cannot upload to GitHub!'
assert tag_exists == args.overwrite, \
'You have to pass --overwrite to overwrite a published release'
@@ -114,7 +116,8 @@ def main():
if PLATFORM == 'linux' and get_target_arch() == 'x64':
# Upload the hunspell dictionaries only from the linux x64 build
- hunspell_dictionaries_zip = os.path.join(OUT_DIR, 'hunspell_dictionaries.zip')
+ hunspell_dictionaries_zip = os.path.join(
+ OUT_DIR, 'hunspell_dictionaries.zip')
upload_electron(release, hunspell_dictionaries_zip, args)
 
if not tag_exists and not args.upload_to_s3:
@@ -126,7 +129,9 @@ def main():
if PLATFORM == 'win32':
toolchain_profile_zip = os.path.join(OUT_DIR, TOOLCHAIN_PROFILE_NAME)
with ZipFile(toolchain_profile_zip, 'w') as myzip:
- myzip.write(os.path.join(OUT_DIR, 'windows_toolchain_profile.json'), 'toolchain_profile.json')
+ myzip.write(
+ os.path.join(OUT_DIR, 'windows_toolchain_profile.json'),
+ 'toolchain_profile.json')
upload_electron(release, toolchain_profile_zip, args)
@@ -170,12 +175,10 @@ class NonZipFileError(ValueError):
def zero_zip_date_time(fname):
""" Wrap strip-zip zero_zip_date_time within a file opening operation """
try:
- zip = open(fname, 'r+b')
- _zero_zip_date_time(zip)
+ with open(fname, 'r+b') as f:
+ _zero_zip_date_time(f)
except:
raise NonZipFileError(fname)
- finally:
- zip.close()
 
 
def _zero_zip_date_time(zip_):
@@ -208,7 +211,7 @@ def _zero_zip_date_time(zip_):
_, header_length = values
extra_struct = Struct("<HH" + "B" * header_length)
values = list(extra_struct.unpack_from(mm, offset))
- header_id, header_length, rest = values[0], values[1], values[2:]
+ header_id, header_length = values[:2]
 
if header_id in (EXTENDED_TIME_DATA, UNIX_EXTRA_DATA):
values[0] = STRIPZIP_OPTION_HEADER
@@ -218,7 +221,7 @@ def _zero_zip_date_time(zip_):
if header_id == ZIP64_EXTRA_HEADER:
assert header_length == 16
values = list(zip64_extra_struct.unpack_from(mm, offset))
- header_id, header_length, uncompressed_size, compressed_size = values
+ header_id, header_length, _, compressed_size = values
 
offset += extra_header_struct.size + header_length
@@ -266,7 +269,7 @@ def _zero_zip_date_time(zip_):
if signature_struct.unpack_from(mm, offset) != (FILE_HEADER_SIGNATURE,):
break
values = list(local_file_header_struct.unpack_from(mm, offset))
- _, _, _, _, _, _, _, compressed_size, _, name_length, extra_field_length = values
+ compressed_size, _, name_length, extra_field_length = values[7:11]
# reset last_mod_time
values[4] = 0
# reset last_mod_date
@@ -274,20 +277,22 @@ def _zero_zip_date_time(zip_):
local_file_header_struct.pack_into(mm, offset, *values)
offset += local_file_header_struct.size + name_length
if extra_field_length != 0:
- compressed_size = purify_extra_data(mm, offset, extra_field_length, compressed_size)
+ compressed_size = purify_extra_data(mm, offset, extra_field_length,
+ compressed_size)
offset += compressed_size + extra_field_length
 
while offset < archive_size:
if signature_struct.unpack_from(mm, offset) != (CENDIR_HEADER_SIGNATURE,):
break
values = list(central_directory_header_struct.unpack_from(mm, offset))
- _, _, _, _, _, _, _, _, _, _, file_name_length, extra_field_length, file_comment_length, _, _, _, _ = values
+ file_name_length, extra_field_length, file_comment_length = values[10:13]
# reset last_mod_time
values[5] = 0
# reset last_mod_date
values[6] = 0x21
central_directory_header_struct.pack_into(mm, offset, *values)
- offset += central_directory_header_struct.size + file_name_length + extra_field_length + file_comment_length
+ offset += central_directory_header_struct.size
+ offset += file_name_length + extra_field_length + file_comment_length
if extra_field_length != 0:
purify_extra_data(mm, offset - extra_field_length, extra_field_length)
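
Note on the zero_zip_date_time hunk above: swapping the explicit open()/close() pair for a context manager also removes a latent failure mode, since the old finally clause would raise NameError (masking NonZipFileError) whenever open() itself failed and zip was never bound. A minimal standalone sketch of the same pattern, with a placeholder mutate() callback standing in for _zero_zip_date_time and a narrowed except clause instead of the script's bare except:

class NonZipFileError(ValueError):
  """ Placeholder copy of the error class used in the patch above """

def rewrite_zip_in_place(fname, mutate):
  # mutate(f) stands in for _zero_zip_date_time(f); the with-block closes
  # the file on every path, even when mutate() raises.
  try:
    with open(fname, 'r+b') as f:
      mutate(f)
  except OSError:
    raise NonZipFileError(fname)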
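
For the header-unpacking hunks, the slice indices match the ZIP local file header and central directory header layouts with the 4-byte signature counted as field 0; that is why values[7:11] yields the compressed size, uncompressed size, name length and extra-field length, and values[10:13] yields the three trailing length fields of the central directory record. A small reference sketch of the assumed field order (illustrative only, not part of the patch):

# Field order assumed by the unpacking above; names follow the ZIP
# application note, with the signature counted as field 0.
LOCAL_FILE_HEADER_FIELDS = (
  'signature', 'version_needed', 'flags', 'compression_method',
  'last_mod_time', 'last_mod_date', 'crc32',
  'compressed_size', 'uncompressed_size',
  'file_name_length', 'extra_field_length',
)  # values[7:11] -> compressed_size, _, name_length, extra_field_length

CENTRAL_DIRECTORY_HEADER_FIELDS = (
  'signature', 'version_made_by', 'version_needed', 'flags',
  'compression_method', 'last_mod_time', 'last_mod_date', 'crc32',
  'compressed_size', 'uncompressed_size',
  'file_name_length', 'extra_field_length', 'file_comment_length',
  'disk_number_start', 'internal_attributes', 'external_attributes',
  'local_header_offset',
)  # values[10:13] -> file_name_length, extra_field_length, file_comment_length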