#!/usr/bin/env python3

from __future__ import print_function

import argparse
import datetime
import hashlib
import json
import mmap
import os
import shutil
import subprocess
from struct import Struct
import sys

sys.path.append(
  os.path.abspath(os.path.dirname(os.path.abspath(__file__)) + "/../.."))

from zipfile import ZipFile
from lib.config import PLATFORM, get_target_arch, \
  get_zip_name, enable_verbose_mode, get_platform_key
from lib.util import get_electron_branding, execute, get_electron_version, \
  store_artifact, get_electron_exec, get_out_dir, \
  SRC_DIR, ELECTRON_DIR, TS_NODE

ELECTRON_VERSION = 'v' + get_electron_version()

PROJECT_NAME = get_electron_branding()['project_name']
PRODUCT_NAME = get_electron_branding()['product_name']

OUT_DIR = get_out_dir()

DIST_NAME = get_zip_name(PROJECT_NAME, ELECTRON_VERSION)
SYMBOLS_NAME = get_zip_name(PROJECT_NAME, ELECTRON_VERSION, 'symbols')
DSYM_NAME = get_zip_name(PROJECT_NAME, ELECTRON_VERSION, 'dsym')
DSYM_SNAPSHOT_NAME = get_zip_name(PROJECT_NAME, ELECTRON_VERSION,
                                  'dsym-snapshot')
PDB_NAME = get_zip_name(PROJECT_NAME, ELECTRON_VERSION, 'pdb')
DEBUG_NAME = get_zip_name(PROJECT_NAME, ELECTRON_VERSION, 'debug')
TOOLCHAIN_PROFILE_NAME = get_zip_name(PROJECT_NAME, ELECTRON_VERSION,
                                      'toolchain-profile')
CXX_OBJECTS_NAME = get_zip_name(PROJECT_NAME, ELECTRON_VERSION,
                                'libcxx_objects')


def main():
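  """Collect built artifacts from OUT_DIR and upload them, either to the
  GitHub release matching ELECTRON_VERSION or (with --upload_to_storage) to
  the storage bucket. Returns 0 on success, 1 if the tag does not match the
  build version."""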
  args = parse_args()
  if args.verbose:
    enable_verbose_mode()
  if args.upload_to_storage:
    utcnow = datetime.datetime.utcnow()
    args.upload_timestamp = utcnow.strftime('%Y%m%d')

  build_version = get_electron_build_version()
  if not ELECTRON_VERSION.startswith(build_version):
    error = 'Tag name ({0}) should match build version ({1})\n'.format(
        ELECTRON_VERSION, build_version)
    sys.stderr.write(error)
    sys.stderr.flush()
    return 1

  tag_exists = False
  release = get_release(args.version)
  if not release['draft']:
    tag_exists = True

  if not args.upload_to_storage:
    assert release['exists'], \
      'Release does not exist; cannot upload to GitHub!'
    assert tag_exists == args.overwrite, \
      'You have to pass --overwrite to overwrite a published release'

  # Upload Electron files.
  # Rename dist.zip to get_zip_name('electron', version, suffix='')
  electron_zip = os.path.join(OUT_DIR, DIST_NAME)
  shutil.copy2(os.path.join(OUT_DIR, 'dist.zip'), electron_zip)
  upload_electron(release, electron_zip, args)

  symbols_zip = os.path.join(OUT_DIR, SYMBOLS_NAME)
  shutil.copy2(os.path.join(OUT_DIR, 'symbols.zip'), symbols_zip)
  upload_electron(release, symbols_zip, args)

  if PLATFORM == 'darwin':
    if get_platform_key() == 'darwin' and get_target_arch() == 'x64':
      api_path = os.path.join(ELECTRON_DIR, 'electron-api.json')
      upload_electron(release, api_path, args)

      ts_defs_path = os.path.join(ELECTRON_DIR, 'electron.d.ts')
      upload_electron(release, ts_defs_path, args)

    dsym_zip = os.path.join(OUT_DIR, DSYM_NAME)
    shutil.copy2(os.path.join(OUT_DIR, 'dsym.zip'), dsym_zip)
    upload_electron(release, dsym_zip, args)

    dsym_snapshot_zip = os.path.join(OUT_DIR, DSYM_SNAPSHOT_NAME)
    shutil.copy2(os.path.join(OUT_DIR, 'dsym-snapshot.zip'),
                 dsym_snapshot_zip)
    upload_electron(release, dsym_snapshot_zip, args)
  elif PLATFORM == 'win32':
    pdb_zip = os.path.join(OUT_DIR, PDB_NAME)
    shutil.copy2(os.path.join(OUT_DIR, 'pdb.zip'), pdb_zip)
    upload_electron(release, pdb_zip, args)
  elif PLATFORM == 'linux':
    debug_zip = os.path.join(OUT_DIR, DEBUG_NAME)
    shutil.copy2(os.path.join(OUT_DIR, 'debug.zip'), debug_zip)
    upload_electron(release, debug_zip, args)

    # Upload libcxx_objects.zip for linux only
    libcxx_objects = get_zip_name('libcxx-objects', ELECTRON_VERSION)
    libcxx_objects_zip = os.path.join(OUT_DIR, libcxx_objects)
    shutil.copy2(os.path.join(OUT_DIR, 'libcxx_objects.zip'),
                 libcxx_objects_zip)
    upload_electron(release, libcxx_objects_zip, args)

    # Upload headers.zip and abi_headers.zip as non-platform specific
    if get_target_arch() == "x64":
      cxx_headers_zip = os.path.join(OUT_DIR, 'libcxx_headers.zip')
      upload_electron(release, cxx_headers_zip, args)
      abi_headers_zip = os.path.join(OUT_DIR, 'libcxxabi_headers.zip')
      upload_electron(release, abi_headers_zip, args)

  # Upload free version of ffmpeg.
  ffmpeg = get_zip_name('ffmpeg', ELECTRON_VERSION)
  ffmpeg_zip = os.path.join(OUT_DIR, ffmpeg)
  ffmpeg_build_path = os.path.join(SRC_DIR, 'out', 'ffmpeg', 'ffmpeg.zip')
  shutil.copy2(ffmpeg_build_path, ffmpeg_zip)
  upload_electron(release, ffmpeg_zip, args)

  chromedriver = get_zip_name('chromedriver', ELECTRON_VERSION)
  chromedriver_zip = os.path.join(OUT_DIR, chromedriver)
  shutil.copy2(os.path.join(OUT_DIR, 'chromedriver.zip'), chromedriver_zip)
  upload_electron(release, chromedriver_zip, args)

  mksnapshot = get_zip_name('mksnapshot', ELECTRON_VERSION)
  mksnapshot_zip = os.path.join(OUT_DIR, mksnapshot)
  if get_target_arch().startswith('arm') and PLATFORM != 'darwin':
    # Upload the x64 binary for arm/arm64 mksnapshot
    mksnapshot = get_zip_name('mksnapshot', ELECTRON_VERSION, 'x64')
    mksnapshot_zip = os.path.join(OUT_DIR, mksnapshot)

  shutil.copy2(os.path.join(OUT_DIR, 'mksnapshot.zip'), mksnapshot_zip)
  upload_electron(release, mksnapshot_zip, args)

  if PLATFORM == 'linux' and get_target_arch() == 'x64':
    # Upload the hunspell dictionaries only from the linux x64 build
    hunspell_dictionaries_zip = os.path.join(
      OUT_DIR, 'hunspell_dictionaries.zip')
    upload_electron(release, hunspell_dictionaries_zip, args)

  if not tag_exists and not args.upload_to_storage:
    # Upload symbols to symbol server.
    run_python_upload_script('upload-symbols.py')
    if PLATFORM == 'win32':
      run_python_upload_script('upload-node-headers.py', '-v', args.version)

  if PLATFORM == 'win32':
    toolchain_profile_zip = os.path.join(OUT_DIR, TOOLCHAIN_PROFILE_NAME)
    with ZipFile(toolchain_profile_zip, 'w') as myzip:
      myzip.write(
        os.path.join(OUT_DIR, 'windows_toolchain_profile.json'),
        'toolchain_profile.json')
    upload_electron(release, toolchain_profile_zip, args)

  return 0


def parse_args():
  parser = argparse.ArgumentParser(description='upload distribution file')
  parser.add_argument('-v', '--version', help='Specify the version',
                      default=ELECTRON_VERSION)
  parser.add_argument('-o', '--overwrite',
                      help='Overwrite a published release',
                      action='store_true')
  parser.add_argument('-p', '--publish-release',
                      help='Publish the release',
                      action='store_true')
  parser.add_argument('-s', '--upload_to_storage',
                      help='Upload assets to azure bucket',
                      dest='upload_to_storage',
                      action='store_true',
                      default=False,
                      required=False)
  parser.add_argument('--verbose',
                      action='store_true',
                      help='Mooooorreee logs')

  return parser.parse_args()
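
# Example invocations (illustrative only; the version string below is a
# placeholder -- see parse_args above for the full flag set):
#   python script/release/uploaders/upload.py --verbose
#   python script/release/uploaders/upload.py -v v30.0.0 --overwrite
#   python script/release/uploaders/upload.py --upload_to_storage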


def run_python_upload_script(script, *args):
  script_path = os.path.join(
    ELECTRON_DIR, 'script', 'release', 'uploaders', script)
  print(execute([sys.executable, script_path] + list(args)))


def get_electron_build_version():
  if get_target_arch().startswith('arm') or 'CI' in os.environ:
    # In CI we just build as told.
    return ELECTRON_VERSION
  electron = get_electron_exec()
  # check_output returns bytes on Python 3; decode so that the
  # ELECTRON_VERSION.startswith(build_version) check in main() works.
  return subprocess.check_output(
    [electron, '--version']).decode('utf-8').strip()


class NonZipFileError(ValueError):
  """Raised when a given file does not appear to be a zip"""


def zero_zip_date_time(fname):
  """ Wrap strip-zip zero_zip_date_time within a file opening operation """
  try:
    with open(fname, 'r+b') as f:
      _zero_zip_date_time(f)
  except Exception:
    # pylint: disable=W0707
    raise NonZipFileError(fname)


def _zero_zip_date_time(zip_):
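  """Rewrite the archive in place so its contents are reproducible: reset
  last_mod_time/last_mod_date in every local file header and central
  directory header to a fixed epoch value and blank out timestamp and
  UID/GID extra fields. Raises NonZipFileError if no zip headers are
  found."""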
  def purify_extra_data(mm, offset, length, compressed_size=0):
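    """Walk the extra fields of one header: overwrite timestamp and
    UID/GID entries with stripzip filler bytes, and pick up the 8-byte
    compressed size from a ZIP64 entry when present. Returns the
    (possibly corrected) compressed size."""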
    extra_header_struct = Struct("<HH")
    # 0. id
    # 1. length
    STRIPZIP_OPTION_HEADER = 0xFFFF
    EXTENDED_TIME_DATA = 0x5455
    # Some sort of extended time data, see
    # ftp://ftp.info-zip.org/pub/infozip/src/zip30.zip ./proginfo/extrafld.txt
    # fallthrough
    UNIX_EXTRA_DATA = 0x7875
    # Unix extra data; UID / GID stuff, see
    # ftp://ftp.info-zip.org/pub/infozip/src/zip30.zip ./proginfo/extrafld.txt
    ZIP64_EXTRA_HEADER = 0x0001
    zip64_extra_struct = Struct("<HHQQ")
    # ZIP64.
    # When a ZIP64 extra field is present this 8byte length
    # will override the 4byte length defined in canonical zips.
    # This is in the form:
    # - 0x0001 (header_id)
    # - 0x0010 [16] (header_length)
    # - ... (8byte uncompressed_length)
    # - ... (8byte compressed_length)

    mlen = offset + length

    while offset < mlen:
      values = list(extra_header_struct.unpack_from(mm, offset))
      _, header_length = values
      extra_struct = Struct("<HH" + "B" * header_length)
      values = list(extra_struct.unpack_from(mm, offset))
      header_id, header_length = values[:2]

      if header_id in (EXTENDED_TIME_DATA, UNIX_EXTRA_DATA):
        values[0] = STRIPZIP_OPTION_HEADER
        for i in range(2, len(values)):
          values[i] = 0xff
        extra_struct.pack_into(mm, offset, *values)
      if header_id == ZIP64_EXTRA_HEADER:
        assert header_length == 16
        values = list(zip64_extra_struct.unpack_from(mm, offset))
        header_id, header_length, _, compressed_size = values

      offset += extra_header_struct.size + header_length

    return compressed_size

  FILE_HEADER_SIGNATURE = 0x04034b50
  CENDIR_HEADER_SIGNATURE = 0x02014b50

  archive_size = os.fstat(zip_.fileno()).st_size
  signature_struct = Struct("<L")
  local_file_header_struct = Struct("<LHHHHHLLLHH")
  # 0. L signature
  # 1. H version_needed
  # 2. H gp_bits
  # 3. H compression_method
  # 4. H last_mod_time
  # 5. H last_mod_date
  # 6. L crc32
  # 7. L compressed_size
  # 8. L uncompressed_size
  # 9. H name_length
  # 10. H extra_field_length
  central_directory_header_struct = Struct("<LHHHHHHLLLHHHHHLL")
  # 0. L signature
  # 1. H version_made_by
  # 2. H version_needed
  # 3. H gp_bits
  # 4. H compression_method
  # 5. H last_mod_time
  # 6. H last_mod_date
  # 7. L crc32
  # 8. L compressed_size
  # 9. L uncompressed_size
  # 10. H file_name_length
  # 11. H extra_field_length
  # 12. H file_comment_length
  # 13. H disk_number_start
  # 14. H internal_attr
  # 15. L external_attr
  # 16. L rel_offset_local_header
  offset = 0

  mm = mmap.mmap(zip_.fileno(), 0)

  while offset < archive_size:
    if signature_struct.unpack_from(mm, offset) != (FILE_HEADER_SIGNATURE,):
      break
    values = list(local_file_header_struct.unpack_from(mm, offset))
    compressed_size, _, name_length, extra_field_length = values[7:11]
    # reset last_mod_time
    values[4] = 0
    # reset last_mod_date
    values[5] = 0x21
    local_file_header_struct.pack_into(mm, offset, *values)
    offset += local_file_header_struct.size + name_length
    if extra_field_length != 0:
      compressed_size = purify_extra_data(mm, offset, extra_field_length,
                                          compressed_size)
    offset += compressed_size + extra_field_length

  while offset < archive_size:
    if signature_struct.unpack_from(mm, offset) != (CENDIR_HEADER_SIGNATURE,):
      break
    values = list(central_directory_header_struct.unpack_from(mm, offset))
    file_name_length, extra_field_length, file_comment_length = values[10:13]
    # reset last_mod_time
    values[5] = 0
    # reset last_mod_date
    values[6] = 0x21
    central_directory_header_struct.pack_into(mm, offset, *values)
    offset += central_directory_header_struct.size
    offset += file_name_length + extra_field_length + file_comment_length
    if extra_field_length != 0:
      purify_extra_data(mm, offset - extra_field_length, extra_field_length)

  if offset == 0:
    raise NonZipFileError(zip_.name)


def upload_electron(release, file_path, args):
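  """Upload a single artifact: strip zip timestamps in place, then either
  push it to the storage bucket (when --upload_to_storage is set) or attach
  it to the GitHub release, along with a .sha256sum checksum file."""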
  filename = os.path.basename(file_path)

  # Strip zip non determinism before upload, in-place operation
  try:
    zero_zip_date_time(file_path)
  except NonZipFileError:
    pass

  # if upload_to_storage is set, skip github upload.
  # todo (vertedinde): migrate this variable to upload_to_storage
  if args.upload_to_storage:
    key_prefix = 'release-builds/{0}_{1}'.format(args.version,
                                                 args.upload_timestamp)
    store_artifact(os.path.dirname(file_path), key_prefix, [file_path])
    upload_sha256_checksum(args.version, file_path, key_prefix)
    return

  # Upload the file.
  upload_io_to_github(release, filename, file_path, args.version)

  # Upload the checksum file.
  upload_sha256_checksum(args.version, file_path)


def upload_io_to_github(release, filename, filepath, version):
  print('Uploading %s to GitHub' % filename)
  script_path = os.path.join(
    ELECTRON_DIR, 'script', 'release', 'uploaders', 'upload-to-github.ts')
  execute([TS_NODE, script_path, filepath, filename, str(release['id']),
           version])


def upload_sha256_checksum(version, file_path, key_prefix=None):
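  """Write a '<hexdigest> *<filename>' .sha256sum file next to file_path
  and store it under key_prefix (default: checksums-scratchpad/<version>)."""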
  checksum_path = '{}.sha256sum'.format(file_path)
  if key_prefix is None:
    key_prefix = 'checksums-scratchpad/{0}'.format(version)
  sha256 = hashlib.sha256()
  with open(file_path, 'rb') as f:
    sha256.update(f.read())

  filename = os.path.basename(file_path)
  with open(checksum_path, 'w') as checksum:
    checksum.write('{} *{}'.format(sha256.hexdigest(), filename))
  store_artifact(os.path.dirname(checksum_path), key_prefix, [checksum_path])


def get_release(version):
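  """Look up the GitHub release for the given version via
  find-github-release.js and return the parsed JSON object."""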
  script_path = os.path.join(
    ELECTRON_DIR, 'script', 'release', 'find-github-release.js')

  # Strip warnings from stdout to ensure the only output is the desired object
  release_env = os.environ.copy()
  release_env['NODE_NO_WARNINGS'] = '1'
  release_info = execute(['node', script_path, version], release_env)
  release = json.loads(release_info)
  return release


if __name__ == '__main__':
  sys.exit(main())