#!/usr/bin/env node

if (!process.env.CI) require('dotenv-safe').load();

const chalk = require('chalk');
const args = require('minimist')(process.argv.slice(2), {
  boolean: [
    'validateRelease',
    'verboseNugget'
  ],
  default: { verboseNugget: false }
});
const fs = require('node:fs');
const { execSync } = require('node:child_process');
const got = require('got');
const path = require('node:path');
const semver = require('semver');
const temp = require('temp').track();

const { BlobServiceClient } = require('@azure/storage-blob');
const { Octokit } = require('@octokit/rest');

const pass = chalk.green('✓');
const fail = chalk.red('✗');

const { ELECTRON_DIR } = require('../lib/utils');
const { getElectronVersion } = require('../lib/get-version');
const getUrlHash = require('./get-url-hash');
const { createGitHubTokenStrategy } = require('./github-token');

const pkgVersion = `v${getElectronVersion()}`;

function getRepo () {
  return pkgVersion.indexOf('nightly') > 0 ? 'nightlies' : 'electron';
}

const targetRepo = getRepo();
let failureCount = 0;

const octokit = new Octokit({
  authStrategy: createGitHubTokenStrategy(targetRepo)
});
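
// Look up the draft release on GitHub whose tag matches the requested version
// (defaulting to the version in package.json) and optionally sanity-check it.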
async function getDraftRelease (version, skipValidation) {
  const releaseInfo = await octokit.repos.listReleases({
    owner: 'electron',
    repo: targetRepo
  });

  const versionToCheck = version || pkgVersion;
  const drafts = releaseInfo.data.filter(release => {
    return release.tag_name === versionToCheck && release.draft === true;
  });

  const draft = drafts[0];
  if (!skipValidation) {
    failureCount = 0;
    check(drafts.length === 1, 'one draft exists', true);
    if (versionToCheck.includes('beta')) {
      check(draft.prerelease, 'draft is a prerelease');
    }
    check(draft.body.length > 50 && !draft.body.includes('(placeholder)'), 'draft has release notes');
    check((failureCount === 0), 'Draft release looks good to go.', true);
  }
  return draft;
}
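
// Check that every expected asset is attached to the release, then verify the
// published checksums against freshly computed hashes of the remote files.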
async function validateReleaseAssets (release, validatingRelease) {
  const requiredAssets = assetsForVersion(release.tag_name, validatingRelease).sort();
  const extantAssets = release.assets.map(asset => asset.name).sort();
  const downloadUrls = release.assets.map(asset => ({ url: asset.browser_download_url, file: asset.name })).sort((a, b) => a.file.localeCompare(b.file));

  failureCount = 0;
  for (const asset of requiredAssets) {
    check(extantAssets.includes(asset), asset);
  }
  check((failureCount === 0), 'All required GitHub assets exist for release', true);

  if (!validatingRelease || !release.draft) {
    if (release.draft) {
      await verifyDraftGitHubReleaseAssets(release);
    } else {
      await verifyShasumsForRemoteFiles(downloadUrls)
        .catch(err => {
          console.error(`${fail} error verifying shasums`, err);
        });
    }
    const azRemoteFiles = azRemoteFilesForVersion(release.tag_name);
    await verifyShasumsForRemoteFiles(azRemoteFiles, true);
  }
}
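
// Log a pass/fail line for a single validation step; optionally abort the
// release process on failure.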
function check (condition, statement, exitIfFail = false) {
  if (condition) {
    console.log(`${pass} ${statement}`);
  } else {
    failureCount++;
    console.error(`${fail} ${statement}`);
    if (exitIfFail) process.exit(1);
  }
}
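
// The full list of GitHub release assets expected for a given version.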
function assetsForVersion (version, validatingRelease) {
  const patterns = [
    `chromedriver-${version}-darwin-x64.zip`,
    `chromedriver-${version}-darwin-arm64.zip`,
    `chromedriver-${version}-linux-arm64.zip`,
    `chromedriver-${version}-linux-armv7l.zip`,
    `chromedriver-${version}-linux-x64.zip`,
    `chromedriver-${version}-mas-x64.zip`,
    `chromedriver-${version}-mas-arm64.zip`,
    `chromedriver-${version}-win32-ia32.zip`,
    `chromedriver-${version}-win32-x64.zip`,
    `chromedriver-${version}-win32-arm64.zip`,
    `electron-${version}-darwin-x64-dsym.zip`,
    `electron-${version}-darwin-x64-dsym-snapshot.zip`,
    `electron-${version}-darwin-x64-symbols.zip`,
    `electron-${version}-darwin-x64.zip`,
    `electron-${version}-darwin-arm64-dsym.zip`,
    `electron-${version}-darwin-arm64-dsym-snapshot.zip`,
    `electron-${version}-darwin-arm64-symbols.zip`,
    `electron-${version}-darwin-arm64.zip`,
    `electron-${version}-linux-arm64-symbols.zip`,
    `electron-${version}-linux-arm64.zip`,
    `electron-${version}-linux-armv7l-symbols.zip`,
    `electron-${version}-linux-armv7l.zip`,
    `electron-${version}-linux-x64-debug.zip`,
    `electron-${version}-linux-x64-symbols.zip`,
    `electron-${version}-linux-x64.zip`,
    `electron-${version}-mas-x64-dsym.zip`,
    `electron-${version}-mas-x64-dsym-snapshot.zip`,
    `electron-${version}-mas-x64-symbols.zip`,
    `electron-${version}-mas-x64.zip`,
    `electron-${version}-mas-arm64-dsym.zip`,
    `electron-${version}-mas-arm64-dsym-snapshot.zip`,
    `electron-${version}-mas-arm64-symbols.zip`,
    `electron-${version}-mas-arm64.zip`,
    `electron-${version}-win32-ia32-pdb.zip`,
    `electron-${version}-win32-ia32-symbols.zip`,
    `electron-${version}-win32-ia32.zip`,
    `electron-${version}-win32-x64-pdb.zip`,
    `electron-${version}-win32-x64-symbols.zip`,
    `electron-${version}-win32-x64.zip`,
    `electron-${version}-win32-arm64-pdb.zip`,
    `electron-${version}-win32-arm64-symbols.zip`,
    `electron-${version}-win32-arm64.zip`,
    'electron-api.json',
    'electron.d.ts',
    'hunspell_dictionaries.zip',
    'libcxx_headers.zip',
    'libcxxabi_headers.zip',
    `libcxx-objects-${version}-linux-arm64.zip`,
    `libcxx-objects-${version}-linux-armv7l.zip`,
    `libcxx-objects-${version}-linux-x64.zip`,
    `ffmpeg-${version}-darwin-x64.zip`,
    `ffmpeg-${version}-darwin-arm64.zip`,
    `ffmpeg-${version}-linux-arm64.zip`,
    `ffmpeg-${version}-linux-armv7l.zip`,
    `ffmpeg-${version}-linux-x64.zip`,
    `ffmpeg-${version}-mas-x64.zip`,
    `ffmpeg-${version}-mas-arm64.zip`,
    `ffmpeg-${version}-win32-ia32.zip`,
    `ffmpeg-${version}-win32-x64.zip`,
    `ffmpeg-${version}-win32-arm64.zip`,
    `mksnapshot-${version}-darwin-x64.zip`,
    `mksnapshot-${version}-darwin-arm64.zip`,
    `mksnapshot-${version}-linux-arm64-x64.zip`,
    `mksnapshot-${version}-linux-armv7l-x64.zip`,
    `mksnapshot-${version}-linux-x64.zip`,
    `mksnapshot-${version}-mas-x64.zip`,
    `mksnapshot-${version}-mas-arm64.zip`,
    `mksnapshot-${version}-win32-ia32.zip`,
    `mksnapshot-${version}-win32-x64.zip`,
    `mksnapshot-${version}-win32-arm64-x64.zip`,
    `electron-${version}-win32-ia32-toolchain-profile.zip`,
    `electron-${version}-win32-x64-toolchain-profile.zip`,
    `electron-${version}-win32-arm64-toolchain-profile.zip`
  ];
  if (!validatingRelease) {
    patterns.push('SHASUMS256.txt');
  }
  return patterns;
}
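
// Node.js header and library artifacts expected on artifacts.electronjs.org for a given version.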
const cloudStoreFilePaths = (version) => [
  `iojs-${version}-headers.tar.gz`,
  `iojs-${version}.tar.gz`,
  `node-${version}.tar.gz`,
  'node.lib',
  'x64/node.lib',
  'win-x64/iojs.lib',
  'win-x86/iojs.lib',
  'win-arm64/iojs.lib',
  'win-x64/node.lib',
  'win-x86/node.lib',
  'win-arm64/node.lib',
  'arm64/node.lib',
  'SHASUMS.txt',
  'SHASUMS256.txt'
];
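
// Map the expected artifact paths to their URLs on the artifacts.electronjs.org CDN.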
function azRemoteFilesForVersion (version) {
  const azCDN = 'https://artifacts.electronjs.org/headers/';
  const versionPrefix = `${azCDN}dist/${version}/`;
  return cloudStoreFilePaths(version).map((filePath) => ({
    file: filePath,
    url: `${versionPrefix}${filePath}`
  }));
}
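
// Run a helper script synchronously, exiting the release process if it fails.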
function runScript (scriptName, scriptArgs, cwd) {
  const scriptCommand = `${scriptName} ${scriptArgs.join(' ')}`;
  const scriptOptions = {
    encoding: 'UTF-8'
  };
  if (cwd) scriptOptions.cwd = cwd;
  try {
    return execSync(scriptCommand, scriptOptions);
  } catch (err) {
    console.error(`${fail} Error running ${scriptName}`, err);
    process.exit(1);
  }
}
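
// Upload the Node SHASUMS file via the upload-node-checksums.py helper script.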
function uploadNodeShasums () {
  console.log('Uploading Node SHASUMS file to artifacts.electronjs.org.');
  const scriptPath = path.join(ELECTRON_DIR, 'script', 'release', 'uploaders', 'upload-node-checksums.py');
  runScript(scriptPath, ['-v', pkgVersion]);
  console.log(`${pass} Done uploading Node SHASUMS file to artifacts.electronjs.org.`);
}
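
// Upload index.json via the upload-index-json.py helper script.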
function uploadIndexJson () {
  console.log('Uploading index.json to artifacts.electronjs.org.');
  const scriptPath = path.join(ELECTRON_DIR, 'script', 'release', 'uploaders', 'upload-index-json.py');
  runScript(scriptPath, [pkgVersion]);
  console.log(`${pass} Done uploading index.json to artifacts.electronjs.org.`);
}
async function mergeShasums (pkgVersion) {
  // Download individual checksum files for Electron zip files from artifact storage,
  // concatenate them, and upload to GitHub.
  const connectionString = process.env.ELECTRON_ARTIFACTS_BLOB_STORAGE;
  if (!connectionString) {
    throw new Error('Please set the $ELECTRON_ARTIFACTS_BLOB_STORAGE environment variable');
  }

  const blobServiceClient = BlobServiceClient.fromConnectionString(connectionString);
  const containerClient = blobServiceClient.getContainerClient('checksums-scratchpad');
  const blobsIter = containerClient.listBlobsFlat({
    prefix: `${pkgVersion}/`
  });
  const shasums = [];
  for await (const blob of blobsIter) {
    if (blob.name.endsWith('.sha256sum')) {
      const blobClient = containerClient.getBlockBlobClient(blob.name);
      const response = await blobClient.downloadToBuffer();
      shasums.push(response.toString('ascii').trim());
    }
  }
  return shasums.join('\n');
}
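
// Build SHASUMS256.txt from the per-file checksums in blob storage and attach it
// to the release, replacing any copy that is already there.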
async function createReleaseShasums (release) {
  const fileName = 'SHASUMS256.txt';
  const existingAssets = release.assets.filter(asset => asset.name === fileName);
  if (existingAssets.length > 0) {
    console.log(`${fileName} already exists on GitHub; deleting before creating new file.`);
    await octokit.repos.deleteReleaseAsset({
      owner: 'electron',
      repo: targetRepo,
      asset_id: existingAssets[0].id
    }).catch(err => {
      console.error(`${fail} Error deleting ${fileName} on GitHub:`, err);
      process.exit(1);
    });
  }
  console.log(`Creating and uploading the release ${fileName}.`);
  const checksums = await mergeShasums(pkgVersion);

  console.log(`${pass} Generated release SHASUMS.`);
  const filePath = await saveShaSumFile(checksums, fileName);

  console.log(`${pass} Created ${fileName} file.`);
  await uploadShasumFile(filePath, fileName, release.id);

  console.log(`${pass} Successfully uploaded ${fileName} to GitHub.`);
}
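
// Upload the generated SHASUMS file as a release asset on GitHub.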
async function uploadShasumFile (filePath, fileName, releaseId) {
  const uploadUrl = `https://uploads.github.com/repos/electron/${targetRepo}/releases/${releaseId}/assets{?name,label}`;
  return octokit.repos.uploadReleaseAsset({
    url: uploadUrl,
    headers: {
      'content-type': 'text/plain',
      'content-length': fs.statSync(filePath).size
    },
    data: fs.createReadStream(filePath),
    name: fileName
  }).catch(err => {
    console.error(`${fail} Error uploading ${filePath} to GitHub:`, err);
    process.exit(1);
  });
}
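
// Write the merged checksums to a temporary file and resolve with its path.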
function saveShaSumFile (checksums, fileName) {
  return new Promise(resolve => {
    temp.open(fileName, (err, info) => {
      if (err) {
        console.error(`${fail} Could not create ${fileName} file`);
        process.exit(1);
      } else {
        fs.writeFileSync(info.fd, checksums);
        fs.close(info.fd, (err) => {
          if (err) {
            console.error(`${fail} Could not close ${fileName} file`);
            process.exit(1);
          }
          resolve(info.path);
        });
      }
    });
  });
}
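
// Flip the draft release to published; only mark it "latest" when it is a stable
// release whose version is at least the current latest release.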
async function publishRelease (release) {
  let makeLatest = false;
  if (!release.prerelease) {
    const currentLatest = await octokit.repos.getLatestRelease({
      owner: 'electron',
      repo: targetRepo
    });

    makeLatest = semver.gte(release.tag_name, currentLatest.data.tag_name);
  }

  return octokit.repos.updateRelease({
    owner: 'electron',
    repo: targetRepo,
    release_id: release.id,
    tag_name: release.tag_name,
    draft: false,
    make_latest: makeLatest ? 'true' : 'false'
  }).catch(err => {
    console.error(`${fail} Error publishing release:`, err);
    process.exit(1);
  });
}
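
// Either validate an existing draft release (when a release to validate is given)
// or generate checksums, validate assets, and publish the current draft.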
async function makeRelease (releaseToValidate) {
  if (releaseToValidate) {
    if (releaseToValidate === true) {
      releaseToValidate = pkgVersion;
    } else {
      console.log('Release to validate !=== true');
    }
    console.log(`Validating release ${releaseToValidate}`);
    const release = await getDraftRelease(releaseToValidate);
    await validateReleaseAssets(release, true);
  } else {
    let draftRelease = await getDraftRelease();
    uploadNodeShasums();
    await createReleaseShasums(draftRelease);

    // Fetch latest version of release before verifying
    draftRelease = await getDraftRelease(pkgVersion, true);
    await validateReleaseAssets(draftRelease);

    // index.json goes live once uploaded, so do these uploads as late as possible
    // to reduce the chances it contains a release which fails to publish. It has to
    // be done before the final publish to ensure there aren't published releases not
    // contained in index.json, which causes other problems in downstream projects.
    uploadIndexJson();
    await publishRelease(draftRelease);
    console.log(`${pass} SUCCESS!!! Release has been published. Please run ` +
      '"npm run publish-to-npm" to publish release to npm.');
  }
}
const SHASUM_256_FILENAME = 'SHASUMS256.txt';
const SHASUM_1_FILENAME = 'SHASUMS.txt';
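
// Draft release assets cannot be downloaded anonymously, so resolve each asset's
// redirect URL with an authenticated HEAD request before hashing.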
async function verifyDraftGitHubReleaseAssets (release) {
  console.log('Fetching authenticated GitHub artifact URLs to verify shasums');

  const remoteFilesToHash = await Promise.all(release.assets.map(async asset => {
    const requestOptions = octokit.repos.getReleaseAsset.endpoint({
      owner: 'electron',
      repo: targetRepo,
      asset_id: asset.id,
      headers: {
        Accept: 'application/octet-stream'
      }
    });

    const { url, headers } = requestOptions;
    headers.authorization = `token ${(await octokit.auth()).token}`;

    const response = await got(url, {
      followRedirect: false,
      method: 'HEAD',
      headers,
      throwHttpErrors: false
    });

    if (response.statusCode !== 302 && response.statusCode !== 301) {
      console.error('Failed to HEAD github asset: ' + url);
      throw new Error('Unexpected status HEAD\'ing github asset: ' + response.statusCode);
    }

    return { url: response.headers.location, file: asset.name };
  })).catch(err => {
    console.error(`${fail} Error downloading files from GitHub`, err);
    process.exit(1);
  });

  await verifyShasumsForRemoteFiles(remoteFilesToHash);
}
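
// Fetch a SHASUMS file and parse it into a { fileName: sha } map, stripping the
// given fileNamePrefix from each file name.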
async function getShaSumMappingFromUrl (shaSumFileUrl, fileNamePrefix) {
  const response = await got(shaSumFileUrl, {
    throwHttpErrors: false
  });

  if (response.statusCode !== 200) {
    console.error('Failed to fetch SHASUM mapping: ' + shaSumFileUrl);
    console.error('Bad SHASUM mapping response: ' + response.body.trim());
    throw new Error('Unexpected status fetching SHASUM mapping: ' + response.statusCode);
  }

  const raw = response.body;
  return raw.split('\n').map(line => line.trim()).filter(Boolean).reduce((map, line) => {
    const [sha, file] = line.replace('  ', ' ').split(' ');
    map[file.slice(fileNamePrefix.length)] = sha;
    return map;
  }, {});
}
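
// Compare each computed hash against the corresponding entry in the SHASUMS mapping,
// aborting on the first mismatch.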
async function validateFileHashesAgainstShaSumMapping (remoteFilesWithHashes, mapping) {
  for (const remoteFileWithHash of remoteFilesWithHashes) {
    check(remoteFileWithHash.hash === mapping[remoteFileWithHash.file], `Release asset ${remoteFileWithHash.file} should have hash of ${mapping[remoteFileWithHash.file]} but found ${remoteFileWithHash.hash}`, true);
  }
}
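
// Hash each remote file with getUrlHash and verify the results against the published
// SHASUMS256.txt (and SHASUMS.txt with SHA-1 hashes for Node.js artifacts).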
async function verifyShasumsForRemoteFiles (remoteFilesToHash, filesAreNodeJSArtifacts = false) {
  console.log(`Generating SHAs for ${remoteFilesToHash.length} files to verify shasums`);

  // Only used for node.js artifact uploads
  const shaSum1File = remoteFilesToHash.find(({ file }) => file === SHASUM_1_FILENAME);
  // Used for both node.js artifact uploads and normal electron artifacts
  const shaSum256File = remoteFilesToHash.find(({ file }) => file === SHASUM_256_FILENAME);
  remoteFilesToHash = remoteFilesToHash.filter(({ file }) => file !== SHASUM_1_FILENAME && file !== SHASUM_256_FILENAME);

  const remoteFilesWithHashes = await Promise.all(remoteFilesToHash.map(async (file) => {
    return {
      hash: await getUrlHash(file.url, 'sha256'),
      ...file
    };
  }));

  await validateFileHashesAgainstShaSumMapping(remoteFilesWithHashes, await getShaSumMappingFromUrl(shaSum256File.url, filesAreNodeJSArtifacts ? '' : '*'));

  if (filesAreNodeJSArtifacts) {
    const remoteFilesWithSha1Hashes = await Promise.all(remoteFilesToHash.map(async (file) => {
      return {
        hash: await getUrlHash(file.url, 'sha1'),
        ...file
      };
    }));

    await validateFileHashesAgainstShaSumMapping(remoteFilesWithSha1Hashes, await getShaSumMappingFromUrl(shaSum1File.url, filesAreNodeJSArtifacts ? '' : '*'));
  }
}
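
// Kick off the release. Pass --validateRelease to only validate an existing draft
// instead of publishing it.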
makeRelease(args.validateRelease)
  .catch((err) => {
    console.error('Error occurred while making release:', err);
    process.exit(1);
  });