release.ts 18 KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257258259260261262263264265266267268269270271272273274275276277278279280281282283284285286287288289290291292293294295296297298299300301302303304305306307308309310311312313314315316317318319320321322323324325326327328329330331332333334335336337338339340341342343344345346347348349350351352353354355356357358359360361362363364365366367368369370371372373374375376377378379380381382383384385386387388389390391392393394395396397398399400401402403404405406407408409410411412413414415416417418419420421422423424425426427428429430431432433434435436437438439440441442443444445446447448449450451452453454455456457458459460461462463464465466467468469470471472473474475476477478479480481482483484485486487488489490491492493494495496497498499500501502503504505506507508509510511512513514515516517518519520521522523524525526527528529530531532533534535536537538539540541542543544545546547548549550551552553554555556557558559560561562563564565566567568569570571572573574575576577578579580581582583584585586587588
  1. #!/usr/bin/env node
  2. import { BlobServiceClient } from '@azure/storage-blob';
  3. import { Octokit } from '@octokit/rest';
  4. import * as chalk from 'chalk';
  5. import got from 'got';
  6. import { gte } from 'semver';
  7. import { track as trackTemp } from 'temp';
  8. import { execSync, ExecSyncOptions } from 'node:child_process';
  9. import { statSync, createReadStream, writeFileSync, close } from 'node:fs';
  10. import { join } from 'node:path';
  11. import { getUrlHash } from './get-url-hash';
  12. import { createGitHubTokenStrategy } from './github-token';
  13. import { ELECTRON_ORG, ELECTRON_REPO, ElectronReleaseRepo, NIGHTLY_REPO } from './types';
  14. import { getElectronVersion } from '../lib/get-version';
  15. import { ELECTRON_DIR } from '../lib/utils';
// Tracked temp-file factory: files created through `temp` are cleaned up on process exit.
const temp = trackTemp();
// Console markers used by check() output.
const pass = chalk.green('✓');
const fail = chalk.red('✗');
// Version being released, with the 'v' tag prefix (e.g. 'v28.0.0' or 'v29.0.0-nightly.x').
const pkgVersion = `v${getElectronVersion()}`;
// Nightly builds publish to the nightly repo; everything else to the main Electron repo.
function getRepo (): ElectronReleaseRepo {
return pkgVersion.indexOf('nightly') > 0 ? NIGHTLY_REPO : ELECTRON_REPO;
}
const targetRepo = getRepo();
// Mutable counter shared with check(); callers reset it before each validation pass.
let failureCount = 0;
const octokit = new Octokit({
authStrategy: createGitHubTokenStrategy(targetRepo)
});
  28. async function getDraftRelease (
  29. version?: string,
  30. skipValidation: boolean = false
  31. ) {
  32. const releaseInfo = await octokit.repos.listReleases({
  33. owner: ELECTRON_ORG,
  34. repo: targetRepo
  35. });
  36. const versionToCheck = version || pkgVersion;
  37. const drafts = releaseInfo.data.filter((release) => {
  38. return release.tag_name === versionToCheck && release.draft === true;
  39. });
  40. const draft = drafts[0];
  41. if (!skipValidation) {
  42. failureCount = 0;
  43. check(drafts.length === 1, 'one draft exists', true);
  44. if (versionToCheck.includes('beta')) {
  45. check(draft.prerelease, 'draft is a prerelease');
  46. }
  47. check(
  48. !!draft.body &&
  49. draft.body.length > 50 &&
  50. !draft.body.includes('(placeholder)'),
  51. 'draft has release notes'
  52. );
  53. check(failureCount === 0, 'Draft release looks good to go.', true);
  54. }
  55. return draft;
  56. }
// Subset of the GitHub release payload that this script actually reads.
type MinimalRelease = {
id: number;
// Git tag, e.g. 'v28.0.0'.
tag_name: string;
draft: boolean;
prerelease: boolean;
// Uploaded release artifacts.
assets: {
name: string;
browser_download_url: string;
id: number;
}[];
};
  68. async function validateReleaseAssets (
  69. release: MinimalRelease,
  70. validatingRelease: boolean = false
  71. ) {
  72. const requiredAssets = assetsForVersion(
  73. release.tag_name,
  74. validatingRelease
  75. ).sort();
  76. const extantAssets = release.assets.map((asset) => asset.name).sort();
  77. const downloadUrls = release.assets
  78. .map((asset) => ({ url: asset.browser_download_url, file: asset.name }))
  79. .sort((a, b) => a.file.localeCompare(b.file));
  80. failureCount = 0;
  81. for (const asset of requiredAssets) {
  82. check(extantAssets.includes(asset), asset);
  83. }
  84. check(
  85. failureCount === 0,
  86. 'All required GitHub assets exist for release',
  87. true
  88. );
  89. if (!validatingRelease || !release.draft) {
  90. if (release.draft) {
  91. await verifyDraftGitHubReleaseAssets(release);
  92. } else {
  93. await verifyShasumsForRemoteFiles(downloadUrls).catch((err) => {
  94. console.error(`${fail} error verifyingShasums`, err);
  95. });
  96. }
  97. const azRemoteFiles = azRemoteFilesForVersion(release.tag_name);
  98. await verifyShasumsForRemoteFiles(azRemoteFiles, true);
  99. }
  100. }
  101. function check (condition: boolean, statement: string, exitIfFail = false) {
  102. if (condition) {
  103. console.log(`${pass} ${statement}`);
  104. } else {
  105. failureCount++;
  106. console.error(`${fail} ${statement}`);
  107. if (exitIfFail) process.exit(1);
  108. }
  109. }
  110. function assetsForVersion (version: string, validatingRelease: boolean) {
  111. const patterns = [
  112. `chromedriver-${version}-darwin-x64.zip`,
  113. `chromedriver-${version}-darwin-arm64.zip`,
  114. `chromedriver-${version}-linux-arm64.zip`,
  115. `chromedriver-${version}-linux-armv7l.zip`,
  116. `chromedriver-${version}-linux-x64.zip`,
  117. `chromedriver-${version}-mas-x64.zip`,
  118. `chromedriver-${version}-mas-arm64.zip`,
  119. `chromedriver-${version}-win32-ia32.zip`,
  120. `chromedriver-${version}-win32-x64.zip`,
  121. `chromedriver-${version}-win32-arm64.zip`,
  122. `electron-${version}-darwin-x64-dsym.zip`,
  123. `electron-${version}-darwin-x64-dsym-snapshot.zip`,
  124. `electron-${version}-darwin-x64-symbols.zip`,
  125. `electron-${version}-darwin-x64.zip`,
  126. `electron-${version}-darwin-arm64-dsym.zip`,
  127. `electron-${version}-darwin-arm64-dsym-snapshot.zip`,
  128. `electron-${version}-darwin-arm64-symbols.zip`,
  129. `electron-${version}-darwin-arm64.zip`,
  130. `electron-${version}-linux-arm64-symbols.zip`,
  131. `electron-${version}-linux-arm64.zip`,
  132. `electron-${version}-linux-armv7l-symbols.zip`,
  133. `electron-${version}-linux-armv7l.zip`,
  134. `electron-${version}-linux-x64-debug.zip`,
  135. `electron-${version}-linux-x64-symbols.zip`,
  136. `electron-${version}-linux-x64.zip`,
  137. `electron-${version}-mas-x64-dsym.zip`,
  138. `electron-${version}-mas-x64-dsym-snapshot.zip`,
  139. `electron-${version}-mas-x64-symbols.zip`,
  140. `electron-${version}-mas-x64.zip`,
  141. `electron-${version}-mas-arm64-dsym.zip`,
  142. `electron-${version}-mas-arm64-dsym-snapshot.zip`,
  143. `electron-${version}-mas-arm64-symbols.zip`,
  144. `electron-${version}-mas-arm64.zip`,
  145. `electron-${version}-win32-ia32-pdb.zip`,
  146. `electron-${version}-win32-ia32-symbols.zip`,
  147. `electron-${version}-win32-ia32.zip`,
  148. `electron-${version}-win32-x64-pdb.zip`,
  149. `electron-${version}-win32-x64-symbols.zip`,
  150. `electron-${version}-win32-x64.zip`,
  151. `electron-${version}-win32-arm64-pdb.zip`,
  152. `electron-${version}-win32-arm64-symbols.zip`,
  153. `electron-${version}-win32-arm64.zip`,
  154. 'electron-api.json',
  155. 'electron.d.ts',
  156. 'hunspell_dictionaries.zip',
  157. 'libcxx_headers.zip',
  158. 'libcxxabi_headers.zip',
  159. `libcxx-objects-${version}-linux-arm64.zip`,
  160. `libcxx-objects-${version}-linux-armv7l.zip`,
  161. `libcxx-objects-${version}-linux-x64.zip`,
  162. `ffmpeg-${version}-darwin-x64.zip`,
  163. `ffmpeg-${version}-darwin-arm64.zip`,
  164. `ffmpeg-${version}-linux-arm64.zip`,
  165. `ffmpeg-${version}-linux-armv7l.zip`,
  166. `ffmpeg-${version}-linux-x64.zip`,
  167. `ffmpeg-${version}-mas-x64.zip`,
  168. `ffmpeg-${version}-mas-arm64.zip`,
  169. `ffmpeg-${version}-win32-ia32.zip`,
  170. `ffmpeg-${version}-win32-x64.zip`,
  171. `ffmpeg-${version}-win32-arm64.zip`,
  172. `mksnapshot-${version}-darwin-x64.zip`,
  173. `mksnapshot-${version}-darwin-arm64.zip`,
  174. `mksnapshot-${version}-linux-arm64-x64.zip`,
  175. `mksnapshot-${version}-linux-armv7l-x64.zip`,
  176. `mksnapshot-${version}-linux-x64.zip`,
  177. `mksnapshot-${version}-mas-x64.zip`,
  178. `mksnapshot-${version}-mas-arm64.zip`,
  179. `mksnapshot-${version}-win32-ia32.zip`,
  180. `mksnapshot-${version}-win32-x64.zip`,
  181. `mksnapshot-${version}-win32-arm64-x64.zip`,
  182. `electron-${version}-win32-ia32-toolchain-profile.zip`,
  183. `electron-${version}-win32-x64-toolchain-profile.zip`,
  184. `electron-${version}-win32-arm64-toolchain-profile.zip`
  185. ];
  186. if (!validatingRelease) {
  187. patterns.push('SHASUMS256.txt');
  188. }
  189. return patterns;
  190. }
  191. const cloudStoreFilePaths = (version: string) => [
  192. `iojs-${version}-headers.tar.gz`,
  193. `iojs-${version}.tar.gz`,
  194. `node-${version}.tar.gz`,
  195. 'node.lib',
  196. 'x64/node.lib',
  197. 'win-x64/iojs.lib',
  198. 'win-x86/iojs.lib',
  199. 'win-arm64/iojs.lib',
  200. 'win-x64/node.lib',
  201. 'win-x86/node.lib',
  202. 'win-arm64/node.lib',
  203. 'arm64/node.lib',
  204. 'SHASUMS.txt',
  205. 'SHASUMS256.txt'
  206. ];
  207. function azRemoteFilesForVersion (version: string) {
  208. const azCDN = 'https://artifacts.electronjs.org/headers/';
  209. const versionPrefix = `${azCDN}dist/${version}/`;
  210. return cloudStoreFilePaths(version).map((filePath) => ({
  211. file: filePath,
  212. url: `${versionPrefix}${filePath}`
  213. }));
  214. }
  215. function runScript (scriptName: string, scriptArgs: string[], cwd?: string) {
  216. const scriptCommand = `${scriptName} ${scriptArgs.join(' ')}`;
  217. const scriptOptions: ExecSyncOptions = {
  218. encoding: 'utf-8'
  219. };
  220. if (cwd) scriptOptions.cwd = cwd;
  221. try {
  222. return execSync(scriptCommand, scriptOptions);
  223. } catch (err) {
  224. console.error(`${fail} Error running ${scriptName}`, err);
  225. process.exit(1);
  226. }
  227. }
  228. function uploadNodeShasums () {
  229. console.log('Uploading Node SHASUMS file to artifacts.electronjs.org.');
  230. const scriptPath = join(
  231. ELECTRON_DIR,
  232. 'script',
  233. 'release',
  234. 'uploaders',
  235. 'upload-node-checksums.py'
  236. );
  237. runScript(scriptPath, ['-v', pkgVersion]);
  238. console.log(
  239. `${pass} Done uploading Node SHASUMS file to artifacts.electronjs.org.`
  240. );
  241. }
  242. function uploadIndexJson () {
  243. console.log('Uploading index.json to artifacts.electronjs.org.');
  244. const scriptPath = join(
  245. ELECTRON_DIR,
  246. 'script',
  247. 'release',
  248. 'uploaders',
  249. 'upload-index-json.py'
  250. );
  251. runScript(scriptPath, [pkgVersion]);
  252. console.log(`${pass} Done uploading index.json to artifacts.electronjs.org.`);
  253. }
  254. async function mergeShasums (pkgVersion: string) {
  255. // Download individual checksum files for Electron zip files from artifact storage,
  256. // concatenate them, and upload to GitHub.
  257. const connectionString = process.env.ELECTRON_ARTIFACTS_BLOB_STORAGE;
  258. if (!connectionString) {
  259. throw new Error(
  260. 'Please set the $ELECTRON_ARTIFACTS_BLOB_STORAGE environment variable'
  261. );
  262. }
  263. const blobServiceClient =
  264. BlobServiceClient.fromConnectionString(connectionString);
  265. const containerClient = blobServiceClient.getContainerClient(
  266. 'checksums-scratchpad'
  267. );
  268. const blobsIter = containerClient.listBlobsFlat({
  269. prefix: `${pkgVersion}/`
  270. });
  271. const shasums = [];
  272. for await (const blob of blobsIter) {
  273. if (blob.name.endsWith('.sha256sum')) {
  274. const blobClient = containerClient.getBlockBlobClient(blob.name);
  275. const response = await blobClient.downloadToBuffer();
  276. shasums.push(response.toString('ascii').trim());
  277. }
  278. }
  279. return shasums.join('\n');
  280. }
  281. async function createReleaseShasums (release: MinimalRelease) {
  282. const fileName = 'SHASUMS256.txt';
  283. const existingAssets = release.assets.filter(
  284. (asset) => asset.name === fileName
  285. );
  286. if (existingAssets.length > 0) {
  287. console.log(
  288. `${fileName} already exists on GitHub; deleting before creating new file.`
  289. );
  290. await octokit.repos
  291. .deleteReleaseAsset({
  292. owner: ELECTRON_ORG,
  293. repo: targetRepo,
  294. asset_id: existingAssets[0].id
  295. })
  296. .catch((err) => {
  297. console.error(`${fail} Error deleting ${fileName} on GitHub:`, err);
  298. process.exit(1);
  299. });
  300. }
  301. console.log(`Creating and uploading the release ${fileName}.`);
  302. const checksums = await mergeShasums(pkgVersion);
  303. console.log(`${pass} Generated release SHASUMS.`);
  304. const filePath = await saveShaSumFile(checksums, fileName);
  305. console.log(`${pass} Created ${fileName} file.`);
  306. await uploadShasumFile(filePath, fileName, release.id);
  307. console.log(`${pass} Successfully uploaded ${fileName} to GitHub.`);
  308. }
  309. async function uploadShasumFile (
  310. filePath: string,
  311. fileName: string,
  312. releaseId: number
  313. ) {
  314. const uploadUrl = `https://uploads.github.com/repos/electron/${targetRepo}/releases/${releaseId}/assets{?name,label}`;
  315. return octokit.repos
  316. .uploadReleaseAsset({
  317. url: uploadUrl,
  318. headers: {
  319. 'content-type': 'text/plain',
  320. 'content-length': statSync(filePath).size
  321. },
  322. data: createReadStream(filePath),
  323. name: fileName
  324. } as any)
  325. .catch((err) => {
  326. console.error(`${fail} Error uploading ${filePath} to GitHub:`, err);
  327. process.exit(1);
  328. });
  329. }
  330. function saveShaSumFile (checksums: string, fileName: string) {
  331. return new Promise<string>((resolve) => {
  332. temp.open(fileName, (err, info) => {
  333. if (err) {
  334. console.error(`${fail} Could not create ${fileName} file`);
  335. process.exit(1);
  336. } else {
  337. writeFileSync(info.fd, checksums);
  338. close(info.fd, (err) => {
  339. if (err) {
  340. console.error(`${fail} Could close ${fileName} file`);
  341. process.exit(1);
  342. }
  343. resolve(info.path);
  344. });
  345. }
  346. });
  347. });
  348. }
  349. async function publishRelease (release: MinimalRelease) {
  350. let makeLatest = false;
  351. if (!release.prerelease) {
  352. const currentLatest = await octokit.repos.getLatestRelease({
  353. owner: ELECTRON_ORG,
  354. repo: targetRepo
  355. });
  356. makeLatest = gte(release.tag_name, currentLatest.data.tag_name);
  357. }
  358. return octokit.repos
  359. .updateRelease({
  360. owner: ELECTRON_ORG,
  361. repo: targetRepo,
  362. release_id: release.id,
  363. tag_name: release.tag_name,
  364. draft: false,
  365. make_latest: makeLatest ? 'true' : 'false'
  366. })
  367. .catch((err) => {
  368. console.error(`${fail} Error publishing release:`, err);
  369. process.exit(1);
  370. });
  371. }
// Entry point: validates the current draft release (asset presence plus
// shasum verification) without publishing or uploading anything.
export async function validateRelease () {
console.log(`Validating release ${pkgVersion}`);
const release = await getDraftRelease(pkgVersion);
await validateReleaseAssets(release, true);
}
// Entry point: performs the full publish sequence for the current draft
// release. The ordering of steps below is deliberate — do not reorder.
export async function makeRelease () {
let draftRelease = await getDraftRelease();
// Node checksum files must be on the CDN before SHASUMS256.txt is assembled.
uploadNodeShasums();
await createReleaseShasums(draftRelease);
// Fetch latest version of release before verifying
draftRelease = await getDraftRelease(pkgVersion, true);
await validateReleaseAssets(draftRelease);
// index.json goes live once uploaded so do these uploads as
// late as possible to reduce the chances it contains a release
// which fails to publish. It has to be done before the final
// publish to ensure there aren't published releases not contained
// in index.json, which causes other problems in downstream projects
uploadIndexJson();
await publishRelease(draftRelease);
console.log(
`${pass} SUCCESS!!! Release has been published. Please run ` +
'"npm run publish-to-npm" to publish release to npm.'
);
}
// Canonical shasum file names attached to releases / CDN folders.
const SHASUM_256_FILENAME = 'SHASUMS256.txt';
const SHASUM_1_FILENAME = 'SHASUMS.txt';
// Draft-release assets are not publicly downloadable, so resolve each asset
// to its authenticated, short-lived download URL (via a HEAD request that we
// deliberately don't follow) and verify shasums against those URLs instead.
async function verifyDraftGitHubReleaseAssets (release: MinimalRelease) {
console.log('Fetching authenticated GitHub artifact URLs to verify shasums');
const remoteFilesToHash = await Promise.all(
release.assets.map(async (asset) => {
// Build the API request for the raw asset; the octet-stream Accept header
// makes GitHub respond with a redirect to the actual download location.
const requestOptions = octokit.repos.getReleaseAsset.endpoint({
owner: ELECTRON_ORG,
repo: targetRepo,
asset_id: asset.id,
headers: {
Accept: 'application/octet-stream'
}
});
const { url, headers } = requestOptions;
// Endpoint options don't carry auth; attach the token manually.
headers.authorization = `token ${
((await octokit.auth()) as { token: string }).token
}`;
// HEAD without following redirects: we only want the Location header.
const response = await got(url, {
followRedirect: false,
method: 'HEAD',
headers: headers as any,
throwHttpErrors: false
});
// Anything other than a redirect means we can't resolve the asset URL.
if (response.statusCode !== 302 && response.statusCode !== 301) {
console.error('Failed to HEAD github asset: ' + url);
throw new Error(
"Unexpected status HEAD'ing github asset: " + response.statusCode
);
}
return { url: response.headers.location!, file: asset.name };
})
).catch((err) => {
console.error(`${fail} Error downloading files from GitHub`, err);
process.exit(1);
});
await verifyShasumsForRemoteFiles(remoteFilesToHash);
}
  434. async function getShaSumMappingFromUrl (
  435. shaSumFileUrl: string,
  436. fileNamePrefix: string
  437. ) {
  438. const response = await got(shaSumFileUrl, {
  439. throwHttpErrors: false
  440. });
  441. if (response.statusCode !== 200) {
  442. console.error('Failed to fetch SHASUM mapping: ' + shaSumFileUrl);
  443. console.error('Bad SHASUM mapping response: ' + response.body.trim());
  444. throw new Error(
  445. 'Unexpected status fetching SHASUM mapping: ' + response.statusCode
  446. );
  447. }
  448. const raw = response.body;
  449. return raw
  450. .split('\n')
  451. .map((line) => line.trim())
  452. .filter(Boolean)
  453. .reduce((map, line) => {
  454. const [sha, file] = line.replace(' ', ' ').split(' ');
  455. map[file.slice(fileNamePrefix.length)] = sha;
  456. return map;
  457. }, Object.create(null) as Record<string, string>);
  458. }
// A remote file together with its freshly computed digest.
type HashedFile = HashableFile & {
hash: string;
};
// A remote file we can download and hash: display name + download URL.
type HashableFile = {
file: string;
url: string;
};
  466. async function validateFileHashesAgainstShaSumMapping (
  467. remoteFilesWithHashes: HashedFile[],
  468. mapping: Record<string, string>
  469. ) {
  470. for (const remoteFileWithHash of remoteFilesWithHashes) {
  471. check(
  472. remoteFileWithHash.hash === mapping[remoteFileWithHash.file],
  473. `Release asset ${remoteFileWithHash.file} should have hash of ${
  474. mapping[remoteFileWithHash.file]
  475. } but found ${remoteFileWithHash.hash}`,
  476. true
  477. );
  478. }
  479. }
  480. async function verifyShasumsForRemoteFiles (
  481. remoteFilesToHash: HashableFile[],
  482. filesAreNodeJSArtifacts = false
  483. ) {
  484. console.log(
  485. `Generating SHAs for ${remoteFilesToHash.length} files to verify shasums`
  486. );
  487. // Only used for node.js artifact uploads
  488. const shaSum1File = remoteFilesToHash.find(
  489. ({ file }) => file === SHASUM_1_FILENAME
  490. )!;
  491. // Used for both node.js artifact uploads and normal electron artifacts
  492. const shaSum256File = remoteFilesToHash.find(
  493. ({ file }) => file === SHASUM_256_FILENAME
  494. )!;
  495. remoteFilesToHash = remoteFilesToHash.filter(
  496. ({ file }) => file !== SHASUM_1_FILENAME && file !== SHASUM_256_FILENAME
  497. );
  498. const remoteFilesWithHashes = await Promise.all(
  499. remoteFilesToHash.map(async (file) => {
  500. return {
  501. hash: await getUrlHash(file.url, 'sha256'),
  502. ...file
  503. };
  504. })
  505. );
  506. await validateFileHashesAgainstShaSumMapping(
  507. remoteFilesWithHashes,
  508. await getShaSumMappingFromUrl(
  509. shaSum256File.url,
  510. filesAreNodeJSArtifacts ? '' : '*'
  511. )
  512. );
  513. if (filesAreNodeJSArtifacts) {
  514. const remoteFilesWithSha1Hashes = await Promise.all(
  515. remoteFilesToHash.map(async (file) => {
  516. return {
  517. hash: await getUrlHash(file.url, 'sha1'),
  518. ...file
  519. };
  520. })
  521. );
  522. await validateFileHashesAgainstShaSumMapping(
  523. remoteFilesWithSha1Hashes,
  524. await getShaSumMappingFromUrl(
  525. shaSum1File.url,
  526. filesAreNodeJSArtifacts ? '' : '*'
  527. )
  528. );
  529. }
  530. }