
build: offload hash checking logic to lambda worker during release (#29096)

Samuel Attard · 3 years ago · commit 35f2ed8978
5 changed files with 105 additions and 156 deletions:

  1. package.json (+0, -1)
  2. script/release/get-url-hash.js (+31, -0)
  3. script/release/prepare-release.js (+1, -2)
  4. script/release/release.js (+73, -146)
  5. yarn.lock (+0, -7)

+ 0 - 1
package.json

@@ -64,7 +64,6 @@
     "shx": "^0.3.2",
     "standard-markdown": "^6.0.0",
     "stream-json": "^1.7.1",
-    "sumchecker": "^2.0.2",
     "tap-xunit": "^2.4.1",
     "temp": "^0.8.3",
     "timers-browserify": "1.4.2",

+ 31 - 0
script/release/get-url-hash.js

@@ -0,0 +1,31 @@
+const AWS = require('aws-sdk');
+
+const lambda = new AWS.Lambda({
+  credentials: {
+    accessKeyId: process.env.AWS_LAMBDA_EXECUTE_KEY,
+    secretAccessKey: process.env.AWS_LAMBDA_EXECUTE_SECRET
+  },
+  region: 'us-east-1'
+});
+
+module.exports = function getUrlHash (targetUrl, algorithm = 'sha256') {
+  return new Promise((resolve, reject) => {
+    lambda.invoke({
+      FunctionName: 'hasher',
+      Payload: JSON.stringify({
+        targetUrl,
+        algorithm
+      })
+    }, (err, data) => {
+      if (err) return reject(err);
+      try {
+        const response = JSON.parse(data.Payload);
+        if (response.statusCode !== 200) return reject(new Error('non-200 status code received from hasher function'));
+        if (!response.hash) return reject(new Error('Successful lambda call but failed to get valid hash'));
+        resolve(response.hash);
+      } catch (err) {
+        return reject(err);
+      }
+    });
+  });
+};
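
The new module delegates hashing to a Lambda function named 'hasher': the script sends { targetUrl, algorithm } and expects { statusCode, hash } back, so artifact bytes never touch the release machine. The Lambda itself is not part of this commit; below is a minimal sketch of the contract it would need to satisfy (everything inside it is an assumption, and redirects are presumed already resolved by the caller, as release.js does for authenticated GitHub asset URLs).

const crypto = require('crypto');
const https = require('https');

// Hypothetical handler for the 'hasher' function: stream the target URL
// through a hash and return { statusCode, hash } as release.js expects.
exports.handler = (event) => new Promise((resolve, reject) => {
  https.get(event.targetUrl, (res) => {
    if (res.statusCode !== 200) return resolve({ statusCode: res.statusCode });
    const hash = crypto.createHash(event.algorithm || 'sha256');
    res.on('data', (chunk) => hash.update(chunk));
    res.on('end', () => resolve({ statusCode: 200, hash: hash.digest('hex') }));
    res.on('error', reject);
  }).on('error', reject);
});

On the calling side, usage is simply: const hash = await getUrlHash(asset.url, 'sha256');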

+ 1 - 2
script/release/prepare-release.js

@@ -135,8 +135,7 @@ async function pushRelease (branch) {
 
 async function runReleaseBuilds (branch) {
   await ciReleaseBuild(branch, {
-    ghRelease: true,
-    automaticRelease: args.automaticRelease
+    ghRelease: true
   });
 }
 

+ 73 - 146
script/release/release.js

@@ -5,20 +5,16 @@ if (!process.env.CI) require('dotenv-safe').load();
 const args = require('minimist')(process.argv.slice(2), {
   boolean: [
     'validateRelease',
-    'skipVersionCheck',
-    'automaticRelease',
     'verboseNugget'
   ],
   default: { verboseNugget: false }
 });
 const fs = require('fs');
 const { execSync } = require('child_process');
-const nugget = require('nugget');
 const got = require('got');
 const pkg = require('../../package.json');
 const pkgVersion = `v${pkg.version}`;
 const path = require('path');
-const sumchecker = require('sumchecker');
 const temp = require('temp').track();
 const { URL } = require('url');
 const { Octokit } = require('@octokit/rest');
@@ -29,6 +25,7 @@ const pass = '✓'.green;
 const fail = '✗'.red;
 
 const { ELECTRON_DIR } = require('../lib/utils');
+const getUrlHash = require('./get-url-hash');
 
 const octokit = new Octokit({
   auth: process.env.ELECTRON_GITHUB_TOKEN
@@ -64,7 +61,7 @@ async function getDraftRelease (version, skipValidation) {
 async function validateReleaseAssets (release, validatingRelease) {
   const requiredAssets = assetsForVersion(release.tag_name, validatingRelease).sort();
   const extantAssets = release.assets.map(asset => asset.name).sort();
-  const downloadUrls = release.assets.map(asset => asset.browser_download_url).sort();
+  const downloadUrls = release.assets.map(asset => ({ url: asset.browser_download_url, file: asset.name })).sort((a, b) => a.file.localeCompare(b.file));
 
   failureCount = 0;
   requiredAssets.forEach(asset => {
@@ -74,15 +71,15 @@ async function validateReleaseAssets (release, validatingRelease) {
 
   if (!validatingRelease || !release.draft) {
     if (release.draft) {
-      await verifyAssets(release);
+      await verifyDraftGitHubReleaseAssets(release);
     } else {
-      await verifyShasums(downloadUrls)
+      await verifyShasumsForRemoteFiles(downloadUrls)
         .catch(err => {
           console.log(`${fail} error verifyingShasums`, err);
         });
     }
-    const s3Urls = s3UrlsForVersion(release.tag_name);
-    await verifyShasums(s3Urls, true);
+    const s3RemoteFiles = s3RemoteFilesForVersion(release.tag_name);
+    await verifyShasumsForRemoteFiles(s3RemoteFiles, true);
   }
 }
 
@@ -174,21 +171,29 @@ function assetsForVersion (version, validatingRelease) {
   return patterns;
 }
 
-function s3UrlsForVersion (version) {
+function s3RemoteFilesForVersion (version) {
   const bucket = 'https://gh-contractor-zcbenz.s3.amazonaws.com/';
-  const patterns = [
-    `${bucket}atom-shell/dist/${version}/iojs-${version}-headers.tar.gz`,
-    `${bucket}atom-shell/dist/${version}/iojs-${version}.tar.gz`,
-    `${bucket}atom-shell/dist/${version}/node-${version}.tar.gz`,
-    `${bucket}atom-shell/dist/${version}/node.lib`,
-    `${bucket}atom-shell/dist/${version}/win-x64/iojs.lib`,
-    `${bucket}atom-shell/dist/${version}/win-x86/iojs.lib`,
-    `${bucket}atom-shell/dist/${version}/x64/node.lib`,
-    `${bucket}atom-shell/dist/${version}/SHASUMS.txt`,
-    `${bucket}atom-shell/dist/${version}/SHASUMS256.txt`,
-    `${bucket}atom-shell/dist/index.json`
+  const versionPrefix = `${bucket}atom-shell/dist/${version}/`;
+  const filePaths = [
+    `iojs-${version}-headers.tar.gz`,
+    `iojs-${version}.tar.gz`,
+    `node-${version}.tar.gz`,
+    'node.lib',
+    'x64/node.lib',
+    'win-x64/iojs.lib',
+    'win-x86/iojs.lib',
+    'win-arm64/iojs.lib',
+    'win-x64/node.lib',
+    'win-x86/node.lib',
+    'win-arm64/node.lib',
+    'arm64/node.lib',
+    'SHASUMS.txt',
+    'SHASUMS256.txt'
   ];
-  return patterns;
+  return filePaths.map((filePath) => ({
+    file: filePath,
+    url: `${versionPrefix}${filePath}`
+  }));
 }
 
 function runScript (scriptName, scriptArgs, cwd) {
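
Each entry returned by s3RemoteFilesForVersion is now a { file, url } pair rather than a bare URL string, so the verifier can match a remotely computed hash to its SHASUMS entry by relative path (sub-paths such as 'win-x64/node.lib' are kept intact in the file field). For a hypothetical tag the output looks like this:

// Illustrative only; 'v13.0.0' is an invented example tag.
s3RemoteFilesForVersion('v13.0.0')[3];
// => { file: 'node.lib',
//      url: 'https://gh-contractor-zcbenz.s3.amazonaws.com/atom-shell/dist/v13.0.0/node.lib' }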
@@ -366,13 +371,13 @@ async function makeTempDir () {
   });
 }
 
-async function verifyAssets (release) {
-  const downloadDir = await makeTempDir();
+const SHASUM_256_FILENAME = 'SHASUMS256.txt';
+const SHASUM_1_FILENAME = 'SHASUMS.txt';
 
-  console.log('Downloading files from GitHub to verify shasums');
-  const shaSumFile = 'SHASUMS256.txt';
+async function verifyDraftGitHubReleaseAssets (release) {
+  console.log('Fetching authenticated GitHub artifact URLs to verify shasums');
 
-  let filesToCheck = await Promise.all(release.assets.map(async asset => {
+  const remoteFilesToHash = await Promise.all(release.assets.map(async asset => {
     const requestOptions = await octokit.repos.getReleaseAsset.endpoint({
       owner: 'electron',
       repo: targetRepo,
@@ -391,137 +396,59 @@ async function verifyAssets (release) {
       headers
     });
 
-    await downloadFiles(response.headers.location, downloadDir, asset.name);
-    return asset.name;
+    return { url: response.headers.location, file: asset.name };
   })).catch(err => {
     console.log(`${fail} Error downloading files from GitHub`, err);
     process.exit(1);
   });
 
-  filesToCheck = filesToCheck.filter(fileName => fileName !== shaSumFile);
-  let checkerOpts;
-  await validateChecksums({
-    algorithm: 'sha256',
-    filesToCheck,
-    fileDirectory: downloadDir,
-    shaSumFile,
-    checkerOpts,
-    fileSource: 'GitHub'
-  });
+  await verifyShasumsForRemoteFiles(remoteFilesToHash);
 }
 
-function downloadFiles (urls, directory, targetName) {
-  return new Promise((resolve, reject) => {
-    const nuggetOpts = { dir: directory };
-    nuggetOpts.quiet = !args.verboseNugget;
-    if (targetName) nuggetOpts.target = targetName;
-
-    nugget(urls, nuggetOpts, (err) => {
-      if (err) {
-        reject(err);
-      } else {
-        console.log(`${pass} all files downloaded successfully!`);
-        resolve();
-      }
-    });
-  });
+async function getShaSumMappingFromUrl (shaSumFileUrl, fileNamePrefix) {
+  const response = await got(shaSumFileUrl);
+  const raw = response.body;
+  return raw.split('\n').map(line => line.trim()).filter(Boolean).reduce((map, line) => {
+    const [sha, file] = line.split(' ');
+    map[file.slice(fileNamePrefix.length)] = sha;
+    return map;
+  }, {});
 }
 
-async function verifyShasums (urls, isS3) {
-  const fileSource = isS3 ? 'S3' : 'GitHub';
-  console.log(`Downloading files from ${fileSource} to verify shasums`);
-  const downloadDir = await makeTempDir();
-  let filesToCheck = [];
-  try {
-    if (!isS3) {
-      await downloadFiles(urls, downloadDir);
-      filesToCheck = urls.map(url => {
-        const currentUrl = new URL(url);
-        return path.basename(currentUrl.pathname);
-      }).filter(file => file.indexOf('SHASUMS') === -1);
-    } else {
-      const s3VersionPath = `/atom-shell/dist/${pkgVersion}/`;
-      await Promise.all(urls.map(async (url) => {
-        const currentUrl = new URL(url);
-        const dirname = path.dirname(currentUrl.pathname);
-        const filename = path.basename(currentUrl.pathname);
-        const s3VersionPathIdx = dirname.indexOf(s3VersionPath);
-        if (s3VersionPathIdx === -1 || dirname === s3VersionPath) {
-          if (s3VersionPathIdx !== -1 && filename.indexof('SHASUMS') === -1) {
-            filesToCheck.push(filename);
-          }
-          await downloadFiles(url, downloadDir);
-        } else {
-          const subDirectory = dirname.substr(s3VersionPathIdx + s3VersionPath.length);
-          const fileDirectory = path.join(downloadDir, subDirectory);
-          try {
-            fs.statSync(fileDirectory);
-          } catch (err) {
-            fs.mkdirSync(fileDirectory);
-          }
-          filesToCheck.push(path.join(subDirectory, filename));
-          await downloadFiles(url, fileDirectory);
-        }
-      }));
-    }
-  } catch (err) {
-    console.log(`${fail} Error downloading files from ${fileSource}`, err);
-    process.exit(1);
-  }
-  console.log(`${pass} Successfully downloaded the files from ${fileSource}.`);
-  let checkerOpts;
-  if (isS3) {
-    checkerOpts = { defaultTextEncoding: 'binary' };
-  }
-
-  await validateChecksums({
-    algorithm: 'sha256',
-    filesToCheck,
-    fileDirectory: downloadDir,
-    shaSumFile: 'SHASUMS256.txt',
-    checkerOpts,
-    fileSource
-  });
-
-  if (isS3) {
-    await validateChecksums({
-      algorithm: 'sha1',
-      filesToCheck,
-      fileDirectory: downloadDir,
-      shaSumFile: 'SHASUMS.txt',
-      checkerOpts,
-      fileSource
-    });
+async function validateFileHashesAgainstShaSumMapping (remoteFilesWithHashes, mapping) {
+  for (const remoteFileWithHash of remoteFilesWithHashes) {
+    check(remoteFileWithHash.hash === mapping[remoteFileWithHash.file], `Release asset ${remoteFileWithHash.file} should have hash of ${mapping[remoteFileWithHash.file]} but found ${remoteFileWithHash.hash}`, true);
   }
 }
 
-async function validateChecksums (validationArgs) {
-  console.log(`Validating checksums for files from ${validationArgs.fileSource} ` +
-    `against ${validationArgs.shaSumFile}.`);
-  const shaSumFilePath = path.join(validationArgs.fileDirectory, validationArgs.shaSumFile);
-  const checker = new sumchecker.ChecksumValidator(validationArgs.algorithm,
-    shaSumFilePath, validationArgs.checkerOpts);
-  await checker.validate(validationArgs.fileDirectory, validationArgs.filesToCheck)
-    .catch(err => {
-      if (err instanceof sumchecker.ChecksumMismatchError) {
-        console.error(`${fail} The checksum of ${err.filename} from ` +
-          `${validationArgs.fileSource} did not match the shasum in ` +
-          `${validationArgs.shaSumFile}`);
-      } else if (err instanceof sumchecker.ChecksumParseError) {
-        console.error(`${fail} The checksum file ${validationArgs.shaSumFile} ` +
-          `from ${validationArgs.fileSource} could not be parsed.`, err);
-      } else if (err instanceof sumchecker.NoChecksumFoundError) {
-        console.error(`${fail} The file ${err.filename} from ` +
-          `${validationArgs.fileSource} was not in the shasum file ` +
-          `${validationArgs.shaSumFile}.`);
-      } else {
-        console.error(`${fail} Error matching files from ` +
-          `${validationArgs.fileSource} shasums in ${validationArgs.shaSumFile}.`, err);
-      }
-      process.exit(1);
-    });
-  console.log(`${pass} All files from ${validationArgs.fileSource} match ` +
-    `shasums defined in ${validationArgs.shaSumFile}.`);
+async function verifyShasumsForRemoteFiles (remoteFilesToHash, filesAreNodeJSArtifacts = false) {
+  console.log(`Generating SHAs for ${remoteFilesToHash.length} files to verify shasums`);
+
+  // Only used for node.js artifact uploads
+  const shaSum1File = remoteFilesToHash.find(({ file }) => file === SHASUM_1_FILENAME);
+  // Used for both node.js artifact uploads and normal electron artifacts
+  const shaSum256File = remoteFilesToHash.find(({ file }) => file === SHASUM_256_FILENAME);
+  remoteFilesToHash = remoteFilesToHash.filter(({ file }) => file !== SHASUM_1_FILENAME && file !== SHASUM_256_FILENAME);
+
+  const remoteFilesWithHashes = await Promise.all(remoteFilesToHash.map(async (file) => {
+    return {
+      hash: await getUrlHash(file.url, 'sha256'),
+      ...file
+    };
+  }));
+
+  await validateFileHashesAgainstShaSumMapping(remoteFilesWithHashes, await getShaSumMappingFromUrl(shaSum256File.url, filesAreNodeJSArtifacts ? '' : '*'));
+
+  if (filesAreNodeJSArtifacts) {
+    const remoteFilesWithSha1Hashes = await Promise.all(remoteFilesToHash.map(async (file) => {
+      return {
+        hash: await getUrlHash(file.url, 'sha1'),
+        ...file
+      };
+    }));
+
+    await validateFileHashesAgainstShaSumMapping(remoteFilesWithSha1Hashes, await getShaSumMappingFromUrl(shaSum1File.url, filesAreNodeJSArtifacts ? '' : '*'));
+  }
 }
 
 makeRelease(args.validateRelease);
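
The net effect of this rewrite is that nothing is downloaded to disk anymore: each artifact is hashed in place via getUrlHash and compared against the mapping parsed out of the published SHASUMS file. Electron's checksum lines take the form '<sha> *<file>' (the '*' is the binary-mode marker), which is why getShaSumMappingFromUrl strips a '*' prefix for electron artifacts but no prefix for the node.js-style S3 files. A rough, self-contained illustration of that parse, with invented hashes:

// Hedged example (not from the commit): how the reduce inside
// getShaSumMappingFromUrl turns a SHASUMS256.txt body into a lookup table.
const fileNamePrefix = '*'; // would be '' for node.js-style artifacts
const body = [
  'aaaa1111 *electron-v13.0.0-darwin-x64.zip', // hashes invented for illustration
  'bbbb2222 *electron-v13.0.0-win32-x64.zip'
].join('\n');

const mapping = body.split('\n').map(line => line.trim()).filter(Boolean).reduce((map, line) => {
  const [sha, file] = line.split(' ');
  map[file.slice(fileNamePrefix.length)] = sha;
  return map;
}, {});

// mapping => { 'electron-v13.0.0-darwin-x64.zip': 'aaaa1111',
//              'electron-v13.0.0-win32-x64.zip': 'bbbb2222' }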

+ 0 - 7
yarn.lock

@@ -7474,13 +7474,6 @@ strip-json-comments@~2.0.1:
   resolved "https://registry.yarnpkg.com/strip-json-comments/-/strip-json-comments-2.0.1.tgz#3c531942e908c2697c0ec344858c286c7ca0a60a"
   integrity sha1-PFMZQukIwml8DsNEhYwobHygpgo=
 
-sumchecker@^2.0.2:
-  version "2.0.2"
-  resolved "https://registry.yarnpkg.com/sumchecker/-/sumchecker-2.0.2.tgz#0f42c10e5d05da5d42eea3e56c3399a37d6c5b3e"
-  integrity sha1-D0LBDl0F2l1C7qPlbDOZo31sWz4=
-  dependencies:
-    debug "^2.2.0"
-
 supports-color@^4.1.0:
   version "4.5.0"
   resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-4.5.0.tgz#be7a0de484dec5c5cddf8b3d59125044912f635b"