|
@@ -2,25 +2,22 @@
|
|
|
|
|
|
'use strict';
|
|
|
|
|
|
-const childProcess = require('child_process');
|
|
|
const fs = require('fs');
|
|
|
-const os = require('os');
|
|
|
const path = require('path');
|
|
|
|
|
|
const { GitProcess } = require('dugite');
|
|
|
const octokit = require('@octokit/rest')({
|
|
|
auth: process.env.ELECTRON_GITHUB_TOKEN
|
|
|
});
|
|
|
-const semver = require('semver');
|
|
|
|
|
|
-const { ELECTRON_VERSION, SRC_DIR } = require('../../lib/utils');
|
|
|
+const { ELECTRON_DIR } = require('../../lib/utils');
|
|
|
|
|
|
const MAX_FAIL_COUNT = 3;
|
|
|
const CHECK_INTERVAL = 5000;
|
|
|
|
|
|
-const CACHE_DIR = path.resolve(__dirname, '.cache');
|
|
|
+const TROP_LOGIN = 'trop[bot]';
|
|
|
+
|
|
|
const NO_NOTES = 'No notes';
|
|
|
-const FOLLOW_REPOS = ['electron/electron', 'electron/node'];
|
|
|
|
|
|
const docTypes = new Set(['doc', 'docs']);
|
|
|
const featTypes = new Set(['feat', 'feature']);
|
|
@@ -28,6 +25,8 @@ const fixTypes = new Set(['fix']);
|
|
|
const otherTypes = new Set(['spec', 'build', 'test', 'chore', 'deps', 'refactor', 'tools', 'vendor', 'perf', 'style', 'ci']);
|
|
|
const knownTypes = new Set([...docTypes.keys(), ...featTypes.keys(), ...fixTypes.keys(), ...otherTypes.keys()]);
|
|
|
|
|
|
+const getCacheDir = () => process.env.NOTES_CACHE_PATH || path.resolve(__dirname, '.cache');
|
|
|
+
|
|
|
/**
|
|
|
***
|
|
|
**/
|
|
@@ -39,6 +38,13 @@ class GHKey {
|
|
|
this.repo = repo;
|
|
|
this.number = number;
|
|
|
}
|
|
|
+
|
|
|
+ static NewFromPull (pull) {
|
|
|
+ const owner = pull.base.repo.owner.login;
|
|
|
+ const repo = pull.base.repo.name;
|
|
|
+ const number = pull.number;
|
|
|
+ return new GHKey(owner, repo, number);
|
|
|
+ }
|
|
|
}
|
|
|
|
|
|
class Commit {
|
|
@@ -47,10 +53,13 @@ class Commit {
|
|
|
this.owner = owner; // string
|
|
|
this.repo = repo; // string
|
|
|
|
|
|
- this.body = null; // string
|
|
|
this.isBreakingChange = false;
|
|
|
- this.issueNumber = null; // number
|
|
|
this.note = null; // string
|
|
|
+
|
|
|
+  // A map of the branches to which this change has been merged, e.g.
|
|
|
+ // '8-x-y' => GHKey { owner: 'electron', repo: 'electron', number: 23714 }
|
|
|
+ this.trops = new Map(); // Map<string,GHKey>
|
|
|
+
|
|
|
this.prKeys = new Set(); // GHKey
|
|
|
this.revertHash = null; // string
|
|
|
this.semanticType = null; // string
|
|
@@ -99,65 +108,31 @@ const getNoteFromClerk = async (ghKey) => {
|
|
|
return NO_NOTES;
|
|
|
}
|
|
|
if (comment.body.startsWith(PERSIST_LEAD)) {
|
|
|
- return comment.body
|
|
|
+ let lines = comment.body
|
|
|
.slice(PERSIST_LEAD.length).trim() // remove PERSIST_LEAD
|
|
|
- .split('\r?\n') // break into lines
|
|
|
+ .split(/\r?\n/) // split into lines
|
|
|
.map(line => line.trim())
|
|
|
.filter(line => line.startsWith(QUOTE_LEAD)) // notes are quoted
|
|
|
- .map(line => line.slice(QUOTE_LEAD.length)) // unquote the lines
|
|
|
- .join(' ') // join the note lines
|
|
|
+ .map(line => line.slice(QUOTE_LEAD.length)); // unquote the lines
|
|
|
+
|
|
|
+ const firstLine = lines.shift();
|
|
|
+ // indent anything after the first line to ensure that
|
|
|
+ // multiline notes with their own sub-lists don't get
|
|
|
+ // parsed in the markdown as part of the top-level list
|
|
|
+ // (example: https://github.com/electron/electron/pull/25216)
|
|
|
+ lines = lines.map(line => ' ' + line);
|
|
|
+ return [firstLine, ...lines]
|
|
|
+ .join('\n') // join the lines
|
|
|
.trim();
|
|
|
}
|
|
|
}
|
|
|
};
|
|
|
|
|
|
-// copied from https://github.com/electron/clerk/blob/master/src/index.ts#L4-L13
|
|
|
-const OMIT_FROM_RELEASE_NOTES_KEYS = [
|
|
|
- 'no-notes',
|
|
|
- 'no notes',
|
|
|
- 'no_notes',
|
|
|
- 'none',
|
|
|
- 'no',
|
|
|
- 'nothing',
|
|
|
- 'empty',
|
|
|
- 'blank'
|
|
|
-];
|
|
|
-
|
|
|
-const getNoteFromBody = body => {
|
|
|
- if (!body) {
|
|
|
- return null;
|
|
|
- }
|
|
|
-
|
|
|
- const NOTE_PREFIX = 'Notes: ';
|
|
|
- const NOTE_HEADER = '#### Release Notes';
|
|
|
-
|
|
|
- let note = body
|
|
|
- .split(/\r?\n\r?\n/) // split into paragraphs
|
|
|
- .map(paragraph => paragraph.trim())
|
|
|
- .map(paragraph => paragraph.startsWith(NOTE_HEADER) ? paragraph.slice(NOTE_HEADER.length).trim() : paragraph)
|
|
|
- .find(paragraph => paragraph.startsWith(NOTE_PREFIX));
|
|
|
-
|
|
|
- if (note) {
|
|
|
- note = note
|
|
|
- .slice(NOTE_PREFIX.length)
|
|
|
- .replace(/<!--.*-->/, '') // '<!-- change summary here-->'
|
|
|
- .replace(/\r?\n/, ' ') // remove newlines
|
|
|
- .trim();
|
|
|
- }
|
|
|
-
|
|
|
- if (note && OMIT_FROM_RELEASE_NOTES_KEYS.includes(note.toLowerCase())) {
|
|
|
- return NO_NOTES;
|
|
|
- }
|
|
|
-
|
|
|
- return note;
|
|
|
-};
|
|
|
-
|
|
|
/**
|
|
|
* Looks for our project's conventions in the commit message:
|
|
|
*
|
|
|
* 'semantic: some description' -- sets semanticType, subject
|
|
|
* 'some description (#99999)' -- sets subject, pr
|
|
|
- * 'Fixes #3333' -- sets issueNumber
|
|
|
* 'Merge pull request #99999 from ${branchname}' -- sets pr
|
|
|
* 'This reverts commit ${sha}' -- sets revertHash
|
|
|
* line starting with 'BREAKING CHANGE' in body -- sets isBreakingChange
|
|
@@ -175,13 +150,6 @@ const parseCommitMessage = (commitMessage, commit) => {
|
|
|
subject = subject.slice(0, pos).trim();
|
|
|
}
|
|
|
|
|
|
- if (body) {
|
|
|
- commit.body = body;
|
|
|
-
|
|
|
- const note = getNoteFromBody(body);
|
|
|
- if (note) { commit.note = note; }
|
|
|
- }
|
|
|
-
|
|
|
// if the subject ends in ' (#dddd)', treat it as a pull request id
|
|
|
let match;
|
|
|
if ((match = subject.match(/^(.*)\s\(#(\d+)\)$/))) {
|
|
@@ -213,7 +181,6 @@ const parseCommitMessage = (commitMessage, commit) => {
|
|
|
|
|
|
// https://help.github.com/articles/closing-issues-using-keywords/
|
|
|
if ((match = body.match(/\b(?:close|closes|closed|fix|fixes|fixed|resolve|resolves|resolved|for)\s#(\d+)\b/i))) {
|
|
|
- commit.issueNumber = parseInt(match[1]);
|
|
|
commit.semanticType = commit.semanticType || 'fix';
|
|
|
}
|
|
|
|
|
@@ -237,32 +204,36 @@ const parseCommitMessage = (commitMessage, commit) => {
|
|
|
const parsePullText = (pull, commit) => parseCommitMessage(`${pull.data.title}\n\n${pull.data.body}`, commit);
|
|
|
|
|
|
const getLocalCommitHashes = async (dir, ref) => {
|
|
|
- const args = ['log', '-z', '--format=%H', ref];
|
|
|
- return (await runGit(dir, args)).split('\0').map(hash => hash.trim());
|
|
|
+ const args = ['log', '--format=%H', ref];
|
|
|
+ return (await runGit(dir, args))
|
|
|
+ .split(/\r?\n/) // split into lines
|
|
|
+ .map(hash => hash.trim());
|
|
|
};
|
|
|
|
|
|
// return an array of Commits
|
|
|
const getLocalCommits = async (module, point1, point2) => {
|
|
|
const { owner, repo, dir } = module;
|
|
|
|
|
|
- const fieldSep = '||';
|
|
|
- const format = ['%H', '%B'].join(fieldSep);
|
|
|
- const args = ['log', '-z', '--cherry-pick', '--right-only', '--first-parent', `--format=${format}`, `${point1}..${point2}`];
|
|
|
- const logs = (await runGit(dir, args)).split('\0').map(field => field.trim());
|
|
|
+ const fieldSep = ',';
|
|
|
+ const format = ['%H', '%s'].join(fieldSep);
|
|
|
+ const args = ['log', '--cherry-pick', '--right-only', '--first-parent', `--format=${format}`, `${point1}..${point2}`];
|
|
|
+ const logs = (await runGit(dir, args))
|
|
|
+ .split(/\r?\n/) // split into lines
|
|
|
+ .map(field => field.trim());
|
|
|
|
|
|
const commits = [];
|
|
|
for (const log of logs) {
|
|
|
if (!log) {
|
|
|
continue;
|
|
|
}
|
|
|
- const [hash, message] = log.split(fieldSep, 2).map(field => field.trim());
|
|
|
- commits.push(parseCommitMessage(message, new Commit(hash, owner, repo)));
|
|
|
+ const [hash, subject] = log.split(fieldSep, 2).map(field => field.trim());
|
|
|
+ commits.push(parseCommitMessage(subject, new Commit(hash, owner, repo)));
|
|
|
}
|
|
|
return commits;
|
|
|
};
|
|
|
|
|
|
const checkCache = async (name, operation) => {
|
|
|
- const filename = path.resolve(CACHE_DIR, name);
|
|
|
+ const filename = path.resolve(getCacheDir(), name);
|
|
|
if (fs.existsSync(filename)) {
|
|
|
return JSON.parse(fs.readFileSync(filename, 'utf8'));
|
|
|
}
|
|
@@ -286,12 +257,42 @@ async function runRetryable (fn, maxRetries) {
|
|
|
}
|
|
|
}
|
|
|
// Silently eat 404s.
|
|
|
- if (lastError.status !== 404) throw lastError;
|
|
|
+ // Silently eat 422s, which come from "No commit found for SHA"
|
|
|
+ if (lastError.status !== 404 && lastError.status !== 422) throw lastError;
|
|
|
}
|
|
|
|
|
|
+const getPullCacheFilename = ghKey => `${ghKey.owner}-${ghKey.repo}-pull-${ghKey.number}`;
|
|
|
+
|
|
|
+const getCommitPulls = async (owner, repo, hash) => {
|
|
|
+ const name = `${owner}-${repo}-commit-${hash}`;
|
|
|
+ const retryableFunc = () => octokit.repos.listPullRequestsAssociatedWithCommit({ owner, repo, commit_sha: hash });
|
|
|
+ let ret = await checkCache(name, () => runRetryable(retryableFunc, MAX_FAIL_COUNT));
|
|
|
+
|
|
|
+ // only merged pulls belong in release notes
|
|
|
+ if (ret && ret.data) {
|
|
|
+ ret.data = ret.data.filter(pull => pull.merged_at);
|
|
|
+ }
|
|
|
+
|
|
|
+ // cache the pulls
|
|
|
+ if (ret && ret.data) {
|
|
|
+ for (const pull of ret.data) {
|
|
|
+ const cachefile = getPullCacheFilename(GHKey.NewFromPull(pull));
|
|
|
+ const payload = { ...ret, data: pull };
|
|
|
+ await checkCache(cachefile, () => payload);
|
|
|
+ }
|
|
|
+ }
|
|
|
+
|
|
|
+ // ensure the return value has the expected structure, even on failure
|
|
|
+ if (!ret || !ret.data) {
|
|
|
+ ret = { data: [] };
|
|
|
+ }
|
|
|
+
|
|
|
+ return ret;
|
|
|
+};
|
|
|
+
|
|
|
const getPullRequest = async (ghKey) => {
|
|
|
const { number, owner, repo } = ghKey;
|
|
|
- const name = `${owner}-${repo}-pull-${number}`;
|
|
|
+ const name = getPullCacheFilename(ghKey);
|
|
|
const retryableFunc = () => octokit.pulls.get({ pull_number: number, owner, repo });
|
|
|
return checkCache(name, () => runRetryable(retryableFunc, MAX_FAIL_COUNT));
|
|
|
};
|
|
@@ -306,10 +307,20 @@ const getComments = async (ghKey) => {
|
|
|
const addRepoToPool = async (pool, repo, from, to) => {
|
|
|
const commonAncestor = await getCommonAncestor(repo.dir, from, to);
|
|
|
|
|
|
- // add the commits
|
|
|
- const oldHashes = await getLocalCommitHashes(repo.dir, from);
|
|
|
- oldHashes.forEach(hash => { pool.processedHashes.add(hash); });
|
|
|
+  // record the old branch's commit hashes as already processed so they are excluded later
|
|
|
+ for (const oldHash of await getLocalCommitHashes(repo.dir, from)) {
|
|
|
+ pool.processedHashes.add(oldHash);
|
|
|
+ }
|
|
|
+
|
|
|
+ // get the new branch's commits and the pulls associated with them
|
|
|
const commits = await getLocalCommits(repo, commonAncestor, to);
|
|
|
+ for (const commit of commits) {
|
|
|
+ const { owner, repo, hash } = commit;
|
|
|
+ for (const pull of (await getCommitPulls(owner, repo, hash)).data) {
|
|
|
+ commit.prKeys.add(GHKey.NewFromPull(pull));
|
|
|
+ }
|
|
|
+ }
|
|
|
+
|
|
|
pool.commits.push(...commits);
|
|
|
|
|
|
// add the pulls
|
|
@@ -317,95 +328,59 @@ const addRepoToPool = async (pool, repo, from, to) => {
|
|
|
let prKey;
|
|
|
for (prKey of commit.prKeys.values()) {
|
|
|
const pull = await getPullRequest(prKey);
|
|
|
- if (!pull || !pull.data) break; // couldn't get it
|
|
|
- if (pool.pulls[prKey.number]) break; // already have it
|
|
|
+ if (!pull || !pull.data) continue; // couldn't get it
|
|
|
pool.pulls[prKey.number] = pull;
|
|
|
parsePullText(pull, commit);
|
|
|
}
|
|
|
}
|
|
|
};
|
|
|
|
|
|
-/***
|
|
|
-**** Other Repos
|
|
|
-***/
|
|
|
-
|
|
|
-// other repos - gn
|
|
|
-
|
|
|
-const getDepsVariable = async (ref, key) => {
|
|
|
- // get a copy of that reference point's DEPS file
|
|
|
- const deps = await runGit(ELECTRON_VERSION, ['show', `${ref}:DEPS`]);
|
|
|
- const filename = path.resolve(os.tmpdir(), 'DEPS');
|
|
|
- fs.writeFileSync(filename, deps);
|
|
|
-
|
|
|
- // query the DEPS file
|
|
|
- const response = childProcess.spawnSync(
|
|
|
- 'gclient',
|
|
|
- ['getdep', '--deps-file', filename, '--var', key],
|
|
|
- { encoding: 'utf8' }
|
|
|
- );
|
|
|
-
|
|
|
- // cleanup
|
|
|
- fs.unlinkSync(filename);
|
|
|
- return response.stdout.trim();
|
|
|
-};
|
|
|
-
|
|
|
-const getDependencyCommitsGN = async (pool, fromRef, toRef) => {
|
|
|
- const repos = [{ // just node
|
|
|
- owner: 'electron',
|
|
|
- repo: 'node',
|
|
|
- dir: path.resolve(SRC_DIR, 'third_party', 'electron_node'),
|
|
|
- deps_variable_name: 'node_version'
|
|
|
- }];
|
|
|
-
|
|
|
- for (const repo of repos) {
|
|
|
- // the 'DEPS' file holds the dependency reference point
|
|
|
- const key = repo.deps_variable_name;
|
|
|
- const from = await getDepsVariable(fromRef, key);
|
|
|
- const to = await getDepsVariable(toRef, key);
|
|
|
- await addRepoToPool(pool, repo, from, to);
|
|
|
+// @return Map<string,GHKey>
|
|
|
+// where the key is a branch name (e.g. '7-1-x' or '8-x-y')
|
|
|
+// and the value is a GHKey to the PR
|
|
|
+async function getMergedTrops (commit, pool) {
|
|
|
+ const branches = new Map();
|
|
|
+
|
|
|
+ for (const prKey of commit.prKeys.values()) {
|
|
|
+ const pull = pool.pulls[prKey.number];
|
|
|
+ const mergedBranches = new Set(
|
|
|
+ ((pull && pull.data && pull.data.labels) ? pull.data.labels : [])
|
|
|
+ .map(label => ((label && label.name) ? label.name : '').match(/merged\/([0-9]+-[x0-9]-[xy0-9])/))
|
|
|
+ .filter(match => match)
|
|
|
+ .map(match => match[1])
|
|
|
+ );
|
|
|
+
|
|
|
+ if (mergedBranches.size > 0) {
|
|
|
+ const isTropComment = (comment) => comment && comment.user && comment.user.login === TROP_LOGIN;
|
|
|
+
|
|
|
+ const ghKey = GHKey.NewFromPull(pull.data);
|
|
|
+ const backportRegex = /backported this PR to "(.*)",\s+please check out #(\d+)/;
|
|
|
+ const getBranchNameAndPullKey = (comment) => {
|
|
|
+ const match = ((comment && comment.body) ? comment.body : '').match(backportRegex);
|
|
|
+ return match ? [match[1], new GHKey(ghKey.owner, ghKey.repo, parseInt(match[2]))] : null;
|
|
|
+ };
|
|
|
+
|
|
|
+ const comments = await getComments(ghKey);
|
|
|
+ ((comments && comments.data) ? comments.data : [])
|
|
|
+ .filter(isTropComment)
|
|
|
+ .map(getBranchNameAndPullKey)
|
|
|
+ .filter(pair => pair)
|
|
|
+ .filter(([branch]) => mergedBranches.has(branch))
|
|
|
+ .forEach(([branch, key]) => branches.set(branch, key));
|
|
|
+ }
|
|
|
}
|
|
|
-};
|
|
|
-
|
|
|
-// Changes are interesting if they make a change relative to a previous
|
|
|
-// release in the same series. For example if you fix a Y.0.0 bug, that
|
|
|
-// should be included in the Y.0.1 notes even if it's also tropped back
|
|
|
-// to X.0.1.
|
|
|
-//
|
|
|
-// The phrase 'previous release' is important: if this is the first
|
|
|
-// prerelease or first stable release in a series, we omit previous
|
|
|
-// branches' changes. Otherwise we will have an overwhelmingly long
|
|
|
-// list of mostly-irrelevant changes.
|
|
|
-const shouldIncludeMultibranchChanges = (version) => {
|
|
|
- let show = true;
|
|
|
-
|
|
|
- if (semver.valid(version)) {
|
|
|
- const prerelease = semver.prerelease(version);
|
|
|
- show = prerelease
|
|
|
- ? parseInt(prerelease.pop()) > 1
|
|
|
- : semver.patch(version) > 0;
|
|
|
- }
|
|
|
-
|
|
|
- return show;
|
|
|
-};
|
|
|
|
|
|
-function getOldestMajorBranchOfPull (pull) {
|
|
|
- return pull.data.labels
|
|
|
- .map(label => label.name.match(/merged\/(\d+)-(\d+)-x/) || label.name.match(/merged\/(\d+)-x-y/))
|
|
|
- .filter(label => !!label)
|
|
|
- .map(label => parseInt(label[1]))
|
|
|
- .filter(major => !!major)
|
|
|
- .sort()
|
|
|
- .shift();
|
|
|
+ return branches;
|
|
|
}
|
|
|
|
|
|
-function getOldestMajorBranchOfCommit (commit, pool) {
|
|
|
- return [ ...commit.prKeys.values() ]
|
|
|
- .map(prKey => pool.pulls[prKey.number])
|
|
|
- .filter(pull => !!pull)
|
|
|
- .map(pull => getOldestMajorBranchOfPull(pull))
|
|
|
- .filter(major => !!major)
|
|
|
- .sort()
|
|
|
- .shift();
|
|
|
+// @return the shorthand name of the branch that `ref` is on,
|
|
|
+// e.g. a ref of '10.0.0-beta.1' will return '10-x-y'
|
|
|
+async function getBranchNameOfRef (ref, dir) {
|
|
|
+ return (await runGit(dir, ['branch', '--all', '--contains', ref, '--sort', 'version:refname']))
|
|
|
+ .split(/\r?\n/) // split into lines
|
|
|
+ .shift() // we sorted by refname and want the first result
|
|
|
+ .match(/(?:.*\/)?(.*)/)[1] // 'remote/origins/10-x-y' -> '10-x-y'
|
|
|
+ .trim();
|
|
|
}
|
|
|
|
|
|
/***
|
|
@@ -413,26 +388,20 @@ function getOldestMajorBranchOfCommit (commit, pool) {
|
|
|
***/
|
|
|
|
|
|
const getNotes = async (fromRef, toRef, newVersion) => {
|
|
|
- if (!fs.existsSync(CACHE_DIR)) {
|
|
|
- fs.mkdirSync(CACHE_DIR);
|
|
|
+ const cacheDir = getCacheDir();
|
|
|
+ if (!fs.existsSync(cacheDir)) {
|
|
|
+ fs.mkdirSync(cacheDir);
|
|
|
}
|
|
|
|
|
|
const pool = new Pool();
|
|
|
+ const toBranch = await getBranchNameOfRef(toRef, ELECTRON_DIR);
|
|
|
+
|
|
|
+ console.log(`Generating release notes between ${fromRef} and ${toRef} for version ${newVersion} in branch ${toBranch}`);
|
|
|
|
|
|
// get the electron/electron commits
|
|
|
- const electron = { owner: 'electron', repo: 'electron', dir: ELECTRON_VERSION };
|
|
|
+ const electron = { owner: 'electron', repo: 'electron', dir: ELECTRON_DIR };
|
|
|
await addRepoToPool(pool, electron, fromRef, toRef);
|
|
|
|
|
|
- // Don't include submodules if comparing across major versions;
|
|
|
- // there's just too much churn otherwise.
|
|
|
- const includeDeps = semver.valid(fromRef) &&
|
|
|
- semver.valid(toRef) &&
|
|
|
- semver.major(fromRef) === semver.major(toRef);
|
|
|
-
|
|
|
- if (includeDeps) {
|
|
|
- await getDependencyCommitsGN(pool, fromRef, toRef);
|
|
|
- }
|
|
|
-
|
|
|
// remove any old commits
|
|
|
pool.commits = pool.commits.filter(commit => !pool.processedHashes.has(commit.hash));
|
|
|
|
|
@@ -457,27 +426,24 @@ const getNotes = async (fromRef, toRef, newVersion) => {
|
|
|
// ensure the commit has a note
|
|
|
for (const commit of pool.commits) {
|
|
|
for (const prKey of commit.prKeys.values()) {
|
|
|
- commit.note = commit.note || await getNoteFromClerk(prKey);
|
|
|
if (commit.note) {
|
|
|
break;
|
|
|
}
|
|
|
+ commit.note = await getNoteFromClerk(prKey);
|
|
|
}
|
|
|
- // use a fallback note in case someone missed a 'Notes' comment
|
|
|
- commit.note = commit.note || commit.subject;
|
|
|
}
|
|
|
|
|
|
// remove non-user-facing commits
|
|
|
pool.commits = pool.commits
|
|
|
+ .filter(commit => commit && commit.note)
|
|
|
.filter(commit => commit.note !== NO_NOTES)
|
|
|
- .filter(commit => !((commit.note || commit.subject).match(/^[Bb]ump v\d+\.\d+\.\d+/)));
|
|
|
+ .filter(commit => commit.note.match(/^[Bb]ump v\d+\.\d+\.\d+/) === null);
|
|
|
|
|
|
- if (!shouldIncludeMultibranchChanges(newVersion)) {
|
|
|
- const currentMajor = semver.parse(newVersion).major;
|
|
|
- pool.commits = pool.commits
|
|
|
- .filter(commit => getOldestMajorBranchOfCommit(commit, pool) >= currentMajor);
|
|
|
+ for (const commit of pool.commits) {
|
|
|
+ commit.trops = await getMergedTrops(commit, pool);
|
|
|
}
|
|
|
|
|
|
- pool.commits = removeSupercededChromiumUpdates(pool.commits);
|
|
|
+ pool.commits = removeSupercededStackUpdates(pool.commits);
|
|
|
|
|
|
const notes = {
|
|
|
breaking: [],
|
|
@@ -486,7 +452,8 @@ const getNotes = async (fromRef, toRef, newVersion) => {
|
|
|
fix: [],
|
|
|
other: [],
|
|
|
unknown: [],
|
|
|
- name: newVersion
|
|
|
+ name: newVersion,
|
|
|
+ toBranch
|
|
|
};
|
|
|
|
|
|
pool.commits.forEach(commit => {
|
|
@@ -511,50 +478,76 @@ const getNotes = async (fromRef, toRef, newVersion) => {
|
|
|
return notes;
|
|
|
};
|
|
|
|
|
|
-const removeSupercededChromiumUpdates = (commits) => {
|
|
|
- const chromiumRegex = /^Updated Chromium to \d+\.\d+\.\d+\.\d+/;
|
|
|
- const updates = commits.filter(commit => (commit.note || commit.subject).match(chromiumRegex));
|
|
|
- const keepers = commits.filter(commit => !updates.includes(commit));
|
|
|
+const removeSupercededStackUpdates = (commits) => {
|
|
|
+ const updateRegex = /^Updated ([a-zA-Z.]+) to v?([\d.]+)/;
|
|
|
+ const notupdates = [];
|
|
|
|
|
|
- // keep the newest update.
|
|
|
- if (updates.length) {
|
|
|
- const compare = (a, b) => (a.note || a.subject).localeCompare(b.note || b.subject);
|
|
|
- keepers.push(updates.sort(compare).pop());
|
|
|
+ const newest = {};
|
|
|
+ for (const commit of commits) {
|
|
|
+ const match = (commit.note || commit.subject).match(updateRegex);
|
|
|
+ if (!match) {
|
|
|
+ notupdates.push(commit);
|
|
|
+ continue;
|
|
|
+ }
|
|
|
+ const [, dep, version] = match;
|
|
|
+ if (!newest[dep] || newest[dep].version < version) {
|
|
|
+ newest[dep] = { commit, version };
|
|
|
+ }
|
|
|
}
|
|
|
|
|
|
- return keepers;
|
|
|
+ return [...notupdates, ...Object.values(newest).map(o => o.commit)];
|
|
|
};
|
|
|
|
|
|
/***
|
|
|
**** Render
|
|
|
***/
|
|
|
|
|
|
-const renderLink = (commit, explicitLinks) => {
|
|
|
- let link;
|
|
|
- const { owner, repo } = commit;
|
|
|
- const keyIt = commit.prKeys.values().next();
|
|
|
- if (keyIt.done) /* no PRs */ {
|
|
|
- const { hash } = commit;
|
|
|
- const url = `https://github.com/${owner}/${repo}/commit/${hash}`;
|
|
|
- const text = owner === 'electron' && repo === 'electron'
|
|
|
- ? `${hash.slice(0, 8)}`
|
|
|
- : `${owner}/${repo}@${hash.slice(0, 8)}`;
|
|
|
- link = explicitLinks ? `[${text}](${url})` : text;
|
|
|
- } else {
|
|
|
- const { number } = keyIt.value;
|
|
|
- const url = `https://github.com/${owner}/${repo}/pull/${number}`;
|
|
|
- const text = owner === 'electron' && repo === 'electron'
|
|
|
- ? `#${number}`
|
|
|
- : `${owner}/${repo}#${number}`;
|
|
|
- link = explicitLinks ? `[${text}](${url})` : text;
|
|
|
- }
|
|
|
- return link;
|
|
|
-};
|
|
|
+// @return the pull request's GitHub URL
|
|
|
+const buildPullURL = ghKey => `https://github.com/${ghKey.owner}/${ghKey.repo}/pull/${ghKey.number}`;
|
|
|
+
|
|
|
+const renderPull = ghKey => `[#${ghKey.number}](${buildPullURL(ghKey)})`;
|
|
|
+
|
|
|
+// @return the commit's GitHub URL
|
|
|
+const buildCommitURL = commit => `https://github.com/${commit.owner}/${commit.repo}/commit/${commit.hash}`;
|
|
|
+
|
|
|
+const renderCommit = commit => `[${commit.hash.slice(0, 8)}](${buildCommitURL(commit)})`;
|
|
|
+
|
|
|
+// @return a markdown link to the PR if available; otherwise, the git commit
|
|
|
+function renderLink (commit) {
|
|
|
+ const maybePull = commit.prKeys.values().next();
|
|
|
+ return maybePull.value ? renderPull(maybePull.value) : renderCommit(commit);
|
|
|
+}
|
|
|
|
|
|
-const renderCommit = (commit, explicitLinks) => {
|
|
|
- // clean up the note
|
|
|
- let note = commit.note || commit.subject;
|
|
|
+// @return a terser branch name,
|
|
|
+// e.g. '7-2-x' -> '7.2' and '8-x-y' -> '8'
|
|
|
+const renderBranchName = name => name.replace(/-[a-zA-Z]/g, '').replace('-', '.');
|
|
|
+
|
|
|
+const renderTrop = (branch, ghKey) => `[${renderBranchName(branch)}](${buildPullURL(ghKey)})`;
|
|
|
+
|
|
|
+// @return markdown-formatted links to other branches' trops,
|
|
|
+// e.g. "(Also in 7.2, 8, 9)"
|
|
|
+function renderTrops (commit, excludeBranch) {
|
|
|
+ const body = [...commit.trops.entries()]
|
|
|
+ .filter(([branch]) => branch !== excludeBranch)
|
|
|
+ .sort(([branchA], [branchB]) => parseInt(branchA) - parseInt(branchB)) // sort by semver major
|
|
|
+ .map(([branch, key]) => renderTrop(branch, key))
|
|
|
+ .join(', ');
|
|
|
+ return body ? `<span style="font-size:small;">(Also in ${body})</span>` : body;
|
|
|
+}
|
|
|
+
|
|
|
+// @return a slightly cleaned-up human-readable change description
|
|
|
+function renderDescription (commit) {
|
|
|
+ let note = commit.note || commit.subject || '';
|
|
|
note = note.trim();
|
|
|
+
|
|
|
+  // the release notes render every change as a bullet point, so if the note author
|
|
|
+ // manually started the content with a bullet point, that will confuse
|
|
|
+ // the markdown renderer -- remove the redundant bullet point
|
|
|
+ // (example: https://github.com/electron/electron/pull/25216)
|
|
|
+ if (note.startsWith('*')) {
|
|
|
+ note = note.slice(1).trim();
|
|
|
+ }
|
|
|
+
|
|
|
if (note.length !== 0) {
|
|
|
note = note[0].toUpperCase() + note.substr(1);
|
|
|
|
|
@@ -590,30 +583,24 @@ const renderCommit = (commit, explicitLinks) => {
|
|
|
}
|
|
|
}
|
|
|
|
|
|
- const link = renderLink(commit, explicitLinks);
|
|
|
+ return note;
|
|
|
+}
|
|
|
|
|
|
- return { note, link };
|
|
|
-};
|
|
|
+// @return markdown-formatted release note line item,
|
|
|
+// e.g. '* Fixed a foo. #12345 (Also in 7.2, 8, 9)'
|
|
|
+const renderNote = (commit, excludeBranch) =>
|
|
|
+ `* ${renderDescription(commit)} ${renderLink(commit)} ${renderTrops(commit, excludeBranch)}\n`;
|
|
|
|
|
|
-const renderNotes = (notes, explicitLinks) => {
|
|
|
+const renderNotes = (notes) => {
|
|
|
const rendered = [`# Release Notes for ${notes.name}\n\n`];
|
|
|
|
|
|
const renderSection = (title, commits) => {
|
|
|
- if (commits.length === 0) {
|
|
|
- return;
|
|
|
- }
|
|
|
- const notes = new Map();
|
|
|
- for (const note of commits.map(commit => renderCommit(commit, explicitLinks))) {
|
|
|
- if (!notes.has(note.note)) {
|
|
|
- notes.set(note.note, [note.link]);
|
|
|
- } else {
|
|
|
- notes.get(note.note).push(note.link);
|
|
|
- }
|
|
|
+ if (commits.length > 0) {
|
|
|
+ rendered.push(
|
|
|
+ `## ${title}\n\n`,
|
|
|
+ ...(commits.map(commit => renderNote(commit, notes.toBranch)).sort())
|
|
|
+ );
|
|
|
}
|
|
|
- rendered.push(`## ${title}\n\n`);
|
|
|
- const lines = [];
|
|
|
- notes.forEach((links, key) => lines.push(` * ${key} ${links.map(link => link.toString()).sort().join(', ')}\n`));
|
|
|
- rendered.push(...lines.sort(), '\n');
|
|
|
};
|
|
|
|
|
|
renderSection('Breaking Changes', notes.breaking);
|
|
@@ -622,7 +609,7 @@ const renderNotes = (notes, explicitLinks) => {
|
|
|
renderSection('Other Changes', notes.other);
|
|
|
|
|
|
if (notes.docs.length) {
|
|
|
- const docs = notes.docs.map(commit => renderLink(commit, explicitLinks)).sort();
|
|
|
+ const docs = notes.docs.map(commit => renderLink(commit)).sort();
|
|
|
rendered.push('## Documentation\n\n', ` * Documentation changes: ${docs.join(', ')}\n`, '\n');
|
|
|
}
|
|
|
|