semantic-release 20.0.3-beta.1 → 20.0.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/docs/recipes/release-workflow/pre-releases.md +1 -1
- package/docs/usage/workflow-configuration.md +1 -1
- package/lib/branches/expand.js +7 -7
- package/lib/branches/get-tags.js +14 -13
- package/lib/branches/index.js +22 -22
- package/lib/branches/normalize.js +17 -18
- package/lib/definitions/branches.js +9 -9
- package/lib/definitions/constants.js +8 -8
- package/lib/definitions/errors.js +81 -81
- package/lib/definitions/plugins.js +20 -20
- package/lib/plugins/index.js +18 -18
- package/lib/plugins/normalize.js +17 -17
- package/lib/plugins/pipeline.js +32 -33
- package/lib/plugins/utils.js +9 -20
- package/package.json +4 -4
package/docs/recipes/release-workflow/pre-releases.md
CHANGED
@@ -62,7 +62,7 @@ The Git history of the repository is now:
 
 We now decide to work on another future major release, in parallel of the beta one, which will also be composed of multiple features, some of them being breaking changes.
 
-To implement that workflow we can create the branch `alpha` from the branch `beta` and commit our first feature there. When pushing that commit, **semantic-release** will publish the pre-release version `3.0.0-alpha.1` on the dist-tag `@alpha`. That allow us to run integration tests by installing our module with `npm install example-module@alpha`. Other users installing with `npm install example-module` will still receive the version `1.0.
+To implement that workflow we can create the branch `alpha` from the branch `beta` and commit our first feature there. When pushing that commit, **semantic-release** will publish the pre-release version `3.0.0-alpha.1` on the dist-tag `@alpha`. That allow us to run integration tests by installing our module with `npm install example-module@alpha`. Other users installing with `npm install example-module` will still receive the version `1.0.1`.
 
 The Git history of the repository is now:
 
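For reference, a minimal sketch of the `branches` configuration this recipe implies (the branch names follow the recipe; this is an illustration, not the package's default configuration):

const branches = [
  'master',                          // stable releases, published to the default dist-tag
  {name: 'beta', prerelease: true},  // e.g. 2.0.0-beta.1, published to the `beta` dist-tag
  {name: 'alpha', prerelease: true}, // e.g. 3.0.0-alpha.1, published to the `alpha` dist-tag
];

With such a configuration, `npm install example-module` keeps resolving to the latest stable release, while `npm install example-module@alpha` resolves to the latest `alpha` pre-release.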
package/docs/usage/workflow-configuration.md
CHANGED
@@ -12,7 +12,7 @@ See [Release workflow recipes](../recipes/release-workflow/README.md#release-wor
 The release workflow is configured via the [branches option](./configuration.md#branches) which accepts a single or an array of branch definitions.
 Each branch can be defined either as a string, a [glob](https://github.com/micromatch/micromatch#matching-features) or an object. For string and glob definitions each [property](#branches-properties) will be defaulted.
 
-A branch can defined as one of three types:
+A branch can be defined as one of three types:
 
 - [release](#release-branches): to make releases on top of the last version released
 - [maintenance](#maintenance-branches): to make releases on top of an old release
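As a rough illustration of the definition forms mentioned in this hunk (a plain string, a glob expanded against the remote branches, and an object with explicit properties; the values here are examples, not the package defaults):

const branches = [
  'master',                                           // string: a branch name, other properties defaulted
  '+([0-9])?(.{+([0-9]),x}).x',                       // glob: matches maintenance branches such as 1.x or 1.2.x
  {name: 'beta', channel: 'beta', prerelease: true},  // object: explicit name, channel and prerelease
];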
package/lib/branches/expand.js
CHANGED
@@ -1,18 +1,18 @@
-import {
-import micromatch from
-import {
+import {isString, mapValues, omit, remove, template} from 'lodash-es';
+import micromatch from 'micromatch';
+import {getBranches} from '../git.js';
 
-export default async (repositoryUrl, {
-const gitBranches = await getBranches(repositoryUrl, {
+export default async (repositoryUrl, {cwd}, branches) => {
+const gitBranches = await getBranches(repositoryUrl, {cwd});
 
 return branches.reduce(
 (branches, branch) => [
 ...branches,
 ...remove(gitBranches, (name) => micromatch(gitBranches, branch.name).includes(name)).map((name) => ({
 name,
-...mapValues(omit(branch,
+...mapValues(omit(branch, 'name'), (value) => (isString(value) ? template(value)({name}) : value)),
 })),
 ],
 []
 );
-}
+}
package/lib/branches/get-tags.js
CHANGED
@@ -1,35 +1,36 @@
-import {
-import semver from
-import pReduce from
-import debugTags from
-import {
+import {escapeRegExp, template} from 'lodash-es';
+import semver from 'semver';
+import pReduce from 'p-reduce';
+import debugTags from 'debug';
+import {getNote, getTags} from '../../lib/git.js';
 
-const debug = debugTags(
+const debug = debugTags('semantic-release:get-tags');
 
-
+
+export default async ({cwd, env, options: {tagFormat}}, branches) => {
 // Generate a regex to parse tags formatted with `tagFormat`
 // by replacing the `version` variable in the template by `(.+)`.
 // The `tagFormat` is compiled with space as the `version` as it's an invalid tag character,
 // so it's guaranteed to no be present in the `tagFormat`.
-const tagRegexp = `^${escapeRegExp(template(tagFormat)({
+const tagRegexp = `^${escapeRegExp(template(tagFormat)({version: ' '})).replace(' ', '(.+)')}`;
 
 return pReduce(
 branches,
 async (branches, branch) => {
 const branchTags = await pReduce(
-await getTags(branch.name, {
+await getTags(branch.name, {cwd, env}),
 async (branchTags, tag) => {
 const [, version] = tag.match(tagRegexp) || [];
 return version && semver.valid(semver.clean(version))
-? [...branchTags, {
+? [...branchTags, {gitTag: tag, version, channels: (await getNote(tag, {cwd, env})).channels || [null]}]
 : branchTags;
 },
 []
 );
 
-debug(
-return [...branches, {
+debug('found tags for branch %s: %o', branch.name, branchTags);
+return [...branches, {...branch, tags: branchTags}];
 },
 []
 );
-}
+}
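A standalone sketch of the tagFormat-to-regexp trick described in the comment above, using semantic-release's default `v${version}` tag format for illustration (not part of the package's exported API):

import {escapeRegExp, template} from 'lodash-es';

const tagFormat = 'v${version}';
// Compile the template with a space as the version (invalid in a Git tag), then swap it for a capture group.
const tagRegexp = `^${escapeRegExp(template(tagFormat)({version: ' '})).replace(' ', '(.+)')}`;

console.log(new RegExp(tagRegexp).exec('v1.2.3')?.[1]); // -> '1.2.3'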
package/lib/branches/index.js
CHANGED
@@ -1,51 +1,51 @@
-import {
-import AggregateError from
-import pEachSeries from
-import * as DEFINITIONS from
-import getError from
-import {
-import expand from
-import getTags from
-import * as normalize from
+import {isRegExp, isString} from 'lodash-es';
+import AggregateError from 'aggregate-error';
+import pEachSeries from 'p-each-series';
+import * as DEFINITIONS from '../definitions/branches.js';
+import getError from '../get-error.js';
+import {fetch, fetchNotes, verifyBranchName} from '../git.js';
+import expand from './expand.js';
+import getTags from './get-tags.js';
+import * as normalize from './normalize.js';
 
 export default async (repositoryUrl, ciBranch, context) => {
-const {
+const {cwd, env} = context;
 
 const remoteBranches = await expand(
 repositoryUrl,
 context,
-context.options.branches.map((branch) => (isString(branch) || isRegExp(branch) ? {
+context.options.branches.map((branch) => (isString(branch) || isRegExp(branch) ? {name: branch} : branch))
 );
 
-await pEachSeries(remoteBranches, async ({
-await fetch(repositoryUrl, name, ciBranch, {
+await pEachSeries(remoteBranches, async ({name}) => {
+await fetch(repositoryUrl, name, ciBranch, {cwd, env});
 });
 
-await fetchNotes(repositoryUrl, {
+await fetchNotes(repositoryUrl, {cwd, env});
 
 const branches = await getTags(context, remoteBranches);
 
 const errors = [];
 const branchesByType = Object.entries(DEFINITIONS).reduce(
 // eslint-disable-next-line unicorn/no-fn-reference-in-iterator
-(branchesByType, [type, {
+(branchesByType, [type, {filter}]) => ({[type]: branches.filter(filter), ...branchesByType}),
 {}
 );
 
-const result = Object.entries(DEFINITIONS).reduce((result, [type, {
+const result = Object.entries(DEFINITIONS).reduce((result, [type, {branchesValidator, branchValidator}]) => {
 branchesByType[type].forEach((branch) => {
 if (branchValidator && !branchValidator(branch)) {
-errors.push(getError(`E${type.toUpperCase()}BRANCH`, {
+errors.push(getError(`E${type.toUpperCase()}BRANCH`, {branch}));
 }
 });
 
 const branchesOfType = normalize[type](branchesByType);
 
 if (!branchesValidator(branchesOfType)) {
-errors.push(getError(`E${type.toUpperCase()}BRANCHES`, {
+errors.push(getError(`E${type.toUpperCase()}BRANCHES`, {branches: branchesOfType}));
 }
 
-return {
+return {...result, [type]: branchesOfType};
 }, {});
 
 const duplicates = [...branches]
@@ -54,12 +54,12 @@ export default async (repositoryUrl, ciBranch, context) => {
 .filter((_, idx, array) => array[idx] === array[idx + 1] && array[idx] !== array[idx - 1]);
 
 if (duplicates.length > 0) {
-errors.push(getError(
+errors.push(getError('EDUPLICATEBRANCHES', {duplicates}));
 }
 
 await pEachSeries(branches, async (branch) => {
 if (!(await verifyBranchName(branch.name))) {
-errors.push(getError(
+errors.push(getError('EINVALIDBRANCHNAME', branch));
 }
 });
 
@@ -68,4 +68,4 @@ export default async (repositoryUrl, ciBranch, context) => {
 }
 
 return [...result.maintenance, ...result.release, ...result.prerelease];
-}
+}
package/lib/branches/normalize.js
CHANGED
@@ -1,28 +1,27 @@
-import {
-import semverDiff from
-import {
+import {isNil, sortBy} from 'lodash-es';
+import semverDiff from 'semver-diff';
+import {FIRST_RELEASE, RELEASE_TYPE} from '../definitions/constants.js';
 import {
 getFirstVersion,
 getLatestVersion,
-getLowerBound,
-getRange,
+getLowerBound, getRange,
 getUpperBound,
 highest,
 isMajorRange,
 lowest,
-tagsToVersions
-} from
+tagsToVersions
+} from '../utils.js';
 
-export function maintenance({
+export function maintenance({maintenance, release}) {
 return sortBy(
-maintenance.map(({
+maintenance.map(({name, range, channel, ...rest}) => ({
 ...rest,
 name,
 range: range || name,
 channel: isNil(channel) ? name : channel,
 })),
-
-).map(({
+'range'
+).map(({name, range, tags, ...rest}, idx, branches) => {
 const versions = tagsToVersions(tags);
 // Find the lower bound based on Maintenance branches
 const maintenanceMin =
@@ -45,7 +44,7 @@ export function maintenance({ maintenance, release }) {
 const diff = semverDiff(min, max);
 return {
 ...rest,
-type:
+type: 'maintenance',
 name,
 tags,
 range: getRange(min, max),
@@ -55,7 +54,7 @@ export function maintenance({ maintenance, release }) {
 });
 }
 
-export function release({
+export function release({release}) {
 if (release.length === 0) {
 return release;
 }
@@ -63,7 +62,7 @@ export function release({ release }) {
 // The intial lastVersion is the last release from the base branch of `FIRST_RELEASE` (1.0.0)
 let lastVersion = getLatestVersion(tagsToVersions(release[0].tags)) || FIRST_RELEASE;
 
-return release.map(({
+return release.map(({name, tags, channel, ...rest}, idx) => {
 const versions = tagsToVersions(tags);
 // The new lastVersion is the highest version between the current branch last release and the previous branch lastVersion
 lastVersion = highest(getLatestVersion(versions), lastVersion);
@@ -80,7 +79,7 @@ export function release({ release }) {
 ...rest,
 channel: idx === 0 ? channel : isNil(channel) ? name : channel,
 tags,
-type:
+type: 'release',
 name,
 range: getRange(lastVersion, bound),
 accept: bound ? RELEASE_TYPE.slice(0, RELEASE_TYPE.indexOf(diff)) : RELEASE_TYPE,
@@ -89,13 +88,13 @@ export function release({ release }) {
 });
 }
 
-export function prerelease({
-return prerelease.map(({
+export function prerelease({prerelease}) {
+return prerelease.map(({name, prerelease, channel, tags, ...rest}) => {
 const preid = prerelease === true ? name : prerelease;
 return {
 ...rest,
 channel: isNil(channel) ? name : channel,
-type:
+type: 'prerelease',
 name,
 prerelease: preid,
 tags,
package/lib/definitions/branches.js
CHANGED
@@ -1,18 +1,18 @@
-import {
-import semver from
-import {
+import {isNil, uniqBy} from 'lodash-es';
+import semver from 'semver';
+import {isMaintenanceRange} from '../utils.js';
 
 export const maintenance = {
-filter: ({
-branchValidator: ({
-branchesValidator: (branches) => uniqBy(branches, ({
+filter: ({name, range}) => (!isNil(range) && range !== false) || isMaintenanceRange(name),
+branchValidator: ({range}) => (isNil(range) ? true : isMaintenanceRange(range)),
+branchesValidator: (branches) => uniqBy(branches, ({range}) => semver.validRange(range)).length === branches.length,
 };
 
 export const prerelease = {
-filter: ({
-branchValidator: ({
+filter: ({prerelease}) => !isNil(prerelease) && prerelease !== false,
+branchValidator: ({name, prerelease}) =>
 Boolean(prerelease) && Boolean(semver.valid(`1.0.0-${prerelease === true ? name : prerelease}.1`)),
-branchesValidator: (branches) => uniqBy(branches,
+branchesValidator: (branches) => uniqBy(branches, 'prerelease').length === branches.length,
 };
 
 export const release = {
package/lib/definitions/constants.js
CHANGED
@@ -1,17 +1,17 @@
-export const RELEASE_TYPE = [
+export const RELEASE_TYPE = ['patch', 'minor', 'major'];
 
-export const FIRST_RELEASE =
+export const FIRST_RELEASE = '1.0.0';
 
-export const FIRSTPRERELEASE =
+export const FIRSTPRERELEASE = '1';
 
-export const COMMIT_NAME =
+export const COMMIT_NAME = 'semantic-release-bot';
 
-export const COMMIT_EMAIL =
+export const COMMIT_EMAIL = 'semantic-release-bot@martynus.net';
 
-export const RELEASE_NOTES_SEPARATOR =
+export const RELEASE_NOTES_SEPARATOR = '\n\n';
 
-export const SECRET_REPLACEMENT =
+export const SECRET_REPLACEMENT = '[secure]';
 
 export const SECRET_MIN_SIZE = 5;
 
-export const GIT_NOTE_REF =
+export const GIT_NOTE_REF = 'semantic-release';
package/lib/definitions/errors.js
CHANGED
@@ -1,21 +1,21 @@
-import {
-import {
-import {
-import {
+import {inspect} from 'node:util';
+import {createRequire} from 'node:module';
+import {isString, toLower, trim} from 'lodash-es';
+import {RELEASE_TYPE} from './constants.js';
 
 const require = createRequire(import.meta.url);
-const pkg = require(
+const pkg = require('../../package.json');
 
-const [homepage] = pkg.homepage.split(
+const [homepage] = pkg.homepage.split('#');
 const stringify = (object) =>
-isString(object) ? object : inspect(object, {
+isString(object) ? object : inspect(object, {breakLength: Infinity, depth: 2, maxArrayLength: 5});
 const linkify = (file) => `${homepage}/blob/master/${file}`;
 const wordsList = (words) =>
-`${words.slice(0, -1).join(
+`${words.slice(0, -1).join(', ')}${words.length > 1 ? ` or ${words[words.length - 1]}` : trim(words[0])}`;
 
-export function ENOGITREPO({
+export function ENOGITREPO({cwd}) {
 return {
-message:
+message: 'Not running from a git repository.',
 details: `The \`semantic-release\` command must be executed from a Git repository.
 
 The current working directory is \`${cwd}\`.
@@ -26,76 +26,76 @@ Please verify your CI configuration to make sure the \`semantic-release\` comman
 
 export function ENOREPOURL() {
 return {
-message:
+message: 'The `repositoryUrl` option is required.',
 details: `The [repositoryUrl option](${linkify(
-
+'docs/usage/configuration.md#repositoryurl'
 )}) cannot be determined from the semantic-release configuration, the \`package.json\` nor the [git origin url](https://git-scm.com/book/en/v2/Git-Basics-Working-with-Remotes).
 
 Please make sure to add the \`repositoryUrl\` to the [semantic-release configuration] (${linkify(
-
+'docs/usage/configuration.md'
 )}).`,
 };
 }
 
-export function EGITNOPERMISSION({
+export function EGITNOPERMISSION({options: {repositoryUrl}, branch: {name}}) {
 return {
-message:
+message: 'Cannot push to the Git repository.',
 details: `**semantic-release** cannot push the version tag to the branch \`${name}\` on the remote Git repository with URL \`${repositoryUrl}\`.
 
 This can be caused by:
-- a misconfiguration of the [repositoryUrl](${linkify(
+- a misconfiguration of the [repositoryUrl](${linkify('docs/usage/configuration.md#repositoryurl')}) option
 - the repository being unavailable
 - or missing push permission for the user configured via the [Git credentials on your CI environment](${linkify(
-
-
+'docs/usage/ci-configuration.md#authentication'
+)})`,
 };
 }
 
-export function EINVALIDTAGFORMAT({
+export function EINVALIDTAGFORMAT({options: {tagFormat}}) {
 return {
-message:
+message: 'Invalid `tagFormat` option.',
 details: `The [tagFormat](${linkify(
-
+'docs/usage/configuration.md#tagformat'
 )}) must compile to a [valid Git reference](https://git-scm.com/docs/git-check-ref-format#_description).
 
 Your configuration for the \`tagFormat\` option is \`${stringify(tagFormat)}\`.`,
 };
 }
 
-export function ETAGNOVERSION({
+export function ETAGNOVERSION({options: {tagFormat}}) {
 return {
-message:
+message: 'Invalid `tagFormat` option.',
 details: `The [tagFormat](${linkify(
-
+'docs/usage/configuration.md#tagformat'
 )}) option must contain the variable \`version\` exactly once.
 
 Your configuration for the \`tagFormat\` option is \`${stringify(tagFormat)}\`.`,
 };
 }
 
-export function EPLUGINCONF({
+export function EPLUGINCONF({type, required, pluginConf}) {
 return {
 message: `The \`${type}\` plugin configuration is invalid.`,
 details: `The [${type} plugin configuration](${linkify(`docs/usage/plugins.md#${toLower(type)}-plugin`)}) ${
-required ?
+required ? 'is required and ' : ''
 } must be a single or an array of plugins definition. A plugin definition is an npm module name, optionally wrapped in an array with an object.
 
 Your configuration for the \`${type}\` plugin is \`${stringify(pluginConf)}\`.`,
 };
 }
 
-export function EPLUGINSCONF({
+export function EPLUGINSCONF({plugin}) {
 return {
-message:
+message: 'The `plugins` configuration is invalid.',
 details: `The [plugins](${linkify(
-
+'docs/usage/configuration.md#plugins'
 )}) option must be an array of plugin definitions. A plugin definition is an npm module name, optionally wrapped in an array with an object.
 
 The invalid configuration is \`${stringify(plugin)}\`.`,
 };
 }
 
-export function EPLUGIN({
+export function EPLUGIN({pluginName, type}) {
 return {
 message: `A plugin configured in the step ${type} is not a valid semantic-release plugin.`,
 details: `A valid \`${type}\` **semantic-release** plugin must be a function or an object with a function in the property \`${type}\`.
@@ -103,17 +103,17 @@ export function EPLUGIN({ pluginName, type }) {
 The plugin \`${pluginName}\` doesn't have the property \`${type}\` and cannot be used for the \`${type}\` step.
 
 Please refer to the \`${pluginName}\` and [semantic-release plugins configuration](${linkify(
-
+'docs/usage/plugins.md'
 )}) documentation for more details.`,
 };
 }
 
-export function EANALYZECOMMITSOUTPUT({
+export function EANALYZECOMMITSOUTPUT({result, pluginName}) {
 return {
-message:
+message: 'The `analyzeCommits` plugin returned an invalid value. It must return a valid semver release type.',
 details: `The \`analyzeCommits\` plugin must return a valid [semver](https://semver.org) release type. The valid values are: ${RELEASE_TYPE.map(
 (type) => `\`${type}\``
-).join(
+).join(', ')}.
 
 The \`analyzeCommits\` function of the \`${pluginName}\` returned \`${stringify(result)}\` instead.
 
@@ -121,15 +121,15 @@ We recommend to report the issue to the \`${pluginName}\` authors, providing the
 - The **semantic-release** version: \`${pkg.version}\`
 - The **semantic-release** logs from your CI job
 - The value returned by the plugin: \`${stringify(result)}\`
-- A link to the **semantic-release** plugin developer guide: [${linkify(
-
+- A link to the **semantic-release** plugin developer guide: [${linkify('docs/developer-guide/plugin.md')}](${linkify(
+'docs/developer-guide/plugin.md'
 )})`,
 };
 }
 
-export function EGENERATENOTESOUTPUT({
+export function EGENERATENOTESOUTPUT({result, pluginName}) {
 return {
-message:
+message: 'The `generateNotes` plugin returned an invalid value. It must return a `String`.',
 details: `The \`generateNotes\` plugin must return a \`String\`.
 
 The \`generateNotes\` function of the \`${pluginName}\` returned \`${stringify(result)}\` instead.
@@ -138,15 +138,15 @@ We recommend to report the issue to the \`${pluginName}\` authors, providing the
 - The **semantic-release** version: \`${pkg.version}\`
 - The **semantic-release** logs from your CI job
 - The value returned by the plugin: \`${stringify(result)}\`
-- A link to the **semantic-release** plugin developer guide: [${linkify(
-
+- A link to the **semantic-release** plugin developer guide: [${linkify('docs/developer-guide/plugin.md')}](${linkify(
+'docs/developer-guide/plugin.md'
 )})`,
 };
 }
 
-export function EPUBLISHOUTPUT({
+export function EPUBLISHOUTPUT({result, pluginName}) {
 return {
-message:
+message: 'A `publish` plugin returned an invalid value. It must return an `Object`.',
 details: `The \`publish\` plugins must return an \`Object\`.
 
 The \`publish\` function of the \`${pluginName}\` returned \`${stringify(result)}\` instead.
@@ -155,15 +155,15 @@ We recommend to report the issue to the \`${pluginName}\` authors, providing the
 - The **semantic-release** version: \`${pkg.version}\`
 - The **semantic-release** logs from your CI job
 - The value returned by the plugin: \`${stringify(result)}\`
-- A link to the **semantic-release** plugin developer guide: [${linkify(
-
+- A link to the **semantic-release** plugin developer guide: [${linkify('docs/developer-guide/plugin.md')}](${linkify(
+'docs/developer-guide/plugin.md'
 )})`,
 };
 }
 
-export function EADDCHANNELOUTPUT({
+export function EADDCHANNELOUTPUT({result, pluginName}) {
 return {
-message:
+message: 'A `addChannel` plugin returned an invalid value. It must return an `Object`.',
 details: `The \`addChannel\` plugins must return an \`Object\`.
 
 The \`addChannel\` function of the \`${pluginName}\` returned \`${stringify(result)}\` instead.
@@ -172,72 +172,72 @@ We recommend to report the issue to the \`${pluginName}\` authors, providing the
 - The **semantic-release** version: \`${pkg.version}\`
 - The **semantic-release** logs from your CI job
 - The value returned by the plugin: \`${stringify(result)}\`
-- A link to the **semantic-release** plugin developer guide: [${linkify(
-
+- A link to the **semantic-release** plugin developer guide: [${linkify('docs/developer-guide/plugin.md')}](${linkify(
+'docs/developer-guide/plugin.md'
 )})`,
 };
 }
 
-export function EINVALIDBRANCH({
+export function EINVALIDBRANCH({branch}) {
 return {
-message:
+message: 'A branch is invalid in the `branches` configuration.',
 details: `Each branch in the [branches configuration](${linkify(
-
+'docs/usage/configuration.md#branches'
 )}) must be either a string, a regexp or an object with a \`name\` property.
 
 Your configuration for the problematic branch is \`${stringify(branch)}\`.`,
 };
 }
 
-export function EINVALIDBRANCHNAME({
+export function EINVALIDBRANCHNAME({branch}) {
 return {
-message:
+message: 'A branch name is invalid in the `branches` configuration.',
 details: `Each branch in the [branches configuration](${linkify(
-
+'docs/usage/configuration.md#branches'
 )}) must be a [valid Git reference](https://git-scm.com/docs/git-check-ref-format#_description).
 
 Your configuration for the problematic branch is \`${stringify(branch)}\`.`,
 };
 }
 
-export function EDUPLICATEBRANCHES({
+export function EDUPLICATEBRANCHES({duplicates}) {
 return {
-message:
+message: 'The `branches` configuration has duplicate branches.',
 details: `Each branch in the [branches configuration](${linkify(
-
+'docs/usage/configuration.md#branches'
 )}) must havea unique name.
 
 Your configuration contains duplicates for the following branch names: \`${stringify(duplicates)}\`.`,
 };
 }
 
-export function EMAINTENANCEBRANCH({
+export function EMAINTENANCEBRANCH({branch}) {
 return {
-message:
+message: 'A maintenance branch is invalid in the `branches` configuration.',
 details: `Each maintenance branch in the [branches configuration](${linkify(
-
+'docs/usage/configuration.md#branches'
 )}) must have a \`range\` property formatted like \`N.x\`, \`N.x.x\` or \`N.N.x\` (\`N\` is a number).
 
 Your configuration for the problematic branch is \`${stringify(branch)}\`.`,
 };
 }
 
-export function EMAINTENANCEBRANCHES({
+export function EMAINTENANCEBRANCHES({branches}) {
 return {
-message:
+message: 'The maintenance branches are invalid in the `branches` configuration.',
 details: `Each maintenance branch in the [branches configuration](${linkify(
-
+'docs/usage/configuration.md#branches'
 )}) must have a unique \`range\` property.
 
 Your configuration for the problematic branches is \`${stringify(branches)}\`.`,
 };
 }
 
-export function ERELEASEBRANCHES({
+export function ERELEASEBRANCHES({branches}) {
 return {
-message:
+message: 'The release branches are invalid in the `branches` configuration.',
 details: `A minimum of 1 and a maximum of 3 release branches are required in the [branches configuration](${linkify(
-
+'docs/usage/configuration.md#branches'
 )}).
 
 This may occur if your repository does not have a release branch, such as \`master\`.
@@ -246,53 +246,53 @@ Your configuration for the problematic branches is \`${stringify(branches)}\`.`,
 };
 }
 
-export function EPRERELEASEBRANCH({
+export function EPRERELEASEBRANCH({branch}) {
 return {
-message:
+message: 'A pre-release branch configuration is invalid in the `branches` configuration.',
 details: `Each pre-release branch in the [branches configuration](${linkify(
-
+'docs/usage/configuration.md#branches'
 )}) must have a \`prerelease\` property valid per the [Semantic Versioning Specification](https://semver.org/#spec-item-9). If the \`prerelease\` property is set to \`true\`, then the \`name\` property is used instead.
 
 Your configuration for the problematic branch is \`${stringify(branch)}\`.`,
 };
 }
 
-export function EPRERELEASEBRANCHES({
+export function EPRERELEASEBRANCHES({branches}) {
 return {
-message:
+message: 'The pre-release branches are invalid in the `branches` configuration.',
 details: `Each pre-release branch in the [branches configuration](${linkify(
-
+'docs/usage/configuration.md#branches'
 )}) must have a unique \`prerelease\` property. If the \`prerelease\` property is set to \`true\`, then the \`name\` property is used instead.
 
 Your configuration for the problematic branches is \`${stringify(branches)}\`.`,
 };
 }
 
-export function EINVALIDNEXTVERSION({
+export function EINVALIDNEXTVERSION({nextRelease: {version}, branch: {name, range}, commits, validBranches}) {
 return {
 message: `The release \`${version}\` on branch \`${name}\` cannot be published as it is out of range.`,
 details: `Based on the releases published on other branches, only versions within the range \`${range}\` can be published from branch \`${name}\`.
 
-The following commit${commits.length > 1 ?
-${commits.map(({
+The following commit${commits.length > 1 ? 's are' : ' is'} responsible for the invalid release:
+${commits.map(({commit: {short}, subject}) => `- ${subject} (${short})`).join('\n')}
 
 ${
-
-} should be moved to a valid branch with [git merge](https://git-scm.com/docs/git-merge) or [git cherry-pick](https://git-scm.com/docs/git-cherry-pick) and removed from branch \`${name}\` with [git revert](https://git-scm.com/docs/git-revert) or [git reset](https://git-scm.com/docs/git-reset).
+commits.length > 1 ? 'Those commits' : 'This commit'
+} should be moved to a valid branch with [git merge](https://git-scm.com/docs/git-merge) or [git cherry-pick](https://git-scm.com/docs/git-cherry-pick) and removed from branch \`${name}\` with [git revert](https://git-scm.com/docs/git-revert) or [git reset](https://git-scm.com/docs/git-reset).
 
-A valid branch could be ${wordsList(validBranches.map(({
+A valid branch could be ${wordsList(validBranches.map(({name}) => `\`${name}\``))}.
 
-See the [workflow configuration documentation](${linkify(
+See the [workflow configuration documentation](${linkify('docs/usage/workflow-configuration.md')}) for more details.`,
 };
 }
 
-export function EINVALIDMAINTENANCEMERGE({
+export function EINVALIDMAINTENANCEMERGE({nextRelease: {channel, gitTag, version}, branch: {mergeRange, name}}) {
 return {
 message: `The release \`${version}\` on branch \`${name}\` cannot be published as it is out of range.`,
 details: `Only releases within the range \`${mergeRange}\` can be merged into the maintenance branch \`${name}\` and published to the \`${channel}\` distribution channel.
 
 The branch \`${name}\` head should be [reset](https://git-scm.com/docs/git-reset) to a previous commit so the commit with tag \`${gitTag}\` is removed from the branch history.
 
-See the [workflow configuration documentation](${linkify(
+See the [workflow configuration documentation](${linkify('docs/usage/workflow-configuration.md')}) for more details.`,
 };
 }
package/lib/definitions/plugins.js
CHANGED
@@ -1,23 +1,23 @@
 /* eslint require-atomic-updates: off */
 
-import {
-import {
-import hideSensitive from
-import {
-import {
+import {isPlainObject, isString} from 'lodash-es';
+import {getGitHead} from '../git.js';
+import hideSensitive from '../hide-sensitive.js';
+import {hideSensitiveValues} from '../utils.js';
+import {RELEASE_NOTES_SEPARATOR, RELEASE_TYPE} from './constants.js';
 
 export default {
 verifyConditions: {
 required: false,
 dryRun: true,
-pipelineConfig: () => ({
+pipelineConfig: () => ({settleAll: true}),
 },
 analyzeCommits: {
-default: [
+default: ['@semantic-release/commit-analyzer'],
 required: true,
 dryRun: true,
 outputValidator: (output) => !output || RELEASE_TYPE.includes(output),
-preprocess: ({
+preprocess: ({commits, ...inputs}) => ({
 ...inputs,
 commits: commits.filter((commit) => !/\[skip\s+release]|\[release\s+skip]/i.test(commit.message)),
 }),
@@ -32,29 +32,29 @@ export default {
 verifyRelease: {
 required: false,
 dryRun: true,
-pipelineConfig: () => ({
+pipelineConfig: () => ({settleAll: true}),
 },
 generateNotes: {
 required: false,
 dryRun: true,
 outputValidator: (output) => !output || isString(output),
 pipelineConfig: () => ({
-getNextInput: ({
+getNextInput: ({nextRelease, ...context}, notes) => ({
 ...context,
 nextRelease: {
 ...nextRelease,
-notes: `${nextRelease.notes ? `${nextRelease.notes}${RELEASE_NOTES_SEPARATOR}` :
+notes: `${nextRelease.notes ? `${nextRelease.notes}${RELEASE_NOTES_SEPARATOR}` : ''}${notes}`,
 },
 }),
 }),
-postprocess: (results, {
+postprocess: (results, {env}) => hideSensitive(env)(results.filter(Boolean).join(RELEASE_NOTES_SEPARATOR)),
 },
 prepare: {
 required: false,
 dryRun: false,
-pipelineConfig: ({
+pipelineConfig: ({generateNotes}) => ({
 getNextInput: async (context) => {
-const newGitHead = await getGitHead({
+const newGitHead = await getGitHead({cwd: context.cwd});
 // If previous prepare plugin has created a commit (gitHead changed)
 if (context.nextRelease.gitHead !== newGitHead) {
 context.nextRelease.gitHead = newGitHead;
@@ -73,7 +73,7 @@ export default {
 outputValidator: (output) => !output || isPlainObject(output),
 pipelineConfig: () => ({
 // Add `nextRelease` and plugin properties to published release
-transform: (release, step, {
+transform: (release, step, {nextRelease}) => ({
 ...(release === false ? {} : nextRelease),
 ...release,
 ...step,
@@ -86,7 +86,7 @@ export default {
 outputValidator: (output) => !output || isPlainObject(output),
 pipelineConfig: () => ({
 // Add `nextRelease` and plugin properties to published release
-transform: (release, step, {
+transform: (release, step, {nextRelease}) => ({
 ...(release === false ? {} : nextRelease),
 ...release,
 ...step,
@@ -96,13 +96,13 @@ export default {
 success: {
 required: false,
 dryRun: false,
-pipelineConfig: () => ({
-preprocess: ({
+pipelineConfig: () => ({settleAll: true}),
+preprocess: ({releases, env, ...inputs}) => ({...inputs, env, releases: hideSensitiveValues(env, releases)}),
 },
 fail: {
 required: false,
 dryRun: false,
-pipelineConfig: () => ({
-preprocess: ({
+pipelineConfig: () => ({settleAll: true}),
+preprocess: ({errors, env, ...inputs}) => ({...inputs, env, errors: hideSensitiveValues(env, errors)}),
 },
 };
package/lib/plugins/index.js
CHANGED
@@ -1,13 +1,13 @@
-import {
-import AggregateError from
-import getError from
-import PLUGINS_DEFINITIONS from
-import {
-import pipeline from
-import normalize from
+import {castArray, identity, isNil, isPlainObject, isString, omit} from 'lodash-es';
+import AggregateError from 'aggregate-error';
+import getError from '../get-error.js';
+import PLUGINS_DEFINITIONS from '../definitions/plugins.js';
+import {loadPlugin, parseConfig, validatePlugin, validateStep} from './utils.js';
+import pipeline from './pipeline.js';
+import normalize from './normalize.js';
 
 export default async (context, pluginsPath) => {
-let {
+let {options, logger} = context;
 const errors = [];
 
 const plugins = options.plugins
@@ -20,8 +20,8 @@ export default async (context, pluginsPath) => {
 if (isPlainObject(plugin)) {
 Object.entries(plugin).forEach(([type, func]) => {
 if (PLUGINS_DEFINITIONS[type]) {
-Reflect.defineProperty(func,
-value: isPlainObject(name) ?
+Reflect.defineProperty(func, 'pluginName', {
+value: isPlainObject(name) ? 'Inline plugin' : name,
 writable: false,
 enumerable: true,
 });
@@ -29,10 +29,10 @@ export default async (context, pluginsPath) => {
 }
 });
 } else {
-errors.push(getError(
+errors.push(getError('EPLUGINSCONF', {plugin}));
 }
 } else {
-errors.push(getError(
+errors.push(getError('EPLUGINSCONF', {plugin}));
 }
 
 return pluginsList;
@@ -43,12 +43,12 @@ export default async (context, pluginsPath) => {
 throw new AggregateError(errors);
 }
 
-options = {
+options = {...plugins, ...options};
 
 const pluginsConfig = await Object.entries(PLUGINS_DEFINITIONS).reduce(
 async (
 eventualPluginsConfigAccumulator,
-[type, {
+[type, {required, default: def, pipelineConfig, postprocess = identity, preprocess = identity}]
 ) => {
 let pluginOptions;
 const pluginsConfigAccumulator = await eventualPluginsConfigAccumulator;
@@ -63,8 +63,8 @@ export default async (context, pluginsPath) => {
 );
 }
 
-if (!validateStep({
-errors.push(getError(
+if (!validateStep({required}, options[type])) {
+errors.push(getError('EPLUGINCONF', {type, required, pluginConf: options[type]}));
 return pluginsConfigAccumulator;
 }
 
@@ -74,7 +74,7 @@ export default async (context, pluginsPath) => {
 const steps = await Promise.all(
 castArray(pluginOptions).map(async (pluginOpt) =>
 normalize(
-{
+{...context, options: omit(options, Object.keys(PLUGINS_DEFINITIONS), 'plugins')},
 type,
 pluginOpt,
 pluginsPath
@@ -100,4 +100,4 @@ export default async (context, pluginsPath) => {
 }
 
 return pluginsConfig;
-}
+}
package/lib/plugins/normalize.js
CHANGED
@@ -1,14 +1,14 @@
-import {
-import debugPlugins from
-import getError from
-import {
-import PLUGINS_DEFINITIONS from
-import {
+import {cloneDeep, isFunction, isPlainObject, noop, omit} from 'lodash-es';
+import debugPlugins from 'debug';
+import getError from '../get-error.js';
+import {extractErrors} from '../utils.js';
+import PLUGINS_DEFINITIONS from '../definitions/plugins.js';
+import {loadPlugin, parseConfig} from './utils.js';
 
-const debug = debugPlugins(
+const debug = debugPlugins('semantic-release:plugins');
 
 export default async (context, type, pluginOpt, pluginsPath) => {
-const {
+const {stdout, stderr, options, logger} = context;
 if (!pluginOpt) {
 return noop;
 }
@@ -21,26 +21,26 @@ export default async (context, type, pluginOpt, pluginsPath) => {
 
 let func;
 if (isFunction(plugin)) {
-func = plugin.bind(null, cloneDeep({
+func = plugin.bind(null, cloneDeep({...options, ...config}));
 } else if (isPlainObject(plugin) && plugin[type] && isFunction(plugin[type])) {
-func = plugin[type].bind(null, cloneDeep({
+func = plugin[type].bind(null, cloneDeep({...options, ...config}));
 } else {
-throw getError(
+throw getError('EPLUGIN', {type, pluginName});
 }
 
 const validator = async (input) => {
-const {
+const {dryRun, outputValidator} = PLUGINS_DEFINITIONS[type] || {};
 try {
 if (!input.options.dryRun || dryRun) {
 logger.log(`Start step "${type}" of plugin "${pluginName}"`);
 const result = await func({
-...cloneDeep(omit(input, [
+...cloneDeep(omit(input, ['stdout', 'stderr', 'logger'])),
 stdout,
 stderr,
 logger: logger.scope(logger.scopeName, pluginName),
 });
 if (outputValidator && !outputValidator(result)) {
-throw getError(`E${type.toUpperCase()}OUTPUT`, {
+throw getError(`E${type.toUpperCase()}OUTPUT`, {result, pluginName});
 }
 
 logger.success(`Completed step "${type}" of plugin "${pluginName}"`);
@@ -50,12 +50,12 @@ export default async (context, type, pluginOpt, pluginsPath) => {
 logger.warn(`Skip step "${type}" of plugin "${pluginName}" in dry-run mode`);
 } catch (error) {
 logger.error(`Failed step "${type}" of plugin "${pluginName}"`);
-extractErrors(error).forEach((err) => Object.assign(err, {
+extractErrors(error).forEach((err) => Object.assign(err, {pluginName}));
 throw error;
 }
 };
 
-Reflect.defineProperty(validator,
+Reflect.defineProperty(validator, 'pluginName', {value: pluginName, writable: false, enumerable: true});
 
 if (!isFunction(pluginOpt)) {
 if (pluginsPath[name]) {
@@ -66,4 +66,4 @@ export default async (context, type, pluginOpt, pluginsPath) => {
 }
 
 return validator;
-}
+}
package/lib/plugins/pipeline.js
CHANGED
@@ -1,7 +1,7 @@
-import {
-import pReduce from
-import AggregateError from
-import {
+import {identity} from 'lodash-es';
+import pReduce from 'p-reduce';
+import AggregateError from 'aggregate-error';
+import {extractErrors} from '../utils.js';
 
 /**
 * A Function that execute a list of function sequencially. If at least one Function ins the pipeline throws an Error or rejects, the pipeline function rejects as well.
@@ -25,35 +25,34 @@ import { extractErrors } from "../utils.js";
 *
 * @return {Pipeline} A Function that execute the `steps` sequentially
 */
-export default (steps, {
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-throw error;
-}
+export default (steps, {settleAll = false, getNextInput = identity, transform = identity} = {}) => async (input) => {
+const results = [];
+const errors = [];
+await pReduce(
+steps,
+async (lastInput, step) => {
+let result;
+try {
+// Call the step with the input computed at the end of the previous iteration and save intermediary result
+result = await transform(await step(lastInput), step, lastInput);
+results.push(result);
+} catch (error) {
+if (settleAll) {
+errors.push(...extractErrors(error));
+result = error;
+} else {
+throw error;
 }
+}
 
-
-
-
-
-
-
-
-
+// Prepare input for the next step, passing the input of the last iteration (or initial parameter for the first iteration) and the result of the current one
+return getNextInput(lastInput, result);
+},
+input
+);
+if (errors.length > 0) {
+throw new AggregateError(errors);
+}
 
-
-
+return results;
+}
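A minimal usage sketch of the pipeline helper above (the relative import path is illustrative; this module is internal to the package, not a public API):

import pipeline from './lib/plugins/pipeline.js';

const steps = [async (n) => n + 1, async (n) => n * 2];

// Thread each step's result into the next step's input instead of reusing the initial input.
const run = pipeline(steps, {getNextInput: (lastInput, result) => result});

console.log(await run(1)); // -> [2, 4]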
package/lib/plugins/utils.js
CHANGED
@@ -1,7 +1,7 @@
-import {
-import {
-import {
-import resolveFrom from
+import {dirname} from 'node:path';
+import {fileURLToPath} from 'node:url';
+import {castArray, isArray, isFunction, isNil, isPlainObject, isString} from 'lodash-es';
+import resolveFrom from 'resolve-from';
 
 const __dirname = dirname(fileURLToPath(import.meta.url));
 
@@ -38,7 +38,7 @@ export function validatePlugin(conf) {
 );
 }
 
-export function validateStep({
+export function validateStep({required}, conf) {
 conf = castArray(conf).filter(Boolean);
 if (required) {
 return conf.length >= 1 && validateSteps(conf);
@@ -47,25 +47,14 @@ export function validateStep({ required }, conf) {
 return conf.length === 0 || validateSteps(conf);
 }
 
-export async function loadPlugin({
+export async function loadPlugin({cwd}, name, pluginsPath) {
 const basePath = pluginsPath[name]
 ? dirname(resolveFrom.silent(__dirname, pluginsPath[name]) || resolveFrom(cwd, pluginsPath[name]))
 : __dirname;
 
 // See https://github.com/mysticatea/eslint-plugin-node/issues/250
-
-
-}
-
-const { default: cjsExport, ...esmNamedExports } = await import(
-resolveFrom.silent(basePath, name) || resolveFrom(cwd, name)
-);
-
-if (cjsExport) {
-return cjsExport;
-}
-
-return esmNamedExports;
+// eslint-disable-next-line node/no-unsupported-features/es-syntax
+return isFunction(name) ? name : (await import(resolveFrom.silent(basePath, name) || resolveFrom(cwd, name))).default;
 }
 
 export function parseConfig(plugin) {
@@ -74,7 +63,7 @@ export function parseConfig(plugin) {
 if (isArray(plugin)) {
 [path, config] = plugin;
 } else if (isPlainObject(plugin) && !isNil(plugin.path)) {
-({
+({path, ...config} = plugin);
 } else {
 path = plugin;
 }
package/package.json
CHANGED
@@ -1,7 +1,7 @@
 {
 "name": "semantic-release",
 "description": "Automated semver compliant package publishing",
-"version": "20.0.3
+"version": "20.0.3",
 "type": "module",
 "author": "Stephan Bönnemann <stephan@boennemann.me> (http://boennemann.me)",
 "ava": {
@@ -29,7 +29,7 @@
 "@semantic-release/commit-analyzer": "^9.0.2",
 "@semantic-release/error": "^3.0.0",
 "@semantic-release/github": "^8.0.0",
-"@semantic-release/npm": "^
+"@semantic-release/npm": "^9.0.0",
 "@semantic-release/release-notes-generator": "^10.0.0",
 "aggregate-error": "^4.0.1",
 "cosmiconfig": "^8.0.0",
@@ -125,8 +125,8 @@
 },
 "scripts": {
 "codecov": "codecov -f coverage/coverage-final.json",
-"lint": "prettier --check \"*.{js,json,md}\" \".github/**/*.{md,yml}\" \"docs/**/*.md\" \"{bin,lib,test}
-"lint:fix": "prettier --write \"*.{js,json,md}\" \".github/**/*.{md,yml}\" \"docs/**/*.md\" \"{bin,lib,test}
+"lint": "prettier --check \"*.{js,json,md}\" \".github/**/*.{md,yml}\" \"docs/**/*.md\" \"{bin,lib,test}/*.js\"",
+"lint:fix": "prettier --write \"*.{js,json,md}\" \".github/**/*.{md,yml}\" \"docs/**/*.md\" \"{bin,lib,test}/*.js\"",
 "pretest": "npm run lint",
 "semantic-release": "./bin/semantic-release.js",
 "test": "c8 ava --verbose",