semantic-release 17.0.4 → 17.0.8

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -29,7 +29,7 @@ execa('git', ['--version'])
29
29
  process.exit(1);
30
30
  }
31
31
  })
32
- .catch(error => {
32
+ .catch((error) => {
33
33
  console.error(`[semantic-release]: Git version ${MIN_GIT_VERSION} is required. No git binary found.`);
34
34
  console.error(error);
35
35
  process.exit(1);
@@ -37,7 +37,7 @@ execa('git', ['--version'])
37
37
 
38
38
  // Node 10+ from this point on
39
39
  require('../cli')()
40
- .then(exitCode => {
40
+ .then((exitCode) => {
41
41
  process.exitCode = exitCode;
42
42
  })
43
43
  .catch(() => {
package/cli.js CHANGED
@@ -5,15 +5,15 @@ const hideSensitive = require('./lib/hide-sensitive');
5
5
  const stringList = {
6
6
  type: 'string',
7
7
  array: true,
8
- coerce: values =>
8
+ coerce: (values) =>
9
9
  values.length === 1 && values[0].trim() === 'false'
10
10
  ? []
11
- : values.reduce((values, value) => values.concat(value.split(',').map(value => value.trim())), []),
11
+ : values.reduce((values, value) => values.concat(value.split(',').map((value) => value.trim())), []),
12
12
  };
13
13
 
14
14
  module.exports = async () => {
15
15
  const cli = require('yargs')
16
- .command('$0', 'Run automated package publishing', yargs => {
16
+ .command('$0', 'Run automated package publishing', (yargs) => {
17
17
  yargs.demandCommand(0, 0).usage(`Run automated package publishing
18
18
 
19
19
  Usage:
@@ -41,18 +41,18 @@ Usage:
41
41
  .exitProcess(false);
42
42
 
43
43
  try {
44
- const {help, version, ...opts} = cli.parse(argv.slice(2));
44
+ const {help, version, ...options} = cli.parse(argv.slice(2));
45
45
 
46
46
  if (Boolean(help) || Boolean(version)) {
47
47
  return 0;
48
48
  }
49
49
 
50
- if (opts.debug) {
50
+ if (options.debug) {
51
51
  // Debug must be enabled before other requires in order to work
52
52
  require('debug').enable('semantic-release:*');
53
53
  }
54
54
 
55
- await require('.')(opts);
55
+ await require('.')(options);
56
56
  return 0;
57
57
  } catch (error) {
58
58
  if (error.name !== 'YError') {
@@ -99,3 +99,5 @@
99
99
  - `verifyConditions`: Verify the presence and the validity of the authentication and the assets option configuration.
100
100
  - `publish`: Publish a Gitea release, optionally uploading file assets.
101
101
  - `addChannel`: Update a Gitea release's pre-release field.
102
+ - [@google/semantic-release-replace-plugin](https://github.com/google/semantic-release-replace-plugin)
103
+ - `prepare`: Replace version strings in files using regex and glob.
@@ -66,3 +66,8 @@ To trigger a release, call (with a [Personal Access Tokens](https://help.github.
66
66
  ```
67
67
  $ curl -v -H "Accept: application/vnd.github.everest-preview+json" -H "Authorization: token ${GITHUB_TOKEN}" https://api.github.com/repos/[org-name-or-username]/[repository]/dispatches -d '{ "event_type": "semantic-release" }'
68
68
  ```
69
+
70
+ If you'd like to use a GitHub app to manage this instead of creating a personal access token, you could consider using a project like:
71
+
72
+ * [Actions Panel](https://www.actionspanel.app/) - A declaratively configured way for triggering GitHub Actions
73
+ * [Action Button](https://github-action-button.web.app/#details) - A simple badge based mechanism for triggering GitHub Actions
@@ -14,7 +14,7 @@ GitLab CI supports [Pipelines](https://docs.gitlab.com/ee/ci/pipelines.html) all
14
14
 
15
15
  ### `.gitlab-ci.yml` configuration for Node projects
16
16
 
17
- This example is a minimal configuration for **semantic-release** with a build running Node 6 and 8. See [GitLab CI - Configuration of your jobs with .gitlab-ci.yml](https://docs.gitlab.com/ee/ci/yaml/README.html) for additional configuration options.
17
+ This example is a minimal configuration for **semantic-release** with a build running Node 10 and 12. See [GitLab CI - Configuration of your jobs with .gitlab-ci.yml](https://docs.gitlab.com/ee/ci/yaml/README.html) for additional configuration options.
18
18
 
19
19
  **Note**: The `semantic-release` execution command varies depending on whether you are using a [local](../usage/installation.md#local-installation) or [global](../usage/installation.md#global-installation) **semantic-release** installation.
20
20
 
@@ -27,25 +27,60 @@ stages:
27
27
  before_script:
28
28
  - npm install
29
29
 
30
- node:6:
31
- image: node:6
30
+ node:10:
31
+ image: node:10
32
32
  stage: test
33
33
  script:
34
34
  - npm test
35
35
 
36
- node:8:
37
- image: node:8
36
+ node:12:
37
+ image: node:12
38
38
  stage: test
39
39
  script:
40
40
  - npm test
41
41
 
42
42
  publish:
43
- image: node:8
43
+ image: node:12
44
44
  stage: release
45
45
  script:
46
46
  - npx semantic-release
47
47
  ```
48
48
 
49
+ ### `.gitlab-ci.yml` configuration for all projects
50
+
51
+ This example is a minimal configuration for **semantic-release** with a build running Node 10 and 12. See [GitLab CI - Configuration of your jobs with .gitlab-ci.yml](https://docs.gitlab.com/ee/ci/yaml/README.html) for additional configuration options.
52
+
53
+ **Note**: The `semantic-release` execution command varies depending on whether you are using a [local](../usage/installation.md#local-installation) or [global](../usage/installation.md#global-installation) **semantic-release** installation.
54
+
55
+
56
+ ```yaml
57
+ # The release pipeline will run only on the master branch when a commit is triggered
58
+ stages:
59
+ - release
60
+
61
+ release:10:
62
+ image: node:10-buster-slim
63
+ stage: release
64
+ before_script:
65
+ - apt-get update && apt-get install -y --no-install-recommends git-core ca-certificates
66
+ - npm install -g semantic-release @semantic-release/gitlab
67
+ script:
68
+ - semantic-release
69
+ only:
70
+ - master
71
+
72
+ release:12:
73
+ image: node:12-buster-slim
74
+ stage: release
75
+ before_script:
76
+ - apt-get update && apt-get install -y --no-install-recommends git-core ca-certificates
77
+ - npm install -g semantic-release @semantic-release/gitlab
78
+ script:
79
+ - semantic-release
80
+ only:
81
+ - master
82
+ ```
83
+
49
84
  ### `package.json` configuration
50
85
 
51
86
  A `package.json` is required only for [local](../usage/installation.md#local-installation) **semantic-release** installation.
@@ -1,6 +1,7 @@
1
1
  # Getting started
2
2
 
3
3
  In order to use **semantic-release** you must follow these steps:
4
+
4
5
  1. [Install](./installation.md#installation) **semantic-release** in your project
5
6
  2. Configure your Continuous Integration service to [run **semantic-release**](./ci-configuration.md#run-semantic-release-only-after-all-tests-succeeded)
6
7
  3. Configure your Git repository and package manager repository [authentication](ci-configuration.md#authentication) in your Continuous Integration service
@@ -9,10 +10,8 @@ In order to use **semantic-release** you must follow these steps:
9
10
  Alternatively those steps can be easily done with the [**semantic-release** interactive CLI](https://github.com/semantic-release/cli):
10
11
 
11
12
  ```bash
12
- npm install -g semantic-release-cli
13
-
14
13
  cd your-module
15
- semantic-release-cli setup
14
+ npx semantic-release-cli setup
16
15
  ```
17
16
 
18
17
  ![dialogue](../../media/semantic-release-cli.png)
@@ -140,7 +140,7 @@ A maintenance branch is characterized by a range which defines the versions that
140
140
 
141
141
  Maintenance branches are always considered lower than [release branches](#release-branches) and similarly to them, when a commit that would create a version conflict is pushed, **semantic-release** will not perform the release and will throw an `EINVALIDNEXTVERSION` error, listing the problematic commits and the valid branches on which to move them.
142
142
 
143
- **semantic-release** will automatically add releases to the corresponding distribution channel when code is [merged from a release or maintenance branch to another maintenance branch](#merging-into-a-maintenance-branch), however only version version within the branch `range` can be merged. Ia merged version is outside the maintenance branch `range` **semantic-release** will not add to the corresponding channel and will throw an `EINVALIDMAINTENANCEMERGE` error.
143
+ **semantic-release** will automatically add releases to the corresponding distribution channel when code is [merged from a release or maintenance branch to another maintenance branch](#merging-into-a-maintenance-branch), however only versions within the branch `range` can be merged. If a merged version is outside the maintenance branch `range`, **semantic-release** will not add to the corresponding channel and will throw an `EINVALIDMAINTENANCEMERGE` error.
144
144
 
145
145
  See [publishing maintenance releases recipe](../recipes/maintenance-releases.md) for a detailed example.
146
146
 
package/index.js CHANGED
@@ -214,7 +214,7 @@ async function run(context, plugins) {
214
214
  }
215
215
 
216
216
  function logErrors({logger, stderr}, err) {
217
- const errors = extractErrors(err).sort(error => (error.semanticRelease ? -1 : 0));
217
+ const errors = extractErrors(err).sort((error) => (error.semanticRelease ? -1 : 0));
218
218
  for (const error of errors) {
219
219
  if (error.semanticRelease) {
220
220
  logger.error(`${error.code} ${error.message}`);
@@ -228,7 +228,7 @@ function logErrors({logger, stderr}, err) {
228
228
  }
229
229
 
230
230
  async function callFail(context, plugins, err) {
231
- const errors = extractErrors(err).filter(err => err.semanticRelease);
231
+ const errors = extractErrors(err).filter((err) => err.semanticRelease);
232
232
  if (errors.length > 0) {
233
233
  try {
234
234
  await plugins.fail({...context, errors});
@@ -238,7 +238,7 @@ async function callFail(context, plugins, err) {
238
238
  }
239
239
  }
240
240
 
241
- module.exports = async (opts = {}, {cwd = process.cwd(), env = process.env, stdout, stderr} = {}) => {
241
+ module.exports = async (cliOptions = {}, {cwd = process.cwd(), env = process.env, stdout, stderr} = {}) => {
242
242
  const {unhook} = hookStd(
243
243
  {silent: false, streams: [process.stdout, process.stderr, stdout, stderr].filter(Boolean)},
244
244
  hideSensitive(env)
@@ -253,7 +253,7 @@ module.exports = async (opts = {}, {cwd = process.cwd(), env = process.env, stdo
253
253
  context.logger = getLogger(context);
254
254
  context.logger.log(`Running ${pkg.name} version ${pkg.version}`);
255
255
  try {
256
- const {plugins, options} = await getConfig(context, opts);
256
+ const {plugins, options} = await getConfig(context, cliOptions);
257
257
  context.options = options;
258
258
  try {
259
259
  const result = await run(context, plugins);
@@ -8,9 +8,9 @@ module.exports = async (repositoryUrl, {cwd}, branches) => {
8
8
  return branches.reduce(
9
9
  (branches, branch) => [
10
10
  ...branches,
11
- ...remove(gitBranches, name => micromatch(gitBranches, branch.name).includes(name)).map(name => ({
11
+ ...remove(gitBranches, (name) => micromatch(gitBranches, branch.name).includes(name)).map((name) => ({
12
12
  name,
13
- ...mapValues(omit(branch, 'name'), value => (isString(value) ? template(value)({name}) : value)),
13
+ ...mapValues(omit(branch, 'name'), (value) => (isString(value) ? template(value)({name}) : value)),
14
14
  })),
15
15
  ],
16
16
  []
@@ -14,7 +14,7 @@ module.exports = async (repositoryUrl, ciBranch, context) => {
14
14
  const remoteBranches = await expand(
15
15
  repositoryUrl,
16
16
  context,
17
- context.options.branches.map(branch => (isString(branch) || isRegExp(branch) ? {name: branch} : branch))
17
+ context.options.branches.map((branch) => (isString(branch) || isRegExp(branch) ? {name: branch} : branch))
18
18
  );
19
19
 
20
20
  await pEachSeries(remoteBranches, async ({name}) => {
@@ -32,7 +32,7 @@ module.exports = async (repositoryUrl, ciBranch, context) => {
32
32
  );
33
33
 
34
34
  const result = Object.entries(DEFINITIONS).reduce((result, [type, {branchesValidator, branchValidator}]) => {
35
- branchesByType[type].forEach(branch => {
35
+ branchesByType[type].forEach((branch) => {
36
36
  if (branchValidator && !branchValidator(branch)) {
37
37
  errors.push(getError(`E${type.toUpperCase()}BRANCH`, {branch}));
38
38
  }
@@ -48,15 +48,15 @@ module.exports = async (repositoryUrl, ciBranch, context) => {
48
48
  }, {});
49
49
 
50
50
  const duplicates = [...branches]
51
- .map(branch => branch.name)
51
+ .map((branch) => branch.name)
52
52
  .sort()
53
- .filter((_, idx, arr) => arr[idx] === arr[idx + 1] && arr[idx] !== arr[idx - 1]);
53
+ .filter((_, idx, array) => array[idx] === array[idx + 1] && array[idx] !== array[idx - 1]);
54
54
 
55
55
  if (duplicates.length > 0) {
56
56
  errors.push(getError('EDUPLICATEBRANCHES', {duplicates}));
57
57
  }
58
58
 
59
- await pEachSeries(branches, async branch => {
59
+ await pEachSeries(branches, async (branch) => {
60
60
  if (!(await verifyBranchName(branch.name))) {
61
61
  errors.push(getError('EINVALIDBRANCHNAME', branch));
62
62
  }
@@ -5,19 +5,19 @@ const {isMaintenanceRange} = require('../utils');
5
5
  const maintenance = {
6
6
  filter: ({name, range}) => (!isNil(range) && range !== false) || isMaintenanceRange(name),
7
7
  branchValidator: ({range}) => (isNil(range) ? true : isMaintenanceRange(range)),
8
- branchesValidator: branches => uniqBy(branches, ({range}) => semver.validRange(range)).length === branches.length,
8
+ branchesValidator: (branches) => uniqBy(branches, ({range}) => semver.validRange(range)).length === branches.length,
9
9
  };
10
10
 
11
11
  const prerelease = {
12
12
  filter: ({prerelease}) => !isNil(prerelease) && prerelease !== false,
13
13
  branchValidator: ({name, prerelease}) =>
14
14
  Boolean(prerelease) && Boolean(semver.valid(`1.0.0-${prerelease === true ? name : prerelease}.1`)),
15
- branchesValidator: branches => uniqBy(branches, 'prerelease').length === branches.length,
15
+ branchesValidator: (branches) => uniqBy(branches, 'prerelease').length === branches.length,
16
16
  };
17
17
 
18
18
  const release = {
19
- filter: branch => !maintenance.filter(branch) && !prerelease.filter(branch),
20
- branchesValidator: branches => branches.length <= 3 && branches.length > 0,
19
+ filter: (branch) => !maintenance.filter(branch) && !prerelease.filter(branch),
20
+ branchesValidator: (branches) => branches.length <= 3 && branches.length > 0,
21
21
  };
22
22
 
23
23
  module.exports = {maintenance, prerelease, release};
@@ -4,9 +4,10 @@ const pkg = require('../../package.json');
4
4
  const {RELEASE_TYPE} = require('./constants');
5
5
 
6
6
  const [homepage] = pkg.homepage.split('#');
7
- const stringify = obj => (isString(obj) ? obj : inspect(obj, {breakLength: Infinity, depth: 2, maxArrayLength: 5}));
8
- const linkify = file => `${homepage}/blob/master/${file}`;
9
- const wordsList = words =>
7
+ const stringify = (object) =>
8
+ isString(object) ? object : inspect(object, {breakLength: Infinity, depth: 2, maxArrayLength: 5});
9
+ const linkify = (file) => `${homepage}/blob/master/${file}`;
10
+ const wordsList = (words) =>
10
11
  `${words.slice(0, -1).join(', ')}${words.length > 1 ? ` or ${words[words.length - 1]}` : trim(words[0])}`;
11
12
 
12
13
  module.exports = {
@@ -84,7 +85,7 @@ Please refer to the \`${pluginName}\` and [semantic-release plugins configuratio
84
85
  EANALYZECOMMITSOUTPUT: ({result, pluginName}) => ({
85
86
  message: 'The `analyzeCommits` plugin returned an invalid value. It must return a valid semver release type.',
86
87
  details: `The \`analyzeCommits\` plugin must return a valid [semver](https://semver.org) release type. The valid values are: ${RELEASE_TYPE.map(
87
- type => `\`${type}\``
88
+ (type) => `\`${type}\``
88
89
  ).join(', ')}.
89
90
 
90
91
  The \`analyzeCommits\` function of the \`${pluginName}\` returned \`${stringify(result)}\` instead.
@@ -16,12 +16,12 @@ module.exports = {
16
16
  default: ['@semantic-release/commit-analyzer'],
17
17
  required: true,
18
18
  dryRun: true,
19
- outputValidator: output => !output || RELEASE_TYPE.includes(output),
19
+ outputValidator: (output) => !output || RELEASE_TYPE.includes(output),
20
20
  preprocess: ({commits, ...inputs}) => ({
21
21
  ...inputs,
22
- commits: commits.filter(commit => !/\[skip\s+release\]|\[release\s+skip\]/i.test(commit.message)),
22
+ commits: commits.filter((commit) => !/\[skip\s+release]|\[release\s+skip]/i.test(commit.message)),
23
23
  }),
24
- postprocess: results =>
24
+ postprocess: (results) =>
25
25
  RELEASE_TYPE[
26
26
  results.reduce((highest, result) => {
27
27
  const typeIndex = RELEASE_TYPE.indexOf(result);
@@ -37,7 +37,7 @@ module.exports = {
37
37
  generateNotes: {
38
38
  required: false,
39
39
  dryRun: true,
40
- outputValidator: output => !output || isString(output),
40
+ outputValidator: (output) => !output || isString(output),
41
41
  pipelineConfig: () => ({
42
42
  getNextInput: ({nextRelease, ...context}, notes) => ({
43
43
  ...context,
@@ -53,7 +53,7 @@ module.exports = {
53
53
  required: false,
54
54
  dryRun: false,
55
55
  pipelineConfig: ({generateNotes}) => ({
56
- getNextInput: async context => {
56
+ getNextInput: async (context) => {
57
57
  const newGitHead = await getGitHead({cwd: context.cwd});
58
58
  // If previous prepare plugin has created a commit (gitHead changed)
59
59
  if (context.nextRelease.gitHead !== newGitHead) {
@@ -70,7 +70,7 @@ module.exports = {
70
70
  publish: {
71
71
  required: false,
72
72
  dryRun: false,
73
- outputValidator: output => !output || isPlainObject(output),
73
+ outputValidator: (output) => !output || isPlainObject(output),
74
74
  pipelineConfig: () => ({
75
75
  // Add `nextRelease` and plugin properties to published release
76
76
  transform: (release, step, {nextRelease}) => ({
@@ -83,7 +83,7 @@ module.exports = {
83
83
  addChannel: {
84
84
  required: false,
85
85
  dryRun: false,
86
- outputValidator: output => !output || isPlainObject(output),
86
+ outputValidator: (output) => !output || isPlainObject(output),
87
87
  pipelineConfig: () => ({
88
88
  // Add `nextRelease` and plugin properties to published release
89
89
  transform: (release, step, {nextRelease}) => ({
package/lib/get-config.js CHANGED
@@ -19,14 +19,14 @@ const CONFIG_FILES = [
19
19
  `${CONFIG_NAME}.config.js`,
20
20
  ];
21
21
 
22
- module.exports = async (context, opts) => {
22
+ module.exports = async (context, cliOptions) => {
23
23
  const {cwd, env} = context;
24
24
  const {config, filepath} = (await cosmiconfig(CONFIG_NAME, {searchPlaces: CONFIG_FILES}).search(cwd)) || {};
25
25
 
26
26
  debug('load config from: %s', filepath);
27
27
 
28
28
  // Merge config file options and CLI/API options
29
- let options = {...config, ...opts};
29
+ let options = {...config, ...cliOptions};
30
30
  if (options.ci === false) {
31
31
  options.noCi = true;
32
32
  }
@@ -38,14 +38,14 @@ module.exports = async (context, opts) => {
38
38
  // If `extends` is defined, load and merge each shareable config with `options`
39
39
  options = {
40
40
  ...castArray(extendPaths).reduce((result, extendPath) => {
41
- const extendsOpts = require(resolveFrom.silent(__dirname, extendPath) || resolveFrom(cwd, extendPath));
41
+ const extendsOptions = require(resolveFrom.silent(__dirname, extendPath) || resolveFrom(cwd, extendPath));
42
42
 
43
43
  // For each plugin defined in a shareable config, save in `pluginsPath` the extendable config path,
44
44
  // so those plugin will be loaded relatively to the config file
45
- Object.entries(extendsOpts)
45
+ Object.entries(extendsOptions)
46
46
  .filter(([, value]) => Boolean(value))
47
47
  .reduce((pluginsPath, [option, value]) => {
48
- castArray(value).forEach(plugin => {
48
+ castArray(value).forEach((plugin) => {
49
49
  if (option === 'plugins' && validatePlugin(plugin)) {
50
50
  pluginsPath[parseConfig(plugin)[0]] = extendPath;
51
51
  } else if (
@@ -58,7 +58,7 @@ module.exports = async (context, opts) => {
58
58
  return pluginsPath;
59
59
  }, pluginsPath);
60
60
 
61
- return {...result, ...extendsOpts};
61
+ return {...result, ...extendsOptions};
62
62
  }, {}),
63
63
  ...options,
64
64
  };
@@ -83,7 +83,7 @@ module.exports = async (context, opts) => {
83
83
  '@semantic-release/github',
84
84
  ],
85
85
  // Remove `null` and `undefined` options so they can be replaced with default ones
86
- ...pickBy(options, option => !isNil(option)),
86
+ ...pickBy(options, (option) => !isNil(option)),
87
87
  ...(options.branches ? {branches: castArray(options.branches)} : {}),
88
88
  };
89
89
 
@@ -92,7 +92,7 @@ module.exports = async (context, opts) => {
92
92
  return {options, plugins: await plugins({...context, options}, pluginsPath)};
93
93
  };
94
94
 
95
- async function pkgRepoUrl(opts) {
96
- const {packageJson} = (await readPkgUp(opts)) || {};
95
+ async function pkgRepoUrl(options) {
96
+ const {packageJson} = (await readPkgUp(options)) || {};
97
97
  return packageJson && (isPlainObject(packageJson.repository) ? packageJson.repository.url : packageJson.repository);
98
98
  }
@@ -42,7 +42,7 @@ module.exports = async ({cwd, env, branch, options: {repositoryUrl}}) => {
42
42
  try {
43
43
  await verifyAuth(repositoryUrl, branch.name, {cwd, env});
44
44
  } catch (_) {
45
- const envVar = Object.keys(GIT_TOKENS).find(envVar => !isNil(env[envVar]));
45
+ const envVar = Object.keys(GIT_TOKENS).find((envVar) => !isNil(env[envVar]));
46
46
  const gitCredentials = `${GIT_TOKENS[envVar] || ''}${env[envVar] || ''}`;
47
47
 
48
48
  if (gitCredentials) {
@@ -29,8 +29,8 @@ const {makeTag, isSameChannel} = require('./utils');
29
29
  module.exports = ({branch, options: {tagFormat}}, {before} = {}) => {
30
30
  const [{version, gitTag, channels} = {}] = branch.tags
31
31
  .filter(
32
- tag =>
33
- ((branch.type === 'prerelease' && tag.channels.some(channel => isSameChannel(branch.channel, channel))) ||
32
+ (tag) =>
33
+ ((branch.type === 'prerelease' && tag.channels.some((channel) => isSameChannel(branch.channel, channel))) ||
34
34
  !semver.prerelease(tag.version)) &&
35
35
  (isUndefined(before) || semver.lt(tag.version, before))
36
36
  )
@@ -10,7 +10,7 @@ module.exports = ({branch, nextRelease: {type, channel}, lastRelease, logger}) =
10
10
  if (branch.type === 'prerelease') {
11
11
  if (
12
12
  semver.prerelease(lastRelease.version) &&
13
- lastRelease.channels.some(lastReleaseChannel => isSameChannel(lastReleaseChannel, channel))
13
+ lastRelease.channels.some((lastReleaseChannel) => isSameChannel(lastReleaseChannel, channel))
14
14
  ) {
15
15
  version = highest(
16
16
  semver.inc(lastRelease.version, 'prerelease'),
@@ -11,7 +11,7 @@ const {makeTag, getLowerBound} = require('./utils');
11
11
  *
12
12
  * @return {Array<Object>} Last release and next release to be added on the channel of the current branch.
13
13
  */
14
- module.exports = context => {
14
+ module.exports = (context) => {
15
15
  const {
16
16
  branch,
17
17
  branches,
package/lib/git.js CHANGED
@@ -14,8 +14,8 @@ Object.assign(gitLogParser.fields, {hash: 'H', message: 'B', gitTags: 'd', commi
14
14
  *
15
15
  * @return {String} The commit sha of the tag in parameter or `null`.
16
16
  */
17
- async function getTagHead(tagName, execaOpts) {
18
- return (await execa('git', ['rev-list', '-1', tagName], execaOpts)).stdout;
17
+ async function getTagHead(tagName, execaOptions) {
18
+ return (await execa('git', ['rev-list', '-1', tagName], execaOptions)).stdout;
19
19
  }
20
20
 
21
21
  /**
@@ -27,10 +27,10 @@ async function getTagHead(tagName, execaOpts) {
27
27
  * @return {Array<String>} List of git tags.
28
28
  * @throws {Error} If the `git` command fails.
29
29
  */
30
- async function getTags(branch, execaOpts) {
31
- return (await execa('git', ['tag', '--merged', branch], execaOpts)).stdout
30
+ async function getTags(branch, execaOptions) {
31
+ return (await execa('git', ['tag', '--merged', branch], execaOptions)).stdout
32
32
  .split('\n')
33
- .map(tag => tag.trim())
33
+ .map((tag) => tag.trim())
34
34
  .filter(Boolean);
35
35
  }
36
36
 
@@ -42,12 +42,12 @@ async function getTags(branch, execaOpts) {
42
42
  * @param {Object} [execaOpts] Options to pass to `execa`.
43
43
  * @return {Promise<Array<Object>>} The list of commits between `from` and `to`.
44
44
  */
45
- async function getCommits(from, to, execaOpts) {
45
+ async function getCommits(from, to, execaOptions) {
46
46
  return (
47
47
  await getStream.array(
48
48
  gitLogParser.parse(
49
49
  {_: `${from ? from + '..' : ''}${to}`},
50
- {cwd: execaOpts.cwd, env: {...process.env, ...execaOpts.env}}
50
+ {cwd: execaOptions.cwd, env: {...process.env, ...execaOptions.env}}
51
51
  )
52
52
  )
53
53
  ).map(({message, gitTags, ...commit}) => ({...commit, message: message.trim(), gitTags: gitTags.trim()}));
@@ -62,11 +62,11 @@ async function getCommits(from, to, execaOpts) {
62
62
  * @return {Array<String>} List of git branches.
63
63
  * @throws {Error} If the `git` command fails.
64
64
  */
65
- async function getBranches(repositoryUrl, execaOpts) {
66
- return (await execa('git', ['ls-remote', '--heads', repositoryUrl], execaOpts)).stdout
65
+ async function getBranches(repositoryUrl, execaOptions) {
66
+ return (await execa('git', ['ls-remote', '--heads', repositoryUrl], execaOptions)).stdout
67
67
  .split('\n')
68
68
  .filter(Boolean)
69
- .map(branch => branch.match(/^.+refs\/heads\/(?<branch>.+)$/)[1]);
69
+ .map((branch) => branch.match(/^.+refs\/heads\/(?<branch>.+)$/)[1]);
70
70
  }
71
71
 
72
72
  /**
@@ -77,9 +77,9 @@ async function getBranches(repositoryUrl, execaOpts) {
77
77
  *
78
78
  * @return {Boolean} `true` if the reference exists, falsy otherwise.
79
79
  */
80
- async function isRefExists(ref, execaOpts) {
80
+ async function isRefExists(ref, execaOptions) {
81
81
  try {
82
- return (await execa('git', ['rev-parse', '--verify', ref], execaOpts)).exitCode === 0;
82
+ return (await execa('git', ['rev-parse', '--verify', ref], execaOptions)).exitCode === 0;
83
83
  } catch (error) {
84
84
  debug(error);
85
85
  }
@@ -99,9 +99,9 @@ async function isRefExists(ref, execaOpts) {
99
99
  * @param {String} branch The repository branch to fetch.
100
100
  * @param {Object} [execaOpts] Options to pass to `execa`.
101
101
  */
102
- async function fetch(repositoryUrl, branch, ciBranch, execaOpts) {
102
+ async function fetch(repositoryUrl, branch, ciBranch, execaOptions) {
103
103
  const isDetachedHead =
104
- (await execa('git', ['rev-parse', '--abbrev-ref', 'HEAD'], {...execaOpts, reject: false})).stdout === 'HEAD';
104
+ (await execa('git', ['rev-parse', '--abbrev-ref', 'HEAD'], {...execaOptions, reject: false})).stdout === 'HEAD';
105
105
 
106
106
  try {
107
107
  await execa(
@@ -114,7 +114,7 @@ async function fetch(repositoryUrl, branch, ciBranch, execaOpts) {
114
114
  ? [repositoryUrl]
115
115
  : ['--update-head-ok', repositoryUrl, `+refs/heads/${branch}:refs/heads/${branch}`]),
116
116
  ],
117
- execaOpts
117
+ execaOptions
118
118
  );
119
119
  } catch (_) {
120
120
  await execa(
@@ -126,7 +126,7 @@ async function fetch(repositoryUrl, branch, ciBranch, execaOpts) {
126
126
  ? [repositoryUrl]
127
127
  : ['--update-head-ok', repositoryUrl, `+refs/heads/${branch}:refs/heads/${branch}`]),
128
128
  ],
129
- execaOpts
129
+ execaOptions
130
130
  );
131
131
  }
132
132
  }
@@ -137,16 +137,16 @@ async function fetch(repositoryUrl, branch, ciBranch, execaOpts) {
137
137
  * @param {String} repositoryUrl The remote repository URL.
138
138
  * @param {Object} [execaOpts] Options to pass to `execa`.
139
139
  */
140
- async function fetchNotes(repositoryUrl, execaOpts) {
140
+ async function fetchNotes(repositoryUrl, execaOptions) {
141
141
  try {
142
142
  await execa(
143
143
  'git',
144
144
  ['fetch', '--unshallow', repositoryUrl, `+refs/notes/${GIT_NOTE_REF}:refs/notes/${GIT_NOTE_REF}`],
145
- execaOpts
145
+ execaOptions
146
146
  );
147
147
  } catch (_) {
148
148
  await execa('git', ['fetch', repositoryUrl, `+refs/notes/${GIT_NOTE_REF}:refs/notes/${GIT_NOTE_REF}`], {
149
- ...execaOpts,
149
+ ...execaOptions,
150
150
  reject: false,
151
151
  });
152
152
  }
@@ -159,8 +159,8 @@ async function fetchNotes(repositoryUrl, execaOpts) {
159
159
  *
160
160
  * @return {String} the sha of the HEAD commit.
161
161
  */
162
- async function getGitHead(execaOpts) {
163
- return (await execa('git', ['rev-parse', 'HEAD'], execaOpts)).stdout;
162
+ async function getGitHead(execaOptions) {
163
+ return (await execa('git', ['rev-parse', 'HEAD'], execaOptions)).stdout;
164
164
  }
165
165
 
166
166
  /**
@@ -170,9 +170,9 @@ async function getGitHead(execaOpts) {
170
170
  *
171
171
  * @return {string} The value of the remote git URL.
172
172
  */
173
- async function repoUrl(execaOpts) {
173
+ async function repoUrl(execaOptions) {
174
174
  try {
175
- return (await execa('git', ['config', '--get', 'remote.origin.url'], execaOpts)).stdout;
175
+ return (await execa('git', ['config', '--get', 'remote.origin.url'], execaOptions)).stdout;
176
176
  } catch (error) {
177
177
  debug(error);
178
178
  }
@@ -185,9 +185,9 @@ async function repoUrl(execaOpts) {
185
185
  *
186
186
  * @return {Boolean} `true` if the current working directory is in a git repository, falsy otherwise.
187
187
  */
188
- async function isGitRepo(execaOpts) {
188
+ async function isGitRepo(execaOptions) {
189
189
  try {
190
- return (await execa('git', ['rev-parse', '--git-dir'], execaOpts)).exitCode === 0;
190
+ return (await execa('git', ['rev-parse', '--git-dir'], execaOptions)).exitCode === 0;
191
191
  } catch (error) {
192
192
  debug(error);
193
193
  }
@@ -202,9 +202,9 @@ async function isGitRepo(execaOpts) {
202
202
  *
203
203
  * @throws {Error} if not authorized to push.
204
204
  */
205
- async function verifyAuth(repositoryUrl, branch, execaOpts) {
205
+ async function verifyAuth(repositoryUrl, branch, execaOptions) {
206
206
  try {
207
- await execa('git', ['push', '--dry-run', '--no-verify', repositoryUrl, `HEAD:${branch}`], execaOpts);
207
+ await execa('git', ['push', '--dry-run', '--no-verify', repositoryUrl, `HEAD:${branch}`], execaOptions);
208
208
  } catch (error) {
209
209
  debug(error);
210
210
  throw error;
@@ -220,8 +220,8 @@ async function verifyAuth(repositoryUrl, branch, execaOpts) {
220
220
  *
221
221
  * @throws {Error} if the tag creation failed.
222
222
  */
223
- async function tag(tagName, ref, execaOpts) {
224
- await execa('git', ['tag', tagName, ref], execaOpts);
223
+ async function tag(tagName, ref, execaOptions) {
224
+ await execa('git', ['tag', tagName, ref], execaOptions);
225
225
  }
226
226
 
227
227
  /**
@@ -232,8 +232,8 @@ async function tag(tagName, ref, execaOpts) {
232
232
  *
233
233
  * @throws {Error} if the push failed.
234
234
  */
235
- async function push(repositoryUrl, execaOpts) {
236
- await execa('git', ['push', '--tags', repositoryUrl], execaOpts);
235
+ async function push(repositoryUrl, execaOptions) {
236
+ await execa('git', ['push', '--tags', repositoryUrl], execaOptions);
237
237
  }
238
238
 
239
239
  /**
@@ -244,8 +244,8 @@ async function push(repositoryUrl, execaOpts) {
244
244
  *
245
245
  * @throws {Error} if the push failed.
246
246
  */
247
- async function pushNotes(repositoryUrl, execaOpts) {
248
- await execa('git', ['push', repositoryUrl, `refs/notes/${GIT_NOTE_REF}`], execaOpts);
247
+ async function pushNotes(repositoryUrl, execaOptions) {
248
+ await execa('git', ['push', repositoryUrl, `refs/notes/${GIT_NOTE_REF}`], execaOptions);
249
249
  }
250
250
 
251
251
  /**
@@ -256,9 +256,9 @@ async function pushNotes(repositoryUrl, execaOpts) {
256
256
  *
257
257
  * @return {Boolean} `true` if valid, falsy otherwise.
258
258
  */
259
- async function verifyTagName(tagName, execaOpts) {
259
+ async function verifyTagName(tagName, execaOptions) {
260
260
  try {
261
- return (await execa('git', ['check-ref-format', `refs/tags/${tagName}`], execaOpts)).exitCode === 0;
261
+ return (await execa('git', ['check-ref-format', `refs/tags/${tagName}`], execaOptions)).exitCode === 0;
262
262
  } catch (error) {
263
263
  debug(error);
264
264
  }
@@ -272,9 +272,9 @@ async function verifyTagName(tagName, execaOpts) {
272
272
  *
273
273
  * @return {Boolean} `true` if valid, falsy otherwise.
274
274
  */
275
- async function verifyBranchName(branch, execaOpts) {
275
+ async function verifyBranchName(branch, execaOptions) {
276
276
  try {
277
- return (await execa('git', ['check-ref-format', `refs/heads/${branch}`], execaOpts)).exitCode === 0;
277
+ return (await execa('git', ['check-ref-format', `refs/heads/${branch}`], execaOptions)).exitCode === 0;
278
278
  } catch (error) {
279
279
  debug(error);
280
280
  }
@@ -289,10 +289,10 @@ async function verifyBranchName(branch, execaOpts) {
289
289
  *
290
290
  * @return {Boolean} `true` is the HEAD of the current local branch is the same as the HEAD of the remote branch, falsy otherwise.
291
291
  */
292
- async function isBranchUpToDate(repositoryUrl, branch, execaOpts) {
292
+ async function isBranchUpToDate(repositoryUrl, branch, execaOptions) {
293
293
  return (
294
- (await getGitHead(execaOpts)) ===
295
- (await execa('git', ['ls-remote', '--heads', repositoryUrl, branch], execaOpts)).stdout.match(/^(?<ref>\w+)?/)[1]
294
+ (await getGitHead(execaOptions)) ===
295
+ (await execa('git', ['ls-remote', '--heads', repositoryUrl, branch], execaOptions)).stdout.match(/^(?<ref>\w+)?/)[1]
296
296
  );
297
297
  }
298
298
 
@@ -304,9 +304,9 @@ async function isBranchUpToDate(repositoryUrl, branch, execaOpts) {
304
304
  *
305
305
  * @return {Object} the parsed JSON note if there is one, an empty object otherwise.
306
306
  */
307
- async function getNote(ref, execaOpts) {
307
+ async function getNote(ref, execaOptions) {
308
308
  try {
309
- return JSON.parse((await execa('git', ['notes', '--ref', GIT_NOTE_REF, 'show', ref], execaOpts)).stdout);
309
+ return JSON.parse((await execa('git', ['notes', '--ref', GIT_NOTE_REF, 'show', ref], execaOptions)).stdout);
310
310
  } catch (error) {
311
311
  if (error.exitCode === 1) {
312
312
  return {};
@@ -324,8 +324,8 @@ async function getNote(ref, execaOpts) {
324
324
  * @param {String} ref The Git reference to add the note to.
325
325
  * @param {Object} [execaOpts] Options to pass to `execa`.
326
326
  */
327
- async function addNote(note, ref, execaOpts) {
328
- await execa('git', ['notes', '--ref', GIT_NOTE_REF, 'add', '-f', '-m', JSON.stringify(note), ref], execaOpts);
327
+ async function addNote(note, ref, execaOptions) {
328
+ await execa('git', ['notes', '--ref', GIT_NOTE_REF, 'add', '-f', '-m', JSON.stringify(note), ref], execaOptions);
329
329
  }
330
330
 
331
331
  module.exports = {
@@ -1,12 +1,17 @@
1
1
  const {escapeRegExp, size, isString} = require('lodash');
2
2
  const {SECRET_REPLACEMENT, SECRET_MIN_SIZE} = require('./definitions/constants');
3
3
 
4
- module.exports = env => {
5
- const toReplace = Object.keys(env).filter(
6
- envVar => /token|password|credential|secret|private/i.test(envVar) && size(env[envVar].trim()) >= SECRET_MIN_SIZE
7
- );
4
+ module.exports = (env) => {
5
+ const toReplace = Object.keys(env).filter((envVar) => {
6
+ // https://github.com/semantic-release/semantic-release/issues/1558
7
+ if (envVar === 'GOPRIVATE') {
8
+ return false;
9
+ }
8
10
 
9
- const regexp = new RegExp(toReplace.map(envVar => escapeRegExp(env[envVar])).join('|'), 'g');
10
- return output =>
11
+ return /token|password|credential|secret|private/i.test(envVar) && size(env[envVar].trim()) >= SECRET_MIN_SIZE;
12
+ });
13
+
14
+ const regexp = new RegExp(toReplace.map((envVar) => escapeRegExp(env[envVar])).join('|'), 'g');
15
+ return (output) =>
11
16
  output && isString(output) && toReplace.length > 0 ? output.toString().replace(regexp, SECRET_REPLACEMENT) : output;
12
17
  };
@@ -46,14 +46,14 @@ module.exports = (context, pluginsPath) => {
46
46
 
47
47
  const pluginsConf = Object.entries(PLUGINS_DEFINITIONS).reduce(
48
48
  (pluginsConf, [type, {required, default: def, pipelineConfig, postprocess = identity, preprocess = identity}]) => {
49
- let pluginOpts;
49
+ let pluginOptions;
50
50
 
51
51
  if (isNil(options[type]) && def) {
52
- pluginOpts = def;
52
+ pluginOptions = def;
53
53
  } else {
54
54
  // If an object is passed and the path is missing, merge it with step options
55
55
  if (isPlainObject(options[type]) && !options[type].path) {
56
- options[type] = castArray(plugins[type]).map(plugin =>
56
+ options[type] = castArray(plugins[type]).map((plugin) =>
57
57
  plugin ? [plugin[0], Object.assign(plugin[1], options[type])] : plugin
58
58
  );
59
59
  }
@@ -63,10 +63,10 @@ module.exports = (context, pluginsPath) => {
63
63
  return pluginsConf;
64
64
  }
65
65
 
66
- pluginOpts = options[type];
66
+ pluginOptions = options[type];
67
67
  }
68
68
 
69
- const steps = castArray(pluginOpts).map(pluginOpt =>
69
+ const steps = castArray(pluginOptions).map((pluginOpt) =>
70
70
  normalize(
71
71
  {...context, options: omit(options, Object.keys(PLUGINS_DEFINITIONS), 'plugins')},
72
72
  type,
@@ -75,7 +75,7 @@ module.exports = (context, pluginsPath) => {
75
75
  )
76
76
  );
77
77
 
78
- pluginsConf[type] = async input =>
78
+ pluginsConf[type] = async (input) =>
79
79
  postprocess(
80
80
  await pipeline(steps, pipelineConfig && pipelineConfig(pluginsConf, logger))(await preprocess(input)),
81
81
  input
@@ -26,7 +26,7 @@ module.exports = (context, type, pluginOpt, pluginsPath) => {
26
26
  throw getError('EPLUGIN', {type, pluginName});
27
27
  }
28
28
 
29
- const validator = async input => {
29
+ const validator = async (input) => {
30
30
  const {dryRun, outputValidator} = PLUGINS_DEFINITIONS[type] || {};
31
31
  try {
32
32
  if (!input.options.dryRun || dryRun) {
@@ -48,7 +48,7 @@ module.exports = (context, type, pluginOpt, pluginsPath) => {
48
48
  logger.warn(`Skip step "${type}" of plugin "${pluginName}" in dry-run mode`);
49
49
  } catch (error) {
50
50
  logger.error(`Failed step "${type}" of plugin "${pluginName}"`);
51
- extractErrors(error).forEach(err => Object.assign(err, {pluginName}));
51
+ extractErrors(error).forEach((err) => Object.assign(err, {pluginName}));
52
52
  throw error;
53
53
  }
54
54
  };
@@ -25,7 +25,7 @@ const {extractErrors} = require('../utils');
25
25
  *
26
26
  * @return {Pipeline} A Function that execute the `steps` sequencially
27
27
  */
28
- module.exports = (steps, {settleAll = false, getNextInput = identity, transform = identity} = {}) => async input => {
28
+ module.exports = (steps, {settleAll = false, getNextInput = identity, transform = identity} = {}) => async (input) => {
29
29
  const results = [];
30
30
  const errors = [];
31
31
  await pReduce(
@@ -2,8 +2,8 @@ const {dirname} = require('path');
2
2
  const {isString, isFunction, castArray, isArray, isPlainObject, isNil} = require('lodash');
3
3
  const resolveFrom = require('resolve-from');
4
4
 
5
- const validateSteps = conf => {
6
- return conf.every(conf => {
5
+ const validateSteps = (conf) => {
6
+ return conf.every((conf) => {
7
7
  if (
8
8
  isArray(conf) &&
9
9
  (conf.length === 1 || conf.length === 2) &&
package/lib/utils.js CHANGED
@@ -8,13 +8,13 @@ function extractErrors(err) {
8
8
 
9
9
  function hideSensitiveValues(env, objs) {
10
10
  const hideFunction = hideSensitive(env);
11
- return objs.map(obj => {
12
- Object.getOwnPropertyNames(obj).forEach(prop => {
13
- if (obj[prop]) {
14
- obj[prop] = hideFunction(obj[prop]);
11
+ return objs.map((object) => {
12
+ Object.getOwnPropertyNames(object).forEach((prop) => {
13
+ if (object[prop]) {
14
+ object[prop] = hideFunction(object[prop]);
15
15
  }
16
16
  });
17
- return obj;
17
+ return object;
18
18
  });
19
19
  }
20
20
 
@@ -31,9 +31,14 @@ function isMaintenanceRange(range) {
31
31
  }
32
32
 
33
33
  function getUpperBound(range) {
34
- return semver.valid(range)
34
+ const result = semver.valid(range)
35
35
  ? range
36
- : ((semver.validRange(range) || '').match(/<(?<upperBound>\d+\.\d+\.\d+)$/) || [])[1];
36
+ : ((semver.validRange(range) || '').match(/<(?<upperBound>\d+\.\d+\.\d+(-\d+)?)$/) || [])[1];
37
+
38
+ return result
39
+ ? // https://github.com/npm/node-semver/issues/322
40
+ result.replace(/-\d+$/, '')
41
+ : result;
37
42
  }
38
43
 
39
44
  function getLowerBound(range) {
@@ -49,17 +54,17 @@ function lowest(version1, version2) {
49
54
  }
50
55
 
51
56
  function getLatestVersion(versions, {withPrerelease} = {}) {
52
- return versions.filter(version => withPrerelease || !semver.prerelease(version)).sort(semver.rcompare)[0];
57
+ return versions.filter((version) => withPrerelease || !semver.prerelease(version)).sort(semver.rcompare)[0];
53
58
  }
54
59
 
55
60
  function getEarliestVersion(versions, {withPrerelease} = {}) {
56
- return versions.filter(version => withPrerelease || !semver.prerelease(version)).sort(semver.compare)[0];
61
+ return versions.filter((version) => withPrerelease || !semver.prerelease(version)).sort(semver.compare)[0];
57
62
  }
58
63
 
59
64
  function getFirstVersion(versions, lowerBranches) {
60
65
  const lowerVersion = union(...lowerBranches.map(({tags}) => tagsToVersions(tags))).sort(semver.rcompare);
61
66
  if (lowerVersion[0]) {
62
- return versions.sort(semver.compare).find(version => semver.gt(version, lowerVersion[0]));
67
+ return versions.sort(semver.compare).find((version) => semver.gt(version, lowerVersion[0]));
63
68
  }
64
69
 
65
70
  return getEarliestVersion(versions);
package/lib/verify.js CHANGED
@@ -3,7 +3,7 @@ const AggregateError = require('aggregate-error');
3
3
  const {isGitRepo, verifyTagName} = require('./git');
4
4
  const getError = require('./get-error');
5
5
 
6
- module.exports = async context => {
6
+ module.exports = async (context) => {
7
7
  const {
8
8
  cwd,
9
9
  env,
@@ -29,7 +29,7 @@ module.exports = async context => {
29
29
  errors.push(getError('ETAGNOVERSION', context));
30
30
  }
31
31
 
32
- branches.forEach(branch => {
32
+ branches.forEach((branch) => {
33
33
  if (
34
34
  !((isString(branch) && branch.trim()) || (isPlainObject(branch) && isString(branch.name) && branch.name.trim()))
35
35
  ) {
package/package.json CHANGED
@@ -1,7 +1,7 @@
1
1
  {
2
2
  "name": "semantic-release",
3
3
  "description": "Automated semver compliant package publishing",
4
- "version": "17.0.4",
4
+ "version": "17.0.8",
5
5
  "author": "Stephan Bönnemann <stephan@boennemann.me> (http://boennemann.me)",
6
6
  "ava": {
7
7
  "files": [
@@ -37,14 +37,14 @@
37
37
  "hook-std": "^2.0.0",
38
38
  "hosted-git-info": "^3.0.0",
39
39
  "lodash": "^4.17.15",
40
- "marked": "^0.8.0",
40
+ "marked": "^1.0.0",
41
41
  "marked-terminal": "^4.0.0",
42
42
  "micromatch": "^4.0.2",
43
43
  "p-each-series": "^2.1.0",
44
44
  "p-reduce": "^2.0.0",
45
45
  "read-pkg-up": "^7.0.0",
46
46
  "resolve-from": "^5.0.0",
47
- "semver": "^7.1.1",
47
+ "semver": "^7.3.2",
48
48
  "semver-diff": "^3.1.1",
49
49
  "signale": "^1.2.1",
50
50
  "yargs": "^15.0.1"
@@ -56,18 +56,18 @@
56
56
  "delay": "^4.0.0",
57
57
  "dockerode": "^3.0.0",
58
58
  "file-url": "^3.0.0",
59
- "fs-extra": "^8.0.0",
60
- "got": "^10.5.2",
59
+ "fs-extra": "^9.0.0",
60
+ "got": "^11.0.0",
61
61
  "js-yaml": "^3.10.0",
62
62
  "mockserver-client": "^5.1.1",
63
63
  "nock": "^12.0.0",
64
64
  "nyc": "^15.0.0",
65
65
  "p-retry": "^4.0.0",
66
66
  "proxyquire": "^2.0.0",
67
- "sinon": "^8.0.4",
67
+ "sinon": "^9.0.0",
68
68
  "stream-buffers": "^3.0.2",
69
- "tempy": "^0.4.0",
70
- "xo": "^0.26.0"
69
+ "tempy": "^0.5.0",
70
+ "xo": "^0.29.0"
71
71
  },
72
72
  "engines": {
73
73
  "node": ">=10.18"
@@ -126,6 +126,9 @@
126
126
  },
127
127
  "xo": {
128
128
  "prettier": true,
129
- "space": true
129
+ "space": true,
130
+ "rules": {
131
+ "unicorn/string-content": "off"
132
+ }
130
133
  }
131
134
  }