@npmcli/template-oss 4.15.1 → 4.17.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -9,7 +9,7 @@ const run = async (dir, files, options) => {
9
9
  await rmEach(dir, rm, options, (f) => fs.rm(f))
10
10
 
11
11
  log.verbose('apply-files', 'add', add)
12
- await parseEach(dir, add, options, (p) => p.applyWrite())
12
+ await parseEach(dir, add, options, {}, (p) => p.applyWrite())
13
13
  }
14
14
 
15
15
  module.exports = [{
@@ -12,7 +12,8 @@ const run = async (type, dir, files, options) => {
12
12
  const { add: addFiles, rm: rmFiles } = files
13
13
 
14
14
  const rm = await rmEach(dir, rmFiles, options, (f) => rel(f))
15
- const [add, update] = partition(await parseEach(dir, addFiles, options, async (p) => {
15
+ const parseOpts = { allowMultipleSources: false }
16
+ const [add, update] = partition(await parseEach(dir, addFiles, options, parseOpts, async (p) => {
16
17
  const diff = await p.applyDiff()
17
18
  const target = rel(p.target)
18
19
  if (diff === null) {
package/lib/config.js CHANGED
@@ -4,8 +4,8 @@ const semver = require('semver')
4
4
  const parseCIVersions = require('./util/parse-ci-versions.js')
5
5
  const getGitUrl = require('./util/get-git-url.js')
6
6
  const gitignore = require('./util/gitignore.js')
7
- const { withArrays } = require('./util/merge.js')
8
- const { FILE_KEYS, parseConfig: parseFiles, getAddedFiles } = require('./util/files.js')
7
+ const { mergeWithArrays } = require('./util/merge.js')
8
+ const { FILE_KEYS, parseConfig: parseFiles, getAddedFiles, mergeFiles } = require('./util/files.js')
9
9
 
10
10
  const CONFIG_KEY = 'templateOSS'
11
11
  const getPkgConfig = (pkg) => pkg[CONFIG_KEY] || {}
@@ -14,7 +14,7 @@ const { name: NAME, version: LATEST_VERSION } = require('../package.json')
14
14
  const MERGE_KEYS = [...FILE_KEYS, 'defaultContent', 'content']
15
15
  const DEFAULT_CONTENT = require.resolve(NAME)
16
16
 
17
- const merge = withArrays('branches', 'distPaths', 'allowPaths', 'ignorePaths')
17
+ const merge = mergeWithArrays('branches', 'distPaths', 'allowPaths', 'ignorePaths')
18
18
 
19
19
  const makePosix = (v) => v.split(win32.sep).join(posix.sep)
20
20
  const deglob = (v) => makePosix(v).replace(/[/*]+$/, '')
@@ -120,7 +120,7 @@ const getFullConfig = async ({
120
120
  // Files get merged in from the default content (that template-oss provides) as well
121
121
  // as any content paths provided from the root or the workspace
122
122
  const fileDirs = uniq([useDefault && defaultDir, rootDir, pkgDir].filter(Boolean))
123
- const files = merge(useDefault && defaultFiles, rootFiles, pkgFiles)
123
+ const files = mergeFiles(useDefault && defaultFiles, rootFiles, pkgFiles)
124
124
  const repoFiles = isRoot ? files.rootRepo : files.workspaceRepo
125
125
  const moduleFiles = isRoot ? files.rootModule : files.workspaceModule
126
126
 
@@ -195,7 +195,7 @@ const getFullConfig = async ({
195
195
  isLatest,
196
196
  // whether to install and update npm in ci
197
197
  // only do this if we aren't using a custom path to bin
198
- updateNpm: !npmPath.isLocal,
198
+ updateNpm: !npmPath.isLocal && pkgConfig.updateNpm,
199
199
  rootNpmPath: npmPath.root,
200
200
  localNpmPath: npmPath.local,
201
201
  rootNpxPath: npxPath.root,
@@ -228,9 +228,11 @@ const getFullConfig = async ({
228
228
 
229
229
  if (pkgConfig.ciVersions) {
230
230
  let versions = pkgConfig.ciVersions
231
- if (versions === 'latest') {
232
- const defaultVersions = [rootPkgConfig, defaultConfig].find(c => Array.isArray(c.ciVersions))
233
- versions = defaultVersions.ciVersions.slice(-1)
231
+ if (versions === 'latest' || (Array.isArray(versions) && versions.includes('latest'))) {
232
+ const { ciVersions } = [isWorkspace ? rootPkgConfig : {}, defaultConfig]
233
+ .find(c => Array.isArray(c.ciVersions))
234
+ const defaultLatest = ciVersions[ciVersions.length - 1]
235
+ versions = [].concat(versions).map(v => v === 'latest' ? defaultLatest : v)
234
236
  }
235
237
 
236
238
  const { targets, engines } = parseCIVersions(versions)
@@ -251,6 +253,14 @@ const getFullConfig = async ({
251
253
  derived.engines = pkgConfig.engines || engines
252
254
  }
253
255
 
256
+ if (!pkgConfig.eslint) {
257
+ derived.ignorePaths = derived.ignorePaths.filter(p => !p.includes('eslint'))
258
+ if (Array.isArray(pkgConfig.requiredPackages?.devDependencies)) {
259
+ pkgConfig.requiredPackages.devDependencies =
260
+ pkgConfig.requiredPackages.devDependencies.filter(p => !p.includes('eslint'))
261
+ }
262
+ }
263
+
254
264
  const gitUrl = await getGitUrl(rootPkg.path)
255
265
  if (gitUrl) {
256
266
  derived.repository = {
@@ -37,6 +37,7 @@ const sharedRootAdd = (name) => ({
37
37
  // dependabot
38
38
  '.github/dependabot.yml': {
39
39
  file: 'dependabot.yml',
40
+ filter: (p) => p.config.dependabot,
40
41
  clean: (p) => p.config.isRoot,
41
42
  // dependabot takes a single top level config file. this parser
42
43
  // will run for all configured packages and each one will have
@@ -48,6 +49,7 @@ const sharedRootAdd = (name) => ({
48
49
  },
49
50
  '.github/workflows/post-dependabot.yml': {
50
51
  file: 'post-dependabot.yml',
52
+ filter: (p) => p.config.dependabot,
51
53
  },
52
54
  '.github/settings.yml': {
53
55
  file: 'settings.yml',
@@ -85,7 +87,10 @@ const rootRepo = {
85
87
  // dir. so we might want to combine these
86
88
  const rootModule = {
87
89
  add: {
88
- '.eslintrc.js': 'eslintrc.js',
90
+ '.eslintrc.js': {
91
+ file: 'eslintrc.js',
92
+ filter: (p) => p.config.eslint,
93
+ },
89
94
  '.gitignore': 'gitignore',
90
95
  '.npmrc': 'npmrc',
91
96
  'SECURITY.md': 'SECURITY.md',
@@ -113,7 +118,10 @@ const workspaceRepo = {
113
118
  // Changes for each workspace but applied to the relative workspace dir
114
119
  const workspaceModule = {
115
120
  add: {
116
- '.eslintrc.js': 'eslintrc.js',
121
+ '.eslintrc.js': {
122
+ file: 'eslintrc.js',
123
+ filter: (p) => p.config.eslint,
124
+ },
117
125
  '.gitignore': 'gitignore',
118
126
  'package.json': 'pkg.json',
119
127
  },
@@ -131,7 +139,7 @@ module.exports = {
131
139
  workspaceModule,
132
140
  windowsCI: true,
133
141
  macCI: true,
134
- branches: ['main', 'latest'],
142
+ branches: ['main', 'latest', 'release/v*'],
135
143
  defaultBranch: 'main',
136
144
  distPaths: [
137
145
  'bin/',
@@ -155,10 +163,12 @@ module.exports = {
155
163
  ciVersions: ['14.17.0', '14.x', '16.13.0', '16.x', '18.0.0', '18.x'],
156
164
  lockfile: false,
157
165
  codeowner: '@npm/cli-team',
166
+ eslint: true,
158
167
  publish: false,
159
168
  npm: 'npm',
160
169
  npx: 'npx',
161
170
  npmSpec: 'latest',
171
+ updateNpm: true,
162
172
  dependabot: 'increase-if-necessary',
163
173
  unwantedPackages: [
164
174
  'eslint',
@@ -2,7 +2,7 @@
2
2
  "author": "GitHub Inc.",
3
3
  "files": {{{ json distPaths }}},
4
4
  "scripts": {
5
- "lint": "eslint \"**/*.js\"",
5
+ "lint": "{{#if eslint}}eslint \"**/*.js\"{{else}}echo linting disabled{{/if}}",
6
6
  "postlint": "template-oss-check",
7
7
  "template-oss-apply": "template-oss-apply --force",
8
8
  "lintfix": "{{ localNpmPath }} run lint -- --fix",
@@ -11,7 +11,6 @@ on:
11
11
  {{#each branches}}
12
12
  - {{ . }}
13
13
  {{/each}}
14
- - release/v*
15
14
 
16
15
  permissions:
17
16
  contents: write
@@ -55,7 +54,7 @@ jobs:
55
54
  let commentId = comments.find(c => c.user.login === 'github-actions[bot]' && c.body.startsWith(body))?.id
56
55
 
57
56
  body += `Release workflow run: ${workflow.html_url}\n\n#### Force CI to Update This Release\n\n`
58
- body += `This PR will be updated and CI will run for every non-\`chore:\` commit that is pushed to \`{{ defaultBranch }}\`. `
57
+ body += `This PR will be updated and CI will run for every non-\`chore:\` commit that is pushed to \`${REF_NAME}\`. `
59
58
  body += `To force CI to update this PR, run this command:\n\n`
60
59
  body += `\`\`\`\ngh workflow run release.yml -r ${REF_NAME} -R ${owner}/${repo} -f release-pr=${issue_number}\n\`\`\``
61
60
 
@@ -9,7 +9,8 @@ repository:
9
9
  enable_vulnerability_alerts: true
10
10
 
11
11
  branches:
12
- - name: {{ defaultBranch }}
12
+ {{#each branches}}
13
+ - name: {{ . }}
13
14
  protection:
14
15
  required_status_checks: null
15
16
  enforce_admins: true
@@ -22,3 +23,4 @@ branches:
22
23
  apps: []
23
24
  users: []
24
25
  teams: ["cli-team"]
26
+ {{/each}}
package/lib/util/files.js CHANGED
@@ -1,27 +1,62 @@
1
1
  const { join } = require('path')
2
- const { defaultsDeep } = require('lodash')
3
- const merge = require('./merge.js')
2
+ const { defaultsDeep, omit } = require('lodash')
4
3
  const deepMapValues = require('just-deep-map-values')
5
4
  const { glob } = require('glob')
5
+ const { mergeWithCustomizers, customizers } = require('./merge.js')
6
6
  const Parser = require('./parser.js')
7
7
  const template = require('./template.js')
8
8
 
9
+ const ADD_KEY = 'add'
10
+ const RM_KEY = 'rm'
9
11
  const FILE_KEYS = ['rootRepo', 'rootModule', 'workspaceRepo', 'workspaceModule']
10
12
 
11
13
  const globify = pattern => pattern.split('\\').join('/')
12
14
 
13
- const fileEntries = (dir, files, options) => Object.entries(files)
14
- // remove any false values
15
- .filter(([_, v]) => v !== false)
16
- // target paths need to be joined with dir and templated
17
- .map(([k, source]) => {
18
- const target = join(dir, template(k, options))
19
- return [target, source]
20
- })
15
+ const mergeFiles = mergeWithCustomizers((value, srcValue, key, target, source, stack) => {
16
+ // This will merge all files except if the src file has overwrite:false. Then
17
+ // the files will be turned into an array so they can be applied on top of
18
+ // each other in the parser.
19
+ if (
20
+ stack[0] === ADD_KEY &&
21
+ FILE_KEYS.includes(stack[1]) &&
22
+ value?.file &&
23
+ srcValue?.overwrite === false
24
+ ) {
25
+ return [value, omit(srcValue, 'overwrite')]
26
+ }
27
+ }, customizers.overwriteArrays)
28
+
29
+ const fileEntries = (dir, files, options, { allowMultipleSources = true } = {}) => {
30
+ const results = []
31
+
32
+ for (const [key, source] of Object.entries(files)) {
33
+ // remove any false values first since that means those targets are skipped
34
+ if (source === false) {
35
+ continue
36
+ }
37
+
38
+ // target paths need to be joined with dir and templated
39
+ const target = join(dir, template(key, options))
40
+
41
+ if (Array.isArray(source)) {
42
+ // When turning an object of files into all its entries, we allow
43
+ // multiples when applying changes, but not when checking for changes
44
+ // since earlier files would always return as needing an update. So we
45
+ // either allow multiples and return the array or only return the last
46
+ // source file in the array.
47
+ const sources = allowMultipleSources ? source : source.slice(-1)
48
+ results.push(...sources.map(s => [target, s]))
49
+ } else {
50
+ results.push([target, source])
51
+ }
52
+ }
53
+
54
+ return results
55
+ }
21
56
 
22
57
  // given an obj of files, return the full target/source paths and associated parser
23
- const getParsers = (dir, files, options) => {
24
- const parsers = fileEntries(dir, files, options).map(([target, source]) => {
58
+ const getParsers = (dir, files, options, parseOptions) => {
59
+ const parsers = fileEntries(dir, files, options, parseOptions).map(([target, source]) => {
25
60
  const { file, parser, filter, clean: shouldClean } = source
26
61
 
27
62
  if (typeof filter === 'function' && !filter(options)) {
@@ -35,7 +70,7 @@ const getParsers = (dir, files, options) => {
35
70
  return new (parser(Parser.Parsers))(target, file, options, { clean })
36
71
  }
37
72
 
38
- return new (Parser(file))(target, file, options, { clean })
73
+ return new (Parser(target))(target, file, options, { clean })
39
74
  })
40
75
 
41
76
  return parsers.filter(Boolean)
@@ -62,9 +97,9 @@ const rmEach = async (dir, files, options, fn) => {
62
97
  return res.filter(Boolean)
63
98
  }
64
99
 
65
- const parseEach = async (dir, files, options, fn) => {
100
+ const parseEach = async (dir, files, options, parseOptions, fn) => {
66
101
  const res = []
67
- for (const parser of getParsers(dir, files, options)) {
102
+ for (const parser of getParsers(dir, files, options, parseOptions)) {
68
103
  res.push(await fn(parser))
69
104
  }
70
105
  return res.filter(Boolean)
@@ -72,7 +107,7 @@ const parseEach = async (dir, files, options, fn) => {
72
107
 
73
108
  const parseConfig = (files, dir, overrides) => {
74
109
  const normalizeFiles = (v) => deepMapValues(v, (value, key) => {
75
- if (key === 'rm' && Array.isArray(value)) {
110
+ if (key === RM_KEY && Array.isArray(value)) {
76
111
  return value.reduce((acc, k) => {
77
112
  acc[k] = true
78
113
  return acc
@@ -88,16 +123,16 @@ const parseConfig = (files, dir, overrides) => {
88
123
  return value
89
124
  })
90
125
 
91
- const merged = merge(normalizeFiles(files), normalizeFiles(overrides))
126
+ const merged = mergeFiles(normalizeFiles(files), normalizeFiles(overrides))
92
127
  const withDefaults = defaultsDeep(merged, FILE_KEYS.reduce((acc, k) => {
93
- acc[k] = { add: {}, rm: {} }
128
+ acc[k] = { [ADD_KEY]: {}, [RM_KEY]: {} }
94
129
  return acc
95
130
  }, {}))
96
131
 
97
132
  return withDefaults
98
133
  }
99
134
 
100
- const getAddedFiles = (files) => files ? Object.keys(files.add || {}) : []
135
+ const getAddedFiles = (files) => files ? Object.keys(files[ADD_KEY] || {}) : []
101
136
 
102
137
  module.exports = {
103
138
  rmEach,
@@ -105,4 +140,5 @@ module.exports = {
105
140
  FILE_KEYS,
106
141
  parseConfig,
107
142
  getAddedFiles,
143
+ mergeFiles,
108
144
  }
package/lib/util/merge.js CHANGED
@@ -1,21 +1,72 @@
1
- const { mergeWith } = require('lodash')
1
+ const { mergeWith: _mergeWith } = require('lodash')
2
2
 
3
- const merge = (...objects) => mergeWith({}, ...objects, (value, srcValue, key) => {
4
- if (Array.isArray(srcValue)) {
5
- // Dont merge arrays, last array wins
6
- return srcValue
7
- }
8
- })
3
+ // Adapted from https://github.com/lodash/lodash/issues/3901#issuecomment-517983996
4
+ // Allows us to keep track of the current key during each merge so a customizer
5
+ // can make different merges based on the parent keys.
6
+ const mergeWith = (...args) => {
7
+ const customizer = args.pop()
8
+ const objects = args
9
+ const sourceStack = []
10
+ const keyStack = []
11
+ return _mergeWith({}, ...objects, (value, srcValue, key, target, source) => {
12
+ let currentKeys
13
+ while (true) {
14
+ if (!sourceStack.length) {
15
+ sourceStack.push(source)
16
+ keyStack.push([])
17
+ }
18
+ if (source === sourceStack[sourceStack.length - 1]) {
19
+ currentKeys = keyStack[keyStack.length - 1].concat(key)
20
+ sourceStack.push(srcValue)
21
+ keyStack.push(currentKeys)
22
+ break
23
+ }
24
+ sourceStack.pop()
25
+ keyStack.pop()
26
+ }
27
+ // Remove the last key since that is the current one and reverse the whole
28
+ // array so that the first entry is the parent, the second the grandparent, etc.
29
+ return customizer(value, srcValue, key, target, source, currentKeys.slice(0, -1).reverse())
30
+ })
31
+ }
32
+
33
+ // Create a merge function that will run a set of customizer functions
34
+ const mergeWithCustomizers = (...customizers) => {
35
+ return (...objects) => mergeWith({}, ...objects, (...args) => {
36
+ for (const customizer of customizers) {
37
+ const result = customizer(...args)
38
+ // undefined means the customizer will defer to the next one
39
+ // the default behavior of undefined in lodash is to merge
40
+ if (result !== undefined) {
41
+ return result
42
+ }
43
+ }
44
+ })
45
+ }
9
46
 
10
- const mergeWithArrays = (...keys) =>
11
- (...objects) => mergeWith({}, ...objects, (value, srcValue, key) => {
47
+ const customizers = {
48
+ // Don't merge arrays, last array wins
49
+ overwriteArrays: (value, srcValue) => {
50
+ if (Array.isArray(srcValue)) {
51
+ return srcValue
52
+ }
53
+ },
54
+ // Merge arrays if their key matches one of the passed in keys
55
+ mergeArrays: (...keys) => (value, srcValue, key) => {
12
56
  if (Array.isArray(srcValue)) {
13
57
  if (keys.includes(key)) {
14
58
  return (Array.isArray(value) ? value : []).concat(srcValue)
15
59
  }
16
60
  return srcValue
17
61
  }
18
- })
62
+ },
63
+ }
19
64
 
20
- module.exports = merge
21
- module.exports.withArrays = mergeWithArrays
65
+ module.exports = {
66
+ // default merge is to overwrite arrays
67
+ merge: mergeWithCustomizers(customizers.overwriteArrays),
68
+ mergeWithArrays: (...keys) => mergeWithCustomizers(customizers.mergeArrays(...keys)),
69
+ mergeWithCustomizers,
70
+ mergeWith,
71
+ customizers,
72
+ }
@@ -1,14 +1,15 @@
1
1
  const fs = require('fs/promises')
2
- const { basename, extname, dirname } = require('path')
2
+ const { dirname } = require('path')
3
3
  const yaml = require('yaml')
4
4
  const NpmPackageJson = require('@npmcli/package-json')
5
5
  const jsonParse = require('json-parse-even-better-errors')
6
6
  const Diff = require('diff')
7
7
  const { unset } = require('lodash')
8
8
  const ini = require('ini')
9
+ const { minimatch } = require('minimatch')
9
10
  const template = require('./template.js')
10
11
  const jsonDiff = require('./json-diff')
11
- const merge = require('./merge.js')
12
+ const { merge } = require('./merge.js')
12
13
 
13
14
  const setFirst = (first, rest) => ({ ...first, ...rest })
14
15
 
@@ -167,17 +168,17 @@ class Base {
167
168
  }
168
169
 
169
170
  class Gitignore extends Base {
170
- static types = ['codeowners', 'gitignore']
171
+ static types = ['codeowners', '.gitignore']
171
172
  comment = (c) => `# ${c}`
172
173
  }
173
174
 
174
175
  class Js extends Base {
175
- static types = ['js']
176
+ static types = ['*.js']
176
177
  comment = (c) => `/* ${c} */`
177
178
  }
178
179
 
179
180
  class Ini extends Base {
180
- static types = ['ini']
181
+ static types = ['*.ini']
181
182
  comment = (c) => `; ${c}`
182
183
 
183
184
  toString (s) {
@@ -202,17 +203,17 @@ class Ini extends Base {
202
203
  }
203
204
 
204
205
  class IniMerge extends Ini {
205
- static types = ['npmrc']
206
+ static types = ['.npmrc']
206
207
  merge = (t, s) => merge(t, s)
207
208
  }
208
209
 
209
210
  class Markdown extends Base {
210
- static types = ['md']
211
+ static types = ['*.md']
211
212
  comment = (c) => `<!-- ${c} -->`
212
213
  }
213
214
 
214
215
  class Yml extends Base {
215
- static types = ['yml']
216
+ static types = ['*.yml']
216
217
  comment = (c) => ` ${c}`
217
218
 
218
219
  toString (s) {
@@ -274,7 +275,7 @@ class YmlMerge extends Yml {
274
275
  }
275
276
 
276
277
  class Json extends Base {
277
- static types = ['json']
278
+ static types = ['*.json']
278
279
  // its a json comment! not really but we do add a special key
279
280
  // to json objects
280
281
  comment = (c) => ({ [`//${this.options.config.__NAME__}`]: c })
@@ -306,7 +307,7 @@ class JsonMerge extends Json {
306
307
  }
307
308
 
308
309
  class PackageJson extends JsonMerge {
309
- static types = ['pkg.json']
310
+ static types = ['package.json']
310
311
 
311
312
  async prepare (s, t) {
312
313
  // merge new source with current pkg content
@@ -348,15 +349,28 @@ const Parsers = {
348
349
  PackageJson,
349
350
  }
350
351
 
351
- const parserLookup = Object.values(Parsers)
352
+ // Create an order to look up parsers based on filename. The only important part
353
+ // of ordering is that we want to match types by exact match first, then globs,
354
+ // so we always sort globs to the bottom
355
+ const parserLookup = []
356
+ for (const parser of Object.values(Parsers)) {
357
+ for (const type of parser.types) {
358
+ const parserEntry = [type, parser]
359
+ if (type.includes('*')) {
360
+ parserLookup.push(parserEntry)
361
+ } else {
362
+ parserLookup.unshift(parserEntry)
363
+ }
364
+ }
365
+ }
352
366
 
353
367
  const getParser = (file) => {
354
- const base = basename(file).toLowerCase()
355
- const ext = extname(file).slice(1).toLowerCase()
356
-
357
- return parserLookup.find((p) => p.types.includes(base))
358
- || parserLookup.find((p) => p.types.includes(ext))
359
- || Parsers.Base
368
+ for (const [type, parser] of parserLookup) {
369
+ if (minimatch(file, type, { nocase: true, dot: true, matchBase: true })) {
370
+ return parser
371
+ }
372
+ }
373
+ return Parsers.Base
360
374
  }
361
375
 
362
376
  module.exports = getParser
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@npmcli/template-oss",
3
- "version": "4.15.1",
3
+ "version": "4.17.0",
4
4
  "description": "templated files used in npm CLI team oss projects",
5
5
  "main": "lib/content/index.js",
6
6
  "bin": {
@@ -39,7 +39,7 @@
39
39
  "@npmcli/arborist": "^6.0.0",
40
40
  "@npmcli/git": "^4.0.0",
41
41
  "@npmcli/map-workspaces": "^3.0.0",
42
- "@npmcli/package-json": "^3.0.0",
42
+ "@npmcli/package-json": "^4.0.0",
43
43
  "@octokit/rest": "^19.0.4",
44
44
  "diff": "^5.0.0",
45
45
  "glob": "^10.1.0",
@@ -50,6 +50,7 @@
50
50
  "just-deep-map-values": "^1.1.1",
51
51
  "just-diff": "^6.0.0",
52
52
  "lodash": "^4.17.21",
53
+ "minimatch": "^9.0.2",
53
54
  "npm-package-arg": "^10.0.0",
54
55
  "proc-log": "^3.0.0",
55
56
  "release-please": "npm:@npmcli/release-please@^14.2.6",