@npmcli/template-oss 4.21.4 → 4.23.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (50)
  1. package/bin/apply.js +2 -5
  2. package/bin/check.js +2 -4
  3. package/bin/release-manager.js +1 -1
  4. package/bin/release-please.js +22 -18
  5. package/lib/apply/apply-files.js +14 -19
  6. package/lib/apply/apply-version.js +1 -5
  7. package/lib/apply/index.js +1 -4
  8. package/lib/check/check-apply.js +38 -39
  9. package/lib/check/check-changelog.js +1 -4
  10. package/lib/check/check-engines.js +5 -6
  11. package/lib/check/check-gitignore.js +14 -14
  12. package/lib/check/check-required.js +13 -15
  13. package/lib/check/check-unwanted.js +2 -3
  14. package/lib/check/index.js +9 -8
  15. package/lib/config.js +86 -35
  16. package/lib/content/LICENSE-md.hbs +2 -2
  17. package/lib/content/_job-matrix-yml.hbs +10 -0
  18. package/lib/content/_job-release-integration-yml.hbs +5 -10
  19. package/lib/content/_step-git-yml.hbs +1 -1
  20. package/lib/content/_step-node-yml.hbs +1 -1
  21. package/lib/content/action-create-check-yml.hbs +1 -1
  22. package/lib/content/ci-release-yml.hbs +2 -2
  23. package/lib/content/eslintrc-js.hbs +4 -5
  24. package/lib/content/gitignore.hbs +0 -3
  25. package/lib/content/index.js +39 -38
  26. package/lib/content/package-json.hbs +12 -2
  27. package/lib/content/prettier-js.hbs +6 -0
  28. package/lib/content/prettierignore.hbs +3 -0
  29. package/lib/content/release-yml.hbs +3 -3
  30. package/lib/index.js +3 -3
  31. package/lib/release/changelog.js +41 -44
  32. package/lib/release/node-workspace-format.js +29 -16
  33. package/lib/release/release-manager.js +61 -76
  34. package/lib/release/release-please.js +50 -61
  35. package/lib/release/util.js +11 -8
  36. package/lib/util/ci-versions.js +3 -3
  37. package/lib/util/dependabot.js +2 -2
  38. package/lib/util/files.js +34 -32
  39. package/lib/util/git.js +8 -5
  40. package/lib/util/gitignore.js +13 -11
  41. package/lib/util/has-package.js +7 -12
  42. package/lib/util/import-or-require.js +1 -1
  43. package/lib/util/json-diff.js +22 -21
  44. package/lib/util/merge.js +19 -16
  45. package/lib/util/output.js +8 -5
  46. package/lib/util/parser.js +86 -70
  47. package/lib/util/path.js +4 -4
  48. package/lib/util/template.js +13 -9
  49. package/package.json +13 -8
  50. package/lib/release/node-workspace.js +0 -103
package/lib/util/files.js CHANGED
@@ -12,48 +12,47 @@ const FILE_KEYS = ['rootRepo', 'rootModule', 'workspaceRepo', 'workspaceModule']
12
12
 
13
13
  const globify = pattern => pattern.split('\\').join('/')
14
14
 
15
- const deepMapKeys = (obj, fn) => Object.entries(obj).reduce((acc, [key, value]) => {
16
- acc[fn(key)] = isPlainObject(value) ? deepMapKeys(value, fn) : value
17
- return acc
18
- }, {})
15
+ const deepMapKeys = (obj, fn) =>
16
+ Object.entries(obj).reduce((acc, [key, value]) => {
17
+ acc[fn(key)] = isPlainObject(value) ? deepMapKeys(value, fn) : value
18
+ return acc
19
+ }, {})
19
20
 
20
21
  const mergeFiles = mergeWithCustomizers((value, srcValue, key, target, source, stack) => {
21
22
  // This will merge all files except if the src file has overwrite:false. Then
22
23
  // the files will be turned into an array so they can be applied on top of
23
24
  // each other in the parser.
24
- if (
25
- stack[0] === ADD_KEY &&
26
- FILE_KEYS.includes(stack[1]) &&
27
- value?.file &&
28
- srcValue?.overwrite === false
29
- ) {
25
+ if (stack[0] === ADD_KEY && FILE_KEYS.includes(stack[1]) && value?.file && srcValue?.overwrite === false) {
30
26
  return [value, omit(srcValue, 'overwrite')]
31
27
  }
32
28
  }, customizers.overwriteArrays)
33
29
 
34
- const fileEntries = (dir, files, options, { allowMultipleSources = true } = {}) => {
30
+ const fileEntries = (dir, files, options) => {
35
31
  const results = []
36
32
 
37
- for (const [key, source] of Object.entries(files)) {
33
+ for (const [key, value] of Object.entries(files)) {
38
34
  // remove any false values first since that means those targets are skipped
39
- if (source === false) {
35
+ if (value === false) {
40
36
  continue
41
37
  }
42
38
 
43
39
  // target paths need to be joined with dir and templated
44
40
  const target = join(dir, template(key, options))
45
41
 
46
- if (Array.isArray(source)) {
47
- // When turning an object of files into all its entries, we allow
48
- // multiples when applying changes, but not when checking for changes
49
- // since earlier files would always return as needing an update. So we
50
- // either allow multiples and return the array or only return the last
51
- // source file in the array.
52
- const sources = allowMultipleSources ? source : source.slice(-1)
53
- results.push(...sources.map(s => [target, s]))
54
- } else {
55
- results.push([target, source])
56
- }
42
+ // Allow an array of values to merge into a single source to be
43
+ // applied or diffed against the target. This is how overwrite:false
44
+ // works and they are merged.
45
+ const source = Array.isArray(value)
46
+ ? value.reduce(
47
+ (acc, { file, ...rest }) => {
48
+ acc.file.push(file)
49
+ return Object.assign(acc, rest)
50
+ },
51
+ { file: [] },
52
+ )
53
+ : value
54
+
55
+ results.push([target, source])
57
56
  }
58
57
 
59
58
  return results
@@ -111,12 +110,12 @@ const parseEach = async (dir, files, options, parseOptions, fn) => {
111
110
  }
112
111
 
113
112
  const parseConfig = (files, dir, overrides, templateSettings) => {
114
- const normalizeFiles = (v) => {
115
- v = deepMapKeys(v, (s) => template(s, templateSettings))
113
+ const normalizeFiles = v => {
114
+ v = deepMapKeys(v, s => template(s, templateSettings))
116
115
  return deepMapValues(v, (value, key) => {
117
116
  if (key === RM_KEY && Array.isArray(value)) {
118
117
  return value.reduce((acc, k) => {
119
- // template files now since they need to be normalized before merging
120
+ // template files now since they need to be normalized before merging
120
119
  acc[template(k, templateSettings)] = true
121
120
  return acc
122
121
  }, {})
@@ -133,15 +132,18 @@ const parseConfig = (files, dir, overrides, templateSettings) => {
133
132
  }
134
133
 
135
134
  const merged = mergeFiles(normalizeFiles(files), normalizeFiles(overrides))
136
- const withDefaults = defaultsDeep(merged, FILE_KEYS.reduce((acc, k) => {
137
- acc[k] = { [ADD_KEY]: {}, [RM_KEY]: {} }
138
- return acc
139
- }, {}))
135
+ const withDefaults = defaultsDeep(
136
+ merged,
137
+ FILE_KEYS.reduce((acc, k) => {
138
+ acc[k] = { [ADD_KEY]: {}, [RM_KEY]: {} }
139
+ return acc
140
+ }, {}),
141
+ )
140
142
 
141
143
  return withDefaults
142
144
  }
143
145
 
144
- const getAddedFiles = (files) => files ? Object.keys(files[ADD_KEY] || {}) : []
146
+ const getAddedFiles = files => (files ? Object.keys(files[ADD_KEY] || {}) : [])
145
147
 
146
148
  module.exports = {
147
149
  rmEach,
package/lib/util/git.js CHANGED
@@ -5,7 +5,7 @@ const { minimatch } = require('minimatch')
5
5
  const cache = new Map()
6
6
 
7
7
  const tryGit = async (path, ...args) => {
8
- if (!await git.is({ cwd: path })) {
8
+ if (!(await git.is({ cwd: path }))) {
9
9
  throw new Error('no git')
10
10
  }
11
11
  const key = [path, ...args].join(',')
@@ -23,7 +23,7 @@ const getRemoteUrl = async (path, remote) => {
23
23
  try {
24
24
  const urlStr = await tryGit(path, 'remote', 'get-url', remote)
25
25
  const { domain, user, project } = hgi.fromUrl(urlStr)
26
- const url = new URL(`https://${domain}`)
26
+ const url = new URL(`git+https://${domain}`)
27
27
  url.pathname = `/${user}/${project}.git`
28
28
  return url.toString()
29
29
  } catch {
@@ -31,7 +31,7 @@ const getRemoteUrl = async (path, remote) => {
31
31
  }
32
32
  }
33
33
 
34
- const getUrl = async (path) => {
34
+ const getUrl = async path => {
35
35
  return (await getRemoteUrl(path, 'upstream')) ?? (await getRemoteUrl(path, 'origin'))
36
36
  }
37
37
 
@@ -41,7 +41,10 @@ const getBranches = async (path, branchPatterns) => {
41
41
 
42
42
  try {
43
43
  const res = await tryGit(path, 'ls-remote', '--heads', 'origin').then(r => r.split('\n'))
44
- const remotes = res.map((h) => h.match(/refs\/heads\/(.*)$/)).filter(Boolean).map(h => h[1])
44
+ const remotes = res
45
+ .map(h => h.match(/refs\/heads\/(.*)$/))
46
+ .filter(Boolean)
47
+ .map(h => h[1])
45
48
  for (const branch of remotes) {
46
49
  for (const pattern of branchPatterns) {
47
50
  if (minimatch(branch, pattern)) {
@@ -61,7 +64,7 @@ const getBranches = async (path, branchPatterns) => {
61
64
  }
62
65
  }
63
66
 
64
- const defaultBranch = async (path) => {
67
+ const defaultBranch = async path => {
65
68
  try {
66
69
  const remotes = await tryGit(path, 'remote', 'show', 'origin')
67
70
  return remotes.match(/HEAD branch: (.*)$/m)?.[1]
@@ -4,17 +4,19 @@ const localeCompare = require('@isaacs/string-locale-compare')('en')
4
4
 
5
5
  const sortGitPaths = (a, b) => localeCompare(a.replace(/^!/g, ''), b.replace(/^!/g, ''))
6
6
 
7
- const allowDir = (p) => {
7
+ const allowDir = p => {
8
8
  const parts = p.split(posix.sep)
9
- return parts.flatMap((part, index, list) => {
10
- const prev = list.slice(0, index)
11
- const isLast = index === list.length - 1
12
- const ignorePart = ['', ...prev, part, ''].join(posix.sep)
13
- return [`!${ignorePart}`, !isLast && `${ignorePart}*`]
14
- }).filter(Boolean)
9
+ return parts
10
+ .flatMap((part, index, list) => {
11
+ const prev = list.slice(0, index)
12
+ const isLast = index === list.length - 1
13
+ const ignorePart = ['', ...prev, part, ''].join(posix.sep)
14
+ return [`!${ignorePart}`, !isLast && `${ignorePart}*`]
15
+ })
16
+ .filter(Boolean)
15
17
  }
16
18
 
17
- const allowRootDir = (p) => {
19
+ const allowRootDir = p => {
18
20
  // This negates the first part of each path for the gitignore
19
21
  // files. It should be used to allow directories where everything
20
22
  // should be allowed inside such as .github/. It shouldn't be used on
@@ -26,9 +28,9 @@ const allowRootDir = (p) => {
26
28
  }
27
29
 
28
30
  const gitignore = {
29
- allowDir: (dirs) => uniq(dirs.map(allowDir).flat()),
30
- allowRootDir: (dirs) => dirs.map(allowRootDir).map((p) => `!${posix.sep}${p}`),
31
- sort: (arr) => uniq(arr.sort(sortGitPaths)),
31
+ allowDir: dirs => uniq(dirs.map(allowDir).flat()),
32
+ allowRootDir: dirs => dirs.map(allowRootDir).map(p => `!${posix.sep}${p}`),
33
+ sort: arr => uniq(arr.sort(sortGitPaths)),
32
34
  }
33
35
 
34
36
  module.exports = gitignore
@@ -39,16 +39,11 @@ const getSpecVersion = (spec, where) => {
39
39
  return null
40
40
  }
41
41
 
42
- const isVersion = (s) => s instanceof semver.SemVer
42
+ const isVersion = s => s instanceof semver.SemVer
43
43
 
44
44
  // Returns whether the pkg has the dependency in a semver
45
45
  // compatible version in one or more locations
46
- const hasPackage = (
47
- pkg,
48
- spec,
49
- locations = installLocations,
50
- path
51
- ) => {
46
+ const hasPackage = (pkg, spec, locations = installLocations, path) => {
52
47
  const name = npa(spec).name
53
48
  const requested = getSpecVersion(spec)
54
49
 
@@ -57,16 +52,16 @@ const hasPackage = (
57
52
  }
58
53
 
59
54
  const existingByLocation = locations
60
- .map((location) => pkg[location])
61
- .filter((deps) => has(deps, name))
62
- .map((deps) => getSpecVersion(`${name}@${deps[name]}`, path))
55
+ .map(location => pkg[location])
56
+ .filter(deps => has(deps, name))
57
+ .map(deps => getSpecVersion(`${name}@${deps[name]}`, path))
63
58
  .filter(Boolean)
64
59
 
65
- return existingByLocation.some((existing) => {
60
+ return existingByLocation.some(existing => {
66
61
  if (existing === true) {
67
62
  return true
68
63
  }
69
- switch ([existing, requested].map((t) => isVersion(t) ? 'VER' : 'RNG').join('-')) {
64
+ switch ([existing, requested].map(t => (isVersion(t) ? 'VER' : 'RNG')).join('-')) {
70
65
  case `VER-VER`:
71
66
  // two versions, use semver.eq to check equality
72
67
  return semver.eq(existing, requested)
@@ -7,7 +7,7 @@ const { pathToFileURL } = require('url')
7
7
 
8
8
  const importOrRequireCache = new Map()
9
9
 
10
- const importOrRequire = async (path) => {
10
+ const importOrRequire = async path => {
11
11
  if (importOrRequireCache.has(path)) {
12
12
  return importOrRequireCache.get(path)
13
13
  }
@@ -6,28 +6,29 @@ const j = (obj, replacer = null) => JSON.stringify(obj, replacer, 2)
6
6
 
7
7
  // DELETE is a special string that will be the value of updated if it exists
8
8
  // but should be deleted
9
- const jsonDiff = (s1, s2, DELETE) => diff(s1, s2)
10
- .map(({ op, path, value }) => {
11
- // there could be cases where a whole object is reported
12
- // as missing and the expected value does not need to show
13
- // special DELETED values so filter those out here
14
- const msgVal = j(value, (_, v) => v === DELETE ? undefined : v)
15
- const prev = j(get(s1, path))
16
- const key = j(path.reduce((acc, p) => acc + (typeof p === 'number' ? `[${p}]` : `.${p}`)))
9
+ const jsonDiff = (s1, s2, DELETE) =>
10
+ diff(s1, s2)
11
+ .map(({ op, path, value }) => {
12
+ // there could be cases where a whole object is reported
13
+ // as missing and the expected value does not need to show
14
+ // special DELETED values so filter those out here
15
+ const msgVal = j(value, (_, v) => (v === DELETE ? undefined : v))
16
+ const prev = j(get(s1, path))
17
+ const key = j(path.reduce((acc, p) => acc + (typeof p === 'number' ? `[${p}]` : `.${p}`)))
17
18
 
18
- const msg = (...args) => format('%s is %s, expected %s', ...args)
19
- const AD = msg(key, 'missing', msgVal)
20
- const RM = msg(key, prev, 'to be removed')
21
- const UP = msg(key, prev, msgVal)
19
+ const msg = (...args) => format('%s is %s, expected %s', ...args)
20
+ const AD = msg(key, 'missing', msgVal)
21
+ const RM = msg(key, prev, 'to be removed')
22
+ const UP = msg(key, prev, msgVal)
22
23
 
23
- if (op === 'replace') {
24
- return value === DELETE ? RM : UP
25
- } else if (op === 'add' && value !== DELETE) {
26
- return AD
27
- }
28
- })
29
- .filter(Boolean)
30
- .sort((a, b) => a.localeCompare(b))
31
- .join('\n')
24
+ if (op === 'replace') {
25
+ return value === DELETE ? RM : UP
26
+ } else if (op === 'add' && value !== DELETE) {
27
+ return AD
28
+ }
29
+ })
30
+ .filter(Boolean)
31
+ .sort((a, b) => a.localeCompare(b))
32
+ .join('\n')
32
33
 
33
34
  module.exports = jsonDiff
package/lib/util/merge.js CHANGED
@@ -32,16 +32,17 @@ const mergeWith = (...args) => {
32
32
 
33
33
  // Create a merge function that will run a set of customizer functions
34
34
  const mergeWithCustomizers = (...customizers) => {
35
- return (...objects) => mergeWith({}, ...objects, (...args) => {
36
- for (const customizer of customizers) {
37
- const result = customizer(...args)
38
- // undefined means the customizer will defer to the next one
39
- // the default behavior of undefined in lodash is to merge
40
- if (result !== undefined) {
41
- return result
35
+ return (...objects) =>
36
+ mergeWith({}, ...objects, (...args) => {
37
+ for (const customizer of customizers) {
38
+ const result = customizer(...args)
39
+ // undefined means the customizer will defer to the next one
40
+ // the default behavior of undefined in lodash is to merge
41
+ if (result !== undefined) {
42
+ return result
43
+ }
42
44
  }
43
- }
44
- })
45
+ })
45
46
  }
46
47
 
47
48
  const customizers = {
@@ -52,14 +53,16 @@ const customizers = {
52
53
  }
53
54
  },
54
55
  // Merge arrays if their key matches one of the passed in keys
55
- mergeArrays: (...keys) => (value, srcValue, key) => {
56
- if (Array.isArray(srcValue)) {
57
- if (keys.includes(key)) {
58
- return (Array.isArray(value) ? value : []).concat(srcValue)
56
+ mergeArrays:
57
+ (...keys) =>
58
+ (value, srcValue, key) => {
59
+ if (Array.isArray(srcValue)) {
60
+ if (keys.includes(key)) {
61
+ return (Array.isArray(value) ? value : []).concat(srcValue)
62
+ }
63
+ return srcValue
59
64
  }
60
- return srcValue
61
- }
62
- },
65
+ },
63
66
  }
64
67
 
65
68
  module.exports = {
@@ -1,8 +1,12 @@
1
1
  const indent = (v, i = 2) => {
2
2
  if (Array.isArray(v)) {
3
- return v.map((a) => indent(a, i)).join('\n')
3
+ return v.map(a => indent(a, i)).join('\n')
4
4
  }
5
- return v.toString().split('\n').map((l) => ' '.repeat(i) + l).join('\n')
5
+ return v
6
+ .toString()
7
+ .split('\n')
8
+ .map(l => ' '.repeat(i) + l)
9
+ .join('\n')
6
10
  }
7
11
 
8
12
  const output = () => {
@@ -15,13 +19,12 @@ const output = () => {
15
19
  }
16
20
  }
17
21
 
18
- const outputProblems = (problems) => {
22
+ const outputProblems = problems => {
19
23
  const o = output()
20
24
  o.push('', 'Some problems were detected:')
21
25
  o.sep()
22
26
  for (const { title, body, solution } of problems) {
23
- const [solutionTitle, ...solutionRest] = Array.isArray(solution)
24
- ? solution : [solution]
27
+ const [solutionTitle, ...solutionRest] = Array.isArray(solution) ? solution : [solution]
25
28
  o.push(title, '', indent(body), '', `To correct it: ${solutionTitle}`)
26
29
  if (solutionRest.length) {
27
30
  o.push('', indent(solutionRest))