skuba 10.0.0-node-22-20250226231811 → 10.0.1-sub-fixes-20250314061613

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (36)
  1. package/lib/cli/configure/analyseDependencies.js +4 -1
  2. package/lib/cli/configure/analyseDependencies.js.map +2 -2
  3. package/lib/cli/lint/internal.js +2 -1
  4. package/lib/cli/lint/internal.js.map +2 -2
  5. package/lib/cli/lint/internalLints/detectBadCodeowners.d.ts +4 -0
  6. package/lib/cli/lint/internalLints/detectBadCodeowners.js +72 -0
  7. package/lib/cli/lint/internalLints/detectBadCodeowners.js.map +7 -0
  8. package/lib/cli/migrate/nodeVersion/checks.js +8 -1
  9. package/lib/cli/migrate/nodeVersion/checks.js.map +2 -2
  10. package/lib/cli/migrate/nodeVersion/getNodeTypesVersion.js +13 -33
  11. package/lib/cli/migrate/nodeVersion/getNodeTypesVersion.js.map +3 -3
  12. package/lib/cli/migrate/nodeVersion/index.js +4 -2
  13. package/lib/cli/migrate/nodeVersion/index.js.map +2 -2
  14. package/lib/utils/copy.d.ts +1 -1
  15. package/lib/utils/copy.js +11 -3
  16. package/lib/utils/copy.js.map +2 -2
  17. package/lib/utils/packageManager.d.ts +1 -0
  18. package/lib/utils/packageManager.js +9 -2
  19. package/lib/utils/packageManager.js.map +2 -2
  20. package/lib/utils/version.d.ts +17 -1
  21. package/lib/utils/version.js +48 -18
  22. package/lib/utils/version.js.map +3 -3
  23. package/package.json +6 -10
  24. package/template/express-rest-api/.gantry/common.yml +0 -1
  25. package/template/express-rest-api/Dockerfile.dev-deps +1 -1
  26. package/template/express-rest-api/package.json +2 -2
  27. package/template/greeter/Dockerfile +1 -1
  28. package/template/greeter/package.json +3 -3
  29. package/template/koa-rest-api/.gantry/common.yml +0 -1
  30. package/template/koa-rest-api/Dockerfile.dev-deps +1 -1
  31. package/template/koa-rest-api/package.json +2 -2
  32. package/template/lambda-sqs-worker-cdk/Dockerfile +1 -1
  33. package/template/lambda-sqs-worker-cdk/infra/appStack.ts +2 -2
  34. package/template/lambda-sqs-worker-cdk/package.json +5 -5
  35. package/template/oss-npm-package/_package.json +1 -1
  36. package/template/private-npm-package/_package.json +1 -1
@@ -53,7 +53,10 @@ const logDiff = (diff) => {
  const pinUnspecifiedVersions = async (dependencies) => {
  const updates = await Promise.all(
  Object.entries(dependencies).filter(([, version]) => version === "*").map(async ([name]) => {
- const version = await (name === "skuba" ? (0, import_version.getSkubaVersion)() : (0, import_version.latestNpmVersion)(name));
+ const version = await (name === "skuba" ? (0, import_version.getSkubaVersion)() : (0, import_version.getLatestNpmVersion)(name));
+ if (version === null) {
+ throw new Error(`Failed to fetch latest version of ${name}`);
+ }
  return [name, version];
  })
  );
@@ -1,7 +1,7 @@
  {
  "version": 3,
  "sources": ["../../../src/cli/configure/analyseDependencies.ts"],
- "sourcesContent": ["import path from 'path';\n\nimport fs from 'fs-extra';\nimport type { NormalizedReadResult } from 'read-pkg-up';\n\nimport { type TextProcessor, copyFiles } from '../../utils/copy';\nimport { log } from '../../utils/logging';\nimport type { ProjectType } from '../../utils/manifest';\nimport { getSkubaVersion, latestNpmVersion } from '../../utils/version';\n\nimport { diffDependencies } from './analysis/package';\nimport * as dependencyMutators from './dependencies';\nimport { formatPackage } from './processing/package';\nimport type { DependencyDiff } from './types';\n\nconst logDiff = (diff: DependencyDiff): boolean => {\n const entries = Object.entries(diff);\n\n if (entries.length === 0) {\n log.ok('\u2714 No changes');\n\n return false;\n }\n\n Object.entries(diff)\n .sort(([nameA], [nameB]) => nameA.localeCompare(nameB))\n .forEach(([name, { operation, version }]) =>\n log.plain(operation, name, log.formatSubtle(version)),\n );\n\n return true;\n};\n\nconst pinUnspecifiedVersions = async (\n dependencies: Record<string, string>,\n): Promise<void> => {\n const updates = await Promise.all(\n Object.entries(dependencies)\n .filter(([, version]) => version === '*')\n .map(async ([name]) => {\n const version = await (name === 'skuba'\n ? getSkubaVersion()\n : latestNpmVersion(name));\n\n return [name, version] as const;\n }),\n );\n\n updates.forEach(([name, version]) => {\n dependencies[name] = version;\n });\n};\n\ninterface Props {\n destinationRoot: string;\n include: (pathname: string) => boolean;\n manifest: NormalizedReadResult;\n type: ProjectType;\n}\n\nexport const analyseDependencies = async ({\n destinationRoot,\n include,\n manifest: { packageJson },\n type,\n}: Props): Promise<undefined | (() => Promise<void>)> => {\n const input = {\n dependencies: packageJson.dependencies ?? {},\n devDependencies: packageJson.devDependencies ?? {},\n type,\n };\n\n const output = {\n dependencies: { ...input.dependencies },\n devDependencies: { ...input.devDependencies },\n type,\n };\n\n const processors = Object.values(dependencyMutators).reduce<TextProcessor[]>(\n (acc, mutate) => {\n const newProcessors = mutate(output);\n acc.push(...newProcessors);\n return acc;\n },\n [],\n );\n\n await Promise.all([\n pinUnspecifiedVersions(output.dependencies),\n pinUnspecifiedVersions(output.devDependencies),\n ]);\n\n const dependencyDiff = diffDependencies({\n old: input.dependencies,\n new: output.dependencies,\n });\n\n log.newline();\n log.plain(log.bold('Dependencies:'));\n\n log.newline();\n const hasDependencyDiff = logDiff(dependencyDiff);\n\n const devDependencyDiff = diffDependencies({\n old: input.devDependencies,\n new: output.devDependencies,\n });\n\n log.newline();\n log.plain(log.bold('Dev dependencies:'));\n\n log.newline();\n const hasDevDependencyDiff = logDiff(devDependencyDiff);\n\n const packageJsonFilepath = path.join(destinationRoot, 'package.json');\n\n if (!hasDependencyDiff && !hasDevDependencyDiff) {\n return;\n }\n\n return async () => {\n const updatedPackageJson = await formatPackage({\n ...packageJson,\n dependencies: output.dependencies,\n devDependencies: output.devDependencies,\n });\n\n await fs.promises.writeFile(packageJsonFilepath, updatedPackageJson);\n\n if (processors.length === 0) {\n return;\n }\n\n await copyFiles({\n sourceRoot: destinationRoot,\n destinationRoot,\n include,\n processors,\n });\n };\n};\n"],
- "mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAiB;AAEjB,sBAAe;AAGf,kBAA8C;AAC9C,qBAAoB;AAEpB,qBAAkD;AAElD,qBAAiC;AACjC,yBAAoC;AACpC,IAAAA,kBAA8B;AAG9B,MAAM,UAAU,CAAC,SAAkC;AACjD,QAAM,UAAU,OAAO,QAAQ,IAAI;AAEnC,MAAI,QAAQ,WAAW,GAAG;AACxB,uBAAI,GAAG,mBAAc;AAErB,WAAO;AAAA,EACT;AAEA,SAAO,QAAQ,IAAI,EAChB,KAAK,CAAC,CAAC,KAAK,GAAG,CAAC,KAAK,MAAM,MAAM,cAAc,KAAK,CAAC,EACrD;AAAA,IAAQ,CAAC,CAAC,MAAM,EAAE,WAAW,QAAQ,CAAC,MACrC,mBAAI,MAAM,WAAW,MAAM,mBAAI,aAAa,OAAO,CAAC;AAAA,EACtD;AAEF,SAAO;AACT;AAEA,MAAM,yBAAyB,OAC7B,iBACkB;AAClB,QAAM,UAAU,MAAM,QAAQ;AAAA,IAC5B,OAAO,QAAQ,YAAY,EACxB,OAAO,CAAC,CAAC,EAAE,OAAO,MAAM,YAAY,GAAG,EACvC,IAAI,OAAO,CAAC,IAAI,MAAM;AACrB,YAAM,UAAU,OAAO,SAAS,cAC5B,gCAAgB,QAChB,iCAAiB,IAAI;AAEzB,aAAO,CAAC,MAAM,OAAO;AAAA,IACvB,CAAC;AAAA,EACL;AAEA,UAAQ,QAAQ,CAAC,CAAC,MAAM,OAAO,MAAM;AACnC,iBAAa,IAAI,IAAI;AAAA,EACvB,CAAC;AACH;AASO,MAAM,sBAAsB,OAAO;AAAA,EACxC;AAAA,EACA;AAAA,EACA,UAAU,EAAE,YAAY;AAAA,EACxB;AACF,MAAyD;AACvD,QAAM,QAAQ;AAAA,IACZ,cAAc,YAAY,gBAAgB,CAAC;AAAA,IAC3C,iBAAiB,YAAY,mBAAmB,CAAC;AAAA,IACjD;AAAA,EACF;AAEA,QAAM,SAAS;AAAA,IACb,cAAc,EAAE,GAAG,MAAM,aAAa;AAAA,IACtC,iBAAiB,EAAE,GAAG,MAAM,gBAAgB;AAAA,IAC5C;AAAA,EACF;AAEA,QAAM,aAAa,OAAO,OAAO,kBAAkB,EAAE;AAAA,IACnD,CAAC,KAAK,WAAW;AACf,YAAM,gBAAgB,OAAO,MAAM;AACnC,UAAI,KAAK,GAAG,aAAa;AACzB,aAAO;AAAA,IACT;AAAA,IACA,CAAC;AAAA,EACH;AAEA,QAAM,QAAQ,IAAI;AAAA,IAChB,uBAAuB,OAAO,YAAY;AAAA,IAC1C,uBAAuB,OAAO,eAAe;AAAA,EAC/C,CAAC;AAED,QAAM,qBAAiB,iCAAiB;AAAA,IACtC,KAAK,MAAM;AAAA,IACX,KAAK,OAAO;AAAA,EACd,CAAC;AAED,qBAAI,QAAQ;AACZ,qBAAI,MAAM,mBAAI,KAAK,eAAe,CAAC;AAEnC,qBAAI,QAAQ;AACZ,QAAM,oBAAoB,QAAQ,cAAc;AAEhD,QAAM,wBAAoB,iCAAiB;AAAA,IACzC,KAAK,MAAM;AAAA,IACX,KAAK,OAAO;AAAA,EACd,CAAC;AAED,qBAAI,QAAQ;AACZ,qBAAI,MAAM,mBAAI,KAAK,mBAAmB,CAAC;AAEvC,qBAAI,QAAQ;AACZ,QAAM,uBAAuB,QAAQ,iBAAiB;AAEtD,QAAM,sBAAsB,YAAAC,QAAK,KAAK,iBAAiB,cAAc;AAErE,MAAI,CAAC,qBAAqB,CAAC,sBAAsB;AAC/C;AAAA,EACF;AAEA,SAAO,YAAY;AACjB,UAAM,qBAAqB,UAAM,+BAAc;AAAA,MAC7C,GAAG;AAAA,MACH,cAAc,OAAO;AAAA,MACrB,iBAAiB,OAAO;AAAA,IAC1B,CAAC;AAED,UAAM,gBAAAC,QAAG,SAAS,UAAU,qBAAqB,kBAAkB;AAEnE,QAAI,WAAW,WAAW,GAAG;AAC3B;AAAA,IACF;AAEA,cAAM,uBAAU;AAAA,MACd,YAAY;AAAA,MACZ;AAAA,MACA;AAAA,MACA;AAAA,IACF,CAAC;AAAA,EACH;AACF;",
+ "sourcesContent": ["import path from 'path';\n\nimport fs from 'fs-extra';\nimport type { NormalizedReadResult } from 'read-pkg-up';\n\nimport { type TextProcessor, copyFiles } from '../../utils/copy';\nimport { log } from '../../utils/logging';\nimport type { ProjectType } from '../../utils/manifest';\nimport { getLatestNpmVersion, getSkubaVersion } from '../../utils/version';\n\nimport { diffDependencies } from './analysis/package';\nimport * as dependencyMutators from './dependencies';\nimport { formatPackage } from './processing/package';\nimport type { DependencyDiff } from './types';\n\nconst logDiff = (diff: DependencyDiff): boolean => {\n const entries = Object.entries(diff);\n\n if (entries.length === 0) {\n log.ok('\u2714 No changes');\n\n return false;\n }\n\n Object.entries(diff)\n .sort(([nameA], [nameB]) => nameA.localeCompare(nameB))\n .forEach(([name, { operation, version }]) =>\n log.plain(operation, name, log.formatSubtle(version)),\n );\n\n return true;\n};\n\nconst pinUnspecifiedVersions = async (\n dependencies: Record<string, string>,\n): Promise<void> => {\n const updates = await Promise.all(\n Object.entries(dependencies)\n .filter(([, version]) => version === '*')\n .map(async ([name]) => {\n const version = await (name === 'skuba'\n ? getSkubaVersion()\n : getLatestNpmVersion(name));\n\n if (version === null) {\n throw new Error(`Failed to fetch latest version of ${name}`);\n }\n\n return [name, version] as const;\n }),\n );\n\n updates.forEach(([name, version]) => {\n dependencies[name] = version;\n });\n};\n\ninterface Props {\n destinationRoot: string;\n include: (pathname: string) => boolean;\n manifest: NormalizedReadResult;\n type: ProjectType;\n}\n\nexport const analyseDependencies = async ({\n destinationRoot,\n include,\n manifest: { packageJson },\n type,\n}: Props): Promise<undefined | (() => Promise<void>)> => {\n const input = {\n dependencies: packageJson.dependencies ?? {},\n devDependencies: packageJson.devDependencies ?? {},\n type,\n };\n\n const output = {\n dependencies: { ...input.dependencies },\n devDependencies: { ...input.devDependencies },\n type,\n };\n\n const processors = Object.values(dependencyMutators).reduce<TextProcessor[]>(\n (acc, mutate) => {\n const newProcessors = mutate(output);\n acc.push(...newProcessors);\n return acc;\n },\n [],\n );\n\n await Promise.all([\n pinUnspecifiedVersions(output.dependencies),\n pinUnspecifiedVersions(output.devDependencies),\n ]);\n\n const dependencyDiff = diffDependencies({\n old: input.dependencies,\n new: output.dependencies,\n });\n\n log.newline();\n log.plain(log.bold('Dependencies:'));\n\n log.newline();\n const hasDependencyDiff = logDiff(dependencyDiff);\n\n const devDependencyDiff = diffDependencies({\n old: input.devDependencies,\n new: output.devDependencies,\n });\n\n log.newline();\n log.plain(log.bold('Dev dependencies:'));\n\n log.newline();\n const hasDevDependencyDiff = logDiff(devDependencyDiff);\n\n const packageJsonFilepath = path.join(destinationRoot, 'package.json');\n\n if (!hasDependencyDiff && !hasDevDependencyDiff) {\n return;\n }\n\n return async () => {\n const updatedPackageJson = await formatPackage({\n ...packageJson,\n dependencies: output.dependencies,\n devDependencies: output.devDependencies,\n });\n\n await fs.promises.writeFile(packageJsonFilepath, updatedPackageJson);\n\n if (processors.length === 0) {\n return;\n }\n\n await copyFiles({\n sourceRoot: destinationRoot,\n destinationRoot,\n include,\n processors,\n });\n };\n};\n"],
+ "mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAiB;AAEjB,sBAAe;AAGf,kBAA8C;AAC9C,qBAAoB;AAEpB,qBAAqD;AAErD,qBAAiC;AACjC,yBAAoC;AACpC,IAAAA,kBAA8B;AAG9B,MAAM,UAAU,CAAC,SAAkC;AACjD,QAAM,UAAU,OAAO,QAAQ,IAAI;AAEnC,MAAI,QAAQ,WAAW,GAAG;AACxB,uBAAI,GAAG,mBAAc;AAErB,WAAO;AAAA,EACT;AAEA,SAAO,QAAQ,IAAI,EAChB,KAAK,CAAC,CAAC,KAAK,GAAG,CAAC,KAAK,MAAM,MAAM,cAAc,KAAK,CAAC,EACrD;AAAA,IAAQ,CAAC,CAAC,MAAM,EAAE,WAAW,QAAQ,CAAC,MACrC,mBAAI,MAAM,WAAW,MAAM,mBAAI,aAAa,OAAO,CAAC;AAAA,EACtD;AAEF,SAAO;AACT;AAEA,MAAM,yBAAyB,OAC7B,iBACkB;AAClB,QAAM,UAAU,MAAM,QAAQ;AAAA,IAC5B,OAAO,QAAQ,YAAY,EACxB,OAAO,CAAC,CAAC,EAAE,OAAO,MAAM,YAAY,GAAG,EACvC,IAAI,OAAO,CAAC,IAAI,MAAM;AACrB,YAAM,UAAU,OAAO,SAAS,cAC5B,gCAAgB,QAChB,oCAAoB,IAAI;AAE5B,UAAI,YAAY,MAAM;AACpB,cAAM,IAAI,MAAM,qCAAqC,IAAI,EAAE;AAAA,MAC7D;AAEA,aAAO,CAAC,MAAM,OAAO;AAAA,IACvB,CAAC;AAAA,EACL;AAEA,UAAQ,QAAQ,CAAC,CAAC,MAAM,OAAO,MAAM;AACnC,iBAAa,IAAI,IAAI;AAAA,EACvB,CAAC;AACH;AASO,MAAM,sBAAsB,OAAO;AAAA,EACxC;AAAA,EACA;AAAA,EACA,UAAU,EAAE,YAAY;AAAA,EACxB;AACF,MAAyD;AACvD,QAAM,QAAQ;AAAA,IACZ,cAAc,YAAY,gBAAgB,CAAC;AAAA,IAC3C,iBAAiB,YAAY,mBAAmB,CAAC;AAAA,IACjD;AAAA,EACF;AAEA,QAAM,SAAS;AAAA,IACb,cAAc,EAAE,GAAG,MAAM,aAAa;AAAA,IACtC,iBAAiB,EAAE,GAAG,MAAM,gBAAgB;AAAA,IAC5C;AAAA,EACF;AAEA,QAAM,aAAa,OAAO,OAAO,kBAAkB,EAAE;AAAA,IACnD,CAAC,KAAK,WAAW;AACf,YAAM,gBAAgB,OAAO,MAAM;AACnC,UAAI,KAAK,GAAG,aAAa;AACzB,aAAO;AAAA,IACT;AAAA,IACA,CAAC;AAAA,EACH;AAEA,QAAM,QAAQ,IAAI;AAAA,IAChB,uBAAuB,OAAO,YAAY;AAAA,IAC1C,uBAAuB,OAAO,eAAe;AAAA,EAC/C,CAAC;AAED,QAAM,qBAAiB,iCAAiB;AAAA,IACtC,KAAK,MAAM;AAAA,IACX,KAAK,OAAO;AAAA,EACd,CAAC;AAED,qBAAI,QAAQ;AACZ,qBAAI,MAAM,mBAAI,KAAK,eAAe,CAAC;AAEnC,qBAAI,QAAQ;AACZ,QAAM,oBAAoB,QAAQ,cAAc;AAEhD,QAAM,wBAAoB,iCAAiB;AAAA,IACzC,KAAK,MAAM;AAAA,IACX,KAAK,OAAO;AAAA,EACd,CAAC;AAED,qBAAI,QAAQ;AACZ,qBAAI,MAAM,mBAAI,KAAK,mBAAmB,CAAC;AAEvC,qBAAI,QAAQ;AACZ,QAAM,uBAAuB,QAAQ,iBAAiB;AAEtD,QAAM,sBAAsB,YAAAC,QAAK,KAAK,iBAAiB,cAAc;AAErE,MAAI,CAAC,qBAAqB,CAAC,sBAAsB;AAC/C;AAAA,EACF;AAEA,SAAO,YAAY;AACjB,UAAM,qBAAqB,UAAM,+BAAc;AAAA,MAC7C,GAAG;AAAA,MACH,cAAc,OAAO;AAAA,MACrB,iBAAiB,OAAO;AAAA,IAC1B,CAAC;AAED,UAAM,gBAAAC,QAAG,SAAS,UAAU,qBAAqB,kBAAkB;AAEnE,QAAI,WAAW,WAAW,GAAG;AAC3B;AAAA,IACF;AAEA,cAAM,uBAAU;AAAA,MACd,YAAY;AAAA,MACZ;AAAA,MACA;AAAA,MACA;AAAA,IACF,CAAC;AAAA,EACH;AACF;",
  "names": ["import_package", "path", "fs"]
  }
@@ -34,12 +34,13 @@ module.exports = __toCommonJS(internal_exports);
  var import_util = require("util");
  var import_chalk = __toESM(require("chalk"));
  var import_logging = require("../../utils/logging");
+ var import_detectBadCodeowners = require("./internalLints/detectBadCodeowners");
  var import_noSkubaTemplateJs = require("./internalLints/noSkubaTemplateJs");
  var import_refreshConfigFiles = require("./internalLints/refreshConfigFiles");
  var import_upgrade = require("./internalLints/upgrade");
  const lints = [
  // Run upgradeSkuba before refreshConfigFiles for npmrc handling
- [import_upgrade.upgradeSkuba],
+ [import_upgrade.upgradeSkuba, import_detectBadCodeowners.tryDetectBadCodeowners],
  [import_noSkubaTemplateJs.noSkubaTemplateJs, import_refreshConfigFiles.tryRefreshConfigFiles]
  ];
  const lintSerially = async (mode, logger) => {
@@ -1,7 +1,7 @@
  {
  "version": 3,
  "sources": ["../../../src/cli/lint/internal.ts"],
- "sourcesContent": ["import { inspect } from 'util';\n\nimport chalk from 'chalk';\n\nimport { type Logger, createLogger } from '../../utils/logging';\n\nimport { noSkubaTemplateJs } from './internalLints/noSkubaTemplateJs';\nimport { tryRefreshConfigFiles } from './internalLints/refreshConfigFiles';\nimport { upgradeSkuba } from './internalLints/upgrade';\nimport type { Input } from './types';\n\nexport type InternalLintResult = {\n ok: boolean;\n fixable: boolean;\n annotations?: Array<{\n start_line?: number;\n end_line?: number;\n path: string;\n message: string;\n }>;\n};\n\nconst lints: Array<\n Array<\n (mode: 'format' | 'lint', logger: Logger) => Promise<InternalLintResult>\n >\n> = [\n // Run upgradeSkuba before refreshConfigFiles for npmrc handling\n [upgradeSkuba],\n [noSkubaTemplateJs, tryRefreshConfigFiles],\n];\n\nconst lintSerially = async (mode: 'format' | 'lint', logger: Logger) => {\n const results: InternalLintResult[] = [];\n for (const lintGroup of lints) {\n for (const lint of lintGroup) {\n results.push(await lint(mode, logger));\n }\n }\n return results;\n};\n\nconst lintConcurrently = async (mode: 'format' | 'lint', logger: Logger) => {\n const results: InternalLintResult[] = [];\n\n for (const lintGroup of lints) {\n results.push(\n ...(await Promise.all(lintGroup.map((lint) => lint(mode, logger)))),\n );\n }\n\n return results;\n};\n\nconst selectLintFunction = (input?: Input) => {\n const isSerial = input?.debug || input?.serial;\n return isSerial ? lintSerially : lintConcurrently;\n};\n\nexport const internalLint = async (\n mode: 'format' | 'lint',\n input?: Input,\n): Promise<InternalLintResult> => {\n const start = process.hrtime.bigint();\n const logger = createLogger(\n input?.debug ?? false,\n ...(mode === 'lint' ? [chalk.blueBright('skuba \u2502')] : []),\n );\n\n try {\n const lint = selectLintFunction(input);\n const results = await lint(mode, logger);\n const result = combineResults(results);\n const end = process.hrtime.bigint();\n logger.plain(`Processed skuba lints in ${logger.timing(start, end)}.`);\n return result;\n } catch (err) {\n logger.err(logger.bold('Failed to run skuba lints.'));\n logger.subtle(inspect(err));\n\n process.exitCode = 1;\n\n return { ok: false, fixable: false, annotations: [] };\n }\n};\n\nconst combineResults = (results: InternalLintResult[]): InternalLintResult =>\n results.reduce(\n (cur, next) => ({\n ok: cur.ok && next.ok,\n fixable: cur.fixable || next.fixable,\n annotations: [...(cur.annotations ?? []), ...(next.annotations ?? [])],\n }),\n { ok: true, fixable: false },\n );\n"],
- "mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAwB;AAExB,mBAAkB;AAElB,qBAA0C;AAE1C,+BAAkC;AAClC,gCAAsC;AACtC,qBAA6B;AAc7B,MAAM,QAIF;AAAA;AAAA,EAEF,CAAC,2BAAY;AAAA,EACb,CAAC,4CAAmB,+CAAqB;AAC3C;AAEA,MAAM,eAAe,OAAO,MAAyB,WAAmB;AACtE,QAAM,UAAgC,CAAC;AACvC,aAAW,aAAa,OAAO;AAC7B,eAAW,QAAQ,WAAW;AAC5B,cAAQ,KAAK,MAAM,KAAK,MAAM,MAAM,CAAC;AAAA,IACvC;AAAA,EACF;AACA,SAAO;AACT;AAEA,MAAM,mBAAmB,OAAO,MAAyB,WAAmB;AAC1E,QAAM,UAAgC,CAAC;AAEvC,aAAW,aAAa,OAAO;AAC7B,YAAQ;AAAA,MACN,GAAI,MAAM,QAAQ,IAAI,UAAU,IAAI,CAAC,SAAS,KAAK,MAAM,MAAM,CAAC,CAAC;AAAA,IACnE;AAAA,EACF;AAEA,SAAO;AACT;AAEA,MAAM,qBAAqB,CAAC,UAAkB;AAC5C,QAAM,WAAW,OAAO,SAAS,OAAO;AACxC,SAAO,WAAW,eAAe;AACnC;AAEO,MAAM,eAAe,OAC1B,MACA,UACgC;AAChC,QAAM,QAAQ,QAAQ,OAAO,OAAO;AACpC,QAAM,aAAS;AAAA,IACb,OAAO,SAAS;AAAA,IAChB,GAAI,SAAS,SAAS,CAAC,aAAAA,QAAM,WAAW,iBAAY,CAAC,IAAI,CAAC;AAAA,EAC5D;AAEA,MAAI;AACF,UAAM,OAAO,mBAAmB,KAAK;AACrC,UAAM,UAAU,MAAM,KAAK,MAAM,MAAM;AACvC,UAAM,SAAS,eAAe,OAAO;AACrC,UAAM,MAAM,QAAQ,OAAO,OAAO;AAClC,WAAO,MAAM,4BAA4B,OAAO,OAAO,OAAO,GAAG,CAAC,GAAG;AACrE,WAAO;AAAA,EACT,SAAS,KAAK;AACZ,WAAO,IAAI,OAAO,KAAK,4BAA4B,CAAC;AACpD,WAAO,WAAO,qBAAQ,GAAG,CAAC;AAE1B,YAAQ,WAAW;AAEnB,WAAO,EAAE,IAAI,OAAO,SAAS,OAAO,aAAa,CAAC,EAAE;AAAA,EACtD;AACF;AAEA,MAAM,iBAAiB,CAAC,YACtB,QAAQ;AAAA,EACN,CAAC,KAAK,UAAU;AAAA,IACd,IAAI,IAAI,MAAM,KAAK;AAAA,IACnB,SAAS,IAAI,WAAW,KAAK;AAAA,IAC7B,aAAa,CAAC,GAAI,IAAI,eAAe,CAAC,GAAI,GAAI,KAAK,eAAe,CAAC,CAAE;AAAA,EACvE;AAAA,EACA,EAAE,IAAI,MAAM,SAAS,MAAM;AAC7B;",
+ "sourcesContent": ["import { inspect } from 'util';\n\nimport chalk from 'chalk';\n\nimport { type Logger, createLogger } from '../../utils/logging';\n\nimport { tryDetectBadCodeowners } from './internalLints/detectBadCodeowners';\nimport { noSkubaTemplateJs } from './internalLints/noSkubaTemplateJs';\nimport { tryRefreshConfigFiles } from './internalLints/refreshConfigFiles';\nimport { upgradeSkuba } from './internalLints/upgrade';\nimport type { Input } from './types';\n\nexport type InternalLintResult = {\n ok: boolean;\n fixable: boolean;\n annotations?: Array<{\n start_line?: number;\n end_line?: number;\n path: string;\n message: string;\n }>;\n};\n\nconst lints: Array<\n Array<\n (mode: 'format' | 'lint', logger: Logger) => Promise<InternalLintResult>\n >\n> = [\n // Run upgradeSkuba before refreshConfigFiles for npmrc handling\n [upgradeSkuba, tryDetectBadCodeowners],\n [noSkubaTemplateJs, tryRefreshConfigFiles],\n];\n\nconst lintSerially = async (mode: 'format' | 'lint', logger: Logger) => {\n const results: InternalLintResult[] = [];\n for (const lintGroup of lints) {\n for (const lint of lintGroup) {\n results.push(await lint(mode, logger));\n }\n }\n return results;\n};\n\nconst lintConcurrently = async (mode: 'format' | 'lint', logger: Logger) => {\n const results: InternalLintResult[] = [];\n\n for (const lintGroup of lints) {\n results.push(\n ...(await Promise.all(lintGroup.map((lint) => lint(mode, logger)))),\n );\n }\n\n return results;\n};\n\nconst selectLintFunction = (input?: Input) => {\n const isSerial = input?.debug || input?.serial;\n return isSerial ? lintSerially : lintConcurrently;\n};\n\nexport const internalLint = async (\n mode: 'format' | 'lint',\n input?: Input,\n): Promise<InternalLintResult> => {\n const start = process.hrtime.bigint();\n const logger = createLogger(\n input?.debug ?? false,\n ...(mode === 'lint' ? [chalk.blueBright('skuba \u2502')] : []),\n );\n\n try {\n const lint = selectLintFunction(input);\n const results = await lint(mode, logger);\n const result = combineResults(results);\n const end = process.hrtime.bigint();\n logger.plain(`Processed skuba lints in ${logger.timing(start, end)}.`);\n return result;\n } catch (err) {\n logger.err(logger.bold('Failed to run skuba lints.'));\n logger.subtle(inspect(err));\n\n process.exitCode = 1;\n\n return { ok: false, fixable: false, annotations: [] };\n }\n};\n\nconst combineResults = (results: InternalLintResult[]): InternalLintResult =>\n results.reduce(\n (cur, next) => ({\n ok: cur.ok && next.ok,\n fixable: cur.fixable || next.fixable,\n annotations: [...(cur.annotations ?? []), ...(next.annotations ?? [])],\n }),\n { ok: true, fixable: false },\n );\n"],
+ "mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAwB;AAExB,mBAAkB;AAElB,qBAA0C;AAE1C,iCAAuC;AACvC,+BAAkC;AAClC,gCAAsC;AACtC,qBAA6B;AAc7B,MAAM,QAIF;AAAA;AAAA,EAEF,CAAC,6BAAc,iDAAsB;AAAA,EACrC,CAAC,4CAAmB,+CAAqB;AAC3C;AAEA,MAAM,eAAe,OAAO,MAAyB,WAAmB;AACtE,QAAM,UAAgC,CAAC;AACvC,aAAW,aAAa,OAAO;AAC7B,eAAW,QAAQ,WAAW;AAC5B,cAAQ,KAAK,MAAM,KAAK,MAAM,MAAM,CAAC;AAAA,IACvC;AAAA,EACF;AACA,SAAO;AACT;AAEA,MAAM,mBAAmB,OAAO,MAAyB,WAAmB;AAC1E,QAAM,UAAgC,CAAC;AAEvC,aAAW,aAAa,OAAO;AAC7B,YAAQ;AAAA,MACN,GAAI,MAAM,QAAQ,IAAI,UAAU,IAAI,CAAC,SAAS,KAAK,MAAM,MAAM,CAAC,CAAC;AAAA,IACnE;AAAA,EACF;AAEA,SAAO;AACT;AAEA,MAAM,qBAAqB,CAAC,UAAkB;AAC5C,QAAM,WAAW,OAAO,SAAS,OAAO;AACxC,SAAO,WAAW,eAAe;AACnC;AAEO,MAAM,eAAe,OAC1B,MACA,UACgC;AAChC,QAAM,QAAQ,QAAQ,OAAO,OAAO;AACpC,QAAM,aAAS;AAAA,IACb,OAAO,SAAS;AAAA,IAChB,GAAI,SAAS,SAAS,CAAC,aAAAA,QAAM,WAAW,iBAAY,CAAC,IAAI,CAAC;AAAA,EAC5D;AAEA,MAAI;AACF,UAAM,OAAO,mBAAmB,KAAK;AACrC,UAAM,UAAU,MAAM,KAAK,MAAM,MAAM;AACvC,UAAM,SAAS,eAAe,OAAO;AACrC,UAAM,MAAM,QAAQ,OAAO,OAAO;AAClC,WAAO,MAAM,4BAA4B,OAAO,OAAO,OAAO,GAAG,CAAC,GAAG;AACrE,WAAO;AAAA,EACT,SAAS,KAAK;AACZ,WAAO,IAAI,OAAO,KAAK,4BAA4B,CAAC;AACpD,WAAO,WAAO,qBAAQ,GAAG,CAAC;AAE1B,YAAQ,WAAW;AAEnB,WAAO,EAAE,IAAI,OAAO,SAAS,OAAO,aAAa,CAAC,EAAE;AAAA,EACtD;AACF;AAEA,MAAM,iBAAiB,CAAC,YACtB,QAAQ;AAAA,EACN,CAAC,KAAK,UAAU;AAAA,IACd,IAAI,IAAI,MAAM,KAAK;AAAA,IACnB,SAAS,IAAI,WAAW,KAAK;AAAA,IAC7B,aAAa,CAAC,GAAI,IAAI,eAAe,CAAC,GAAI,GAAI,KAAK,eAAe,CAAC,CAAE;AAAA,EACvE;AAAA,EACA,EAAE,IAAI,MAAM,SAAS,MAAM;AAC7B;",
  "names": ["chalk"]
  }
@@ -0,0 +1,4 @@
+ import type { Logger } from '../../../utils/logging';
+ import type { InternalLintResult } from '../internal';
+ export declare const detectBadCodeowners: () => Promise<InternalLintResult>;
+ export declare const tryDetectBadCodeowners: (_mode: "format" | "lint", logger: Logger) => Promise<InternalLintResult>;
@@ -0,0 +1,72 @@
+ "use strict";
+ var __defProp = Object.defineProperty;
+ var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
+ var __getOwnPropNames = Object.getOwnPropertyNames;
+ var __hasOwnProp = Object.prototype.hasOwnProperty;
+ var __export = (target, all) => {
+ for (var name in all)
+ __defProp(target, name, { get: all[name], enumerable: true });
+ };
+ var __copyProps = (to, from, except, desc) => {
+ if (from && typeof from === "object" || typeof from === "function") {
+ for (let key of __getOwnPropNames(from))
+ if (!__hasOwnProp.call(to, key) && key !== except)
+ __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
+ }
+ return to;
+ };
+ var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
+ var detectBadCodeowners_exports = {};
+ __export(detectBadCodeowners_exports, {
+ detectBadCodeowners: () => detectBadCodeowners,
+ tryDetectBadCodeowners: () => tryDetectBadCodeowners
+ });
+ module.exports = __toCommonJS(detectBadCodeowners_exports);
+ var import_util = require("util");
+ var import__ = require("../../..");
+ var import_project = require("../../configure/analysis/project");
+ const detectBadCodeowners = async () => {
+ const gitRoot = await import__.Git.findRoot({ dir: process.cwd() });
+ const reader = (0, import_project.createDestinationFileReader)(gitRoot ?? process.cwd());
+ const annotations = (await Promise.all(
+ [".github/CODEOWNERS", "CODEOWNERS", "docs/CODEOWNERS"].map(
+ async (filename) => {
+ const lines = (await reader(filename))?.split("\n");
+ if (lines?.some((line) => line.startsWith("- "))) {
+ return [
+ {
+ message: "CODEOWNERS file has a line starting with `- `. This is probably an autoformatting mistake, where your editor thinks this file is a markdown file and is trying to format a list item. Did you mean to use `*` instead?",
+ path: filename
+ }
+ ];
+ }
+ return [];
+ }
+ )
+ )).flat();
+ annotations.sort((a, b) => a.path.localeCompare(b.path));
+ return {
+ ok: annotations.length === 0,
+ fixable: false,
+ annotations
+ };
+ };
+ const tryDetectBadCodeowners = async (_mode, logger) => {
+ try {
+ return await detectBadCodeowners();
+ } catch (err) {
+ logger.warn("Failed to detect bad CODEOWNERS.");
+ logger.subtle((0, import_util.inspect)(err));
+ return {
+ ok: false,
+ fixable: false,
+ annotations: []
+ };
+ }
+ };
+ // Annotate the CommonJS export names for ESM import in node:
+ 0 && (module.exports = {
+ detectBadCodeowners,
+ tryDetectBadCodeowners
+ });
+ //# sourceMappingURL=detectBadCodeowners.js.map
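Note on the new lint above: tryDetectBadCodeowners flags CODEOWNERS lines that begin with `- `, which typically appear when an editor autoformats the file as if it were Markdown. The short TypeScript sketch below illustrates the condition being checked; the rule text and team name are hypothetical, and the snippet is not part of the published package.

    // Minimal illustration of the check (hypothetical CODEOWNERS rules).
    const goodRule = '*.ts @example-org/owners';  // ordinary ownership rule
    const badRule = '- *.ts @example-org/owners'; // Markdown-style bullet added by autoformatting

    const isFlagged = (line: string): boolean => line.startsWith('- ');

    console.log(isFlagged(goodRule)); // false
    console.log(isFlagged(badRule));  // true; the lint would annotate this line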
@@ -0,0 +1,7 @@
+ {
+ "version": 3,
+ "sources": ["../../../../src/cli/lint/internalLints/detectBadCodeowners.ts"],
+ "sourcesContent": ["import { inspect } from 'util';\n\nimport { Git } from '../../..';\nimport type { Logger } from '../../../utils/logging';\nimport { createDestinationFileReader } from '../../configure/analysis/project';\nimport type { InternalLintResult } from '../internal';\n\nexport const detectBadCodeowners = async (): Promise<InternalLintResult> => {\n const gitRoot = await Git.findRoot({ dir: process.cwd() });\n const reader = createDestinationFileReader(gitRoot ?? process.cwd());\n\n const annotations = (\n await Promise.all(\n ['.github/CODEOWNERS', 'CODEOWNERS', 'docs/CODEOWNERS'].map(\n async (filename) => {\n const lines = (await reader(filename))?.split('\\n');\n\n if (lines?.some((line) => line.startsWith('- '))) {\n return [\n {\n message:\n 'CODEOWNERS file has a line starting with `- `. This is probably an autoformatting mistake, where your editor thinks this file is a markdown file and is trying to format a list item. Did you mean to use `*` instead?',\n path: filename,\n },\n ];\n }\n\n return [];\n },\n ),\n )\n ).flat();\n\n // TODO: Use `toSorted` once we drop support for Node 18.\n annotations.sort((a, b) => a.path.localeCompare(b.path));\n\n return {\n ok: annotations.length === 0,\n fixable: false,\n annotations,\n };\n};\n\nexport const tryDetectBadCodeowners = async (\n _mode: 'format' | 'lint',\n logger: Logger,\n): Promise<InternalLintResult> => {\n try {\n return await detectBadCodeowners();\n } catch (err) {\n logger.warn('Failed to detect bad CODEOWNERS.');\n logger.subtle(inspect(err));\n\n return {\n ok: false,\n fixable: false,\n annotations: [],\n };\n }\n};\n"],
+ "mappings": ";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAwB;AAExB,eAAoB;AAEpB,qBAA4C;AAGrC,MAAM,sBAAsB,YAAyC;AAC1E,QAAM,UAAU,MAAM,aAAI,SAAS,EAAE,KAAK,QAAQ,IAAI,EAAE,CAAC;AACzD,QAAM,aAAS,4CAA4B,WAAW,QAAQ,IAAI,CAAC;AAEnE,QAAM,eACJ,MAAM,QAAQ;AAAA,IACZ,CAAC,sBAAsB,cAAc,iBAAiB,EAAE;AAAA,MACtD,OAAO,aAAa;AAClB,cAAM,SAAS,MAAM,OAAO,QAAQ,IAAI,MAAM,IAAI;AAElD,YAAI,OAAO,KAAK,CAAC,SAAS,KAAK,WAAW,IAAI,CAAC,GAAG;AAChD,iBAAO;AAAA,YACL;AAAA,cACE,SACE;AAAA,cACF,MAAM;AAAA,YACR;AAAA,UACF;AAAA,QACF;AAEA,eAAO,CAAC;AAAA,MACV;AAAA,IACF;AAAA,EACF,GACA,KAAK;AAGP,cAAY,KAAK,CAAC,GAAG,MAAM,EAAE,KAAK,cAAc,EAAE,IAAI,CAAC;AAEvD,SAAO;AAAA,IACL,IAAI,YAAY,WAAW;AAAA,IAC3B,SAAS;AAAA,IACT;AAAA,EACF;AACF;AAEO,MAAM,yBAAyB,OACpC,OACA,WACgC;AAChC,MAAI;AACF,WAAO,MAAM,oBAAoB;AAAA,EACnC,SAAS,KAAK;AACZ,WAAO,KAAK,kCAAkC;AAC9C,WAAO,WAAO,qBAAQ,GAAG,CAAC;AAE1B,WAAO;AAAA,MACL,IAAI;AAAA,MACJ,SAAS;AAAA,MACT,aAAa,CAAC;AAAA,IAChB;AAAA,EACF;AACF;",
+ "names": []
+ }
@@ -103,7 +103,8 @@ const isPatchableSkubaType = async () => {
  import_zod.z.object({
  skuba: import_zod.z.object({
  type: import_zod.z.string().optional()
- })
+ }),
+ files: import_zod.z.string().array().optional()
  })
  );
  if (!packageJson) {
@@ -111,6 +112,12 @@ const isPatchableSkubaType = async () => {
  "package.json not found, ensure it is in the correct location"
  );
  }
+ if (packageJson.files) {
+ import_logging.log.warn(
+ "Migrations are not supported for packages; update manually to ensure major runtime deprecations are intended"
+ );
+ return false;
+ }
  const type = packageJson?.skuba.type;
  if (!type) {
  import_logging.log.warn(
@@ -1,7 +1,7 @@
  {
  "version": 3,
  "sources": ["../../../../src/cli/migrate/nodeVersion/checks.ts"],
- "sourcesContent": ["import findUp from 'find-up';\nimport fs from 'fs-extra';\nimport { coerce, lte, satisfies } from 'semver';\nimport { type ZodRawShape, z } from 'zod';\n\nimport { log } from '../../../utils/logging';\n\nconst getParentFile = async (file: string) => {\n const path = await findUp(file, { cwd: process.cwd() });\n if (!path) {\n return undefined;\n }\n return {\n fileContent: await fs.readFile(path, 'utf-8'),\n path,\n };\n};\n\nexport const extractFromParentPackageJson = async <T extends ZodRawShape>(\n schema: z.ZodObject<T>,\n) => {\n const file = await getParentFile('package.json');\n if (!file) {\n return { packageJson: undefined, packageJsonRelativePath: undefined };\n }\n const { fileContent: packageJson, path } = file;\n let rawJSON;\n try {\n rawJSON = JSON.parse(packageJson) as unknown;\n } catch {\n throw new Error(`${path} is not valid JSON`);\n }\n const result = schema.safeParse(rawJSON);\n if (!result.success) {\n return { packageJson: undefined, packageJsonRelativePath: path };\n }\n\n return { packageJson: result.data, packageJsonRelativePath: path };\n};\n\nexport const isPatchableServerlessVersion = async (): Promise<boolean> => {\n const { packageJson, packageJsonRelativePath } =\n await extractFromParentPackageJson(\n z.object({\n devDependencies: z.object({\n serverless: z.string().optional(),\n }),\n }),\n );\n if (!packageJson) {\n throw new Error(\n 'package.json not found, ensure it is in the correct location',\n );\n }\n\n const serverlessVersion = packageJson?.devDependencies.serverless;\n\n if (!serverlessVersion) {\n log.subtle(\n `Serverless version not found in ${packageJsonRelativePath}, assuming it is not a dependency`,\n );\n return true;\n }\n\n if (!satisfies(serverlessVersion, '4.x.x')) {\n log.warn(\n `Serverless version ${serverlessVersion} cannot be migrated; use Serverless 4.x to automatically migrate Serverless files`,\n );\n return false;\n }\n\n log.ok(\n `Proceeding with migration of Serverless version ${serverlessVersion}`,\n );\n return true;\n};\n\nexport const isPatchableSkubaType = async (): Promise<boolean> => {\n const { packageJson, packageJsonRelativePath } =\n await extractFromParentPackageJson(\n z.object({\n skuba: z.object({\n type: z.string().optional(),\n }),\n }),\n );\n\n if (!packageJson) {\n throw new Error(\n 'package.json not found, ensure it is in the correct location',\n );\n }\n\n const type = packageJson?.skuba.type;\n\n if (!type) {\n log.warn(\n `skuba project type not found in ${packageJsonRelativePath}; add a package.json#/skuba/type to ensure the correct migration can be applied`,\n );\n return false;\n }\n if (type === 'package') {\n log.warn(\n 'Migrations are not supported for packages; update manually to ensure major runtime deprecations are intended',\n );\n return false;\n }\n\n log.ok(`Proceeding with migration of skuba project type ${type}`);\n return true;\n};\n\nexport const isPatchableNodeVersion = async (\n targetNodeVersion: number,\n): Promise<boolean> => {\n const nvmrcFile = await getParentFile('.nvmrc');\n const nodeVersionFile = await getParentFile('.node-version');\n const { packageJson } = await extractFromParentPackageJson(\n z.object({\n engines: z.object({\n node: z.string(),\n }),\n }),\n );\n\n const nvmrcNodeVersion = nvmrcFile?.fileContent;\n const nodeVersion = nodeVersionFile?.fileContent;\n const engineVersion = packageJson?.engines.node;\n\n const currentNodeVersion = nvmrcNodeVersion || nodeVersion || engineVersion;\n\n const coercedTargetVersion = 
coerce(targetNodeVersion.toString())?.version;\n const coercedCurrentVersion = coerce(currentNodeVersion)?.version;\n\n const isNodeVersionValid =\n coercedTargetVersion &&\n coercedCurrentVersion &&\n lte(coercedCurrentVersion, coercedTargetVersion);\n\n if (!isNodeVersionValid) {\n log.warn(\n `Node.js version ${coercedCurrentVersion ?? 'unknown'} cannot be migrated to ${coercedTargetVersion}`,\n );\n return false;\n }\n\n log.ok(\n `Proceeding with migration from Node.js ${coercedCurrentVersion} to ${coercedTargetVersion}`,\n );\n return true;\n};\n"],
- "mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,qBAAmB;AACnB,sBAAe;AACf,oBAAuC;AACvC,iBAAoC;AAEpC,qBAAoB;AAEpB,MAAM,gBAAgB,OAAO,SAAiB;AAC5C,QAAM,OAAO,UAAM,eAAAA,SAAO,MAAM,EAAE,KAAK,QAAQ,IAAI,EAAE,CAAC;AACtD,MAAI,CAAC,MAAM;AACT,WAAO;AAAA,EACT;AACA,SAAO;AAAA,IACL,aAAa,MAAM,gBAAAC,QAAG,SAAS,MAAM,OAAO;AAAA,IAC5C;AAAA,EACF;AACF;AAEO,MAAM,+BAA+B,OAC1C,WACG;AACH,QAAM,OAAO,MAAM,cAAc,cAAc;AAC/C,MAAI,CAAC,MAAM;AACT,WAAO,EAAE,aAAa,QAAW,yBAAyB,OAAU;AAAA,EACtE;AACA,QAAM,EAAE,aAAa,aAAa,KAAK,IAAI;AAC3C,MAAI;AACJ,MAAI;AACF,cAAU,KAAK,MAAM,WAAW;AAAA,EAClC,QAAQ;AACN,UAAM,IAAI,MAAM,GAAG,IAAI,oBAAoB;AAAA,EAC7C;AACA,QAAM,SAAS,OAAO,UAAU,OAAO;AACvC,MAAI,CAAC,OAAO,SAAS;AACnB,WAAO,EAAE,aAAa,QAAW,yBAAyB,KAAK;AAAA,EACjE;AAEA,SAAO,EAAE,aAAa,OAAO,MAAM,yBAAyB,KAAK;AACnE;AAEO,MAAM,+BAA+B,YAA8B;AACxE,QAAM,EAAE,aAAa,wBAAwB,IAC3C,MAAM;AAAA,IACJ,aAAE,OAAO;AAAA,MACP,iBAAiB,aAAE,OAAO;AAAA,QACxB,YAAY,aAAE,OAAO,EAAE,SAAS;AAAA,MAClC,CAAC;AAAA,IACH,CAAC;AAAA,EACH;AACF,MAAI,CAAC,aAAa;AAChB,UAAM,IAAI;AAAA,MACR;AAAA,IACF;AAAA,EACF;AAEA,QAAM,oBAAoB,aAAa,gBAAgB;AAEvD,MAAI,CAAC,mBAAmB;AACtB,uBAAI;AAAA,MACF,mCAAmC,uBAAuB;AAAA,IAC5D;AACA,WAAO;AAAA,EACT;AAEA,MAAI,KAAC,yBAAU,mBAAmB,OAAO,GAAG;AAC1C,uBAAI;AAAA,MACF,sBAAsB,iBAAiB;AAAA,IACzC;AACA,WAAO;AAAA,EACT;AAEA,qBAAI;AAAA,IACF,mDAAmD,iBAAiB;AAAA,EACtE;AACA,SAAO;AACT;AAEO,MAAM,uBAAuB,YAA8B;AAChE,QAAM,EAAE,aAAa,wBAAwB,IAC3C,MAAM;AAAA,IACJ,aAAE,OAAO;AAAA,MACP,OAAO,aAAE,OAAO;AAAA,QACd,MAAM,aAAE,OAAO,EAAE,SAAS;AAAA,MAC5B,CAAC;AAAA,IACH,CAAC;AAAA,EACH;AAEF,MAAI,CAAC,aAAa;AAChB,UAAM,IAAI;AAAA,MACR;AAAA,IACF;AAAA,EACF;AAEA,QAAM,OAAO,aAAa,MAAM;AAEhC,MAAI,CAAC,MAAM;AACT,uBAAI;AAAA,MACF,mCAAmC,uBAAuB;AAAA,IAC5D;AACA,WAAO;AAAA,EACT;AACA,MAAI,SAAS,WAAW;AACtB,uBAAI;AAAA,MACF;AAAA,IACF;AACA,WAAO;AAAA,EACT;AAEA,qBAAI,GAAG,mDAAmD,IAAI,EAAE;AAChE,SAAO;AACT;AAEO,MAAM,yBAAyB,OACpC,sBACqB;AACrB,QAAM,YAAY,MAAM,cAAc,QAAQ;AAC9C,QAAM,kBAAkB,MAAM,cAAc,eAAe;AAC3D,QAAM,EAAE,YAAY,IAAI,MAAM;AAAA,IAC5B,aAAE,OAAO;AAAA,MACP,SAAS,aAAE,OAAO;AAAA,QAChB,MAAM,aAAE,OAAO;AAAA,MACjB,CAAC;AAAA,IACH,CAAC;AAAA,EACH;AAEA,QAAM,mBAAmB,WAAW;AACpC,QAAM,cAAc,iBAAiB;AACrC,QAAM,gBAAgB,aAAa,QAAQ;AAE3C,QAAM,qBAAqB,oBAAoB,eAAe;AAE9D,QAAM,2BAAuB,sBAAO,kBAAkB,SAAS,CAAC,GAAG;AACnE,QAAM,4BAAwB,sBAAO,kBAAkB,GAAG;AAE1D,QAAM,qBACJ,wBACA,6BACA,mBAAI,uBAAuB,oBAAoB;AAEjD,MAAI,CAAC,oBAAoB;AACvB,uBAAI;AAAA,MACF,mBAAmB,yBAAyB,SAAS,0BAA0B,oBAAoB;AAAA,IACrG;AACA,WAAO;AAAA,EACT;AAEA,qBAAI;AAAA,IACF,0CAA0C,qBAAqB,OAAO,oBAAoB;AAAA,EAC5F;AACA,SAAO;AACT;",
+ "sourcesContent": ["import findUp from 'find-up';\nimport fs from 'fs-extra';\nimport { coerce, lte, satisfies } from 'semver';\nimport { type ZodRawShape, z } from 'zod';\n\nimport { log } from '../../../utils/logging';\n\nconst getParentFile = async (file: string) => {\n const path = await findUp(file, { cwd: process.cwd() });\n if (!path) {\n return undefined;\n }\n return {\n fileContent: await fs.readFile(path, 'utf-8'),\n path,\n };\n};\n\nexport const extractFromParentPackageJson = async <T extends ZodRawShape>(\n schema: z.ZodObject<T>,\n) => {\n const file = await getParentFile('package.json');\n if (!file) {\n return { packageJson: undefined, packageJsonRelativePath: undefined };\n }\n const { fileContent: packageJson, path } = file;\n let rawJSON;\n try {\n rawJSON = JSON.parse(packageJson) as unknown;\n } catch {\n throw new Error(`${path} is not valid JSON`);\n }\n const result = schema.safeParse(rawJSON);\n if (!result.success) {\n return { packageJson: undefined, packageJsonRelativePath: path };\n }\n\n return { packageJson: result.data, packageJsonRelativePath: path };\n};\n\nexport const isPatchableServerlessVersion = async (): Promise<boolean> => {\n const { packageJson, packageJsonRelativePath } =\n await extractFromParentPackageJson(\n z.object({\n devDependencies: z.object({\n serverless: z.string().optional(),\n }),\n }),\n );\n if (!packageJson) {\n throw new Error(\n 'package.json not found, ensure it is in the correct location',\n );\n }\n\n const serverlessVersion = packageJson?.devDependencies.serverless;\n\n if (!serverlessVersion) {\n log.subtle(\n `Serverless version not found in ${packageJsonRelativePath}, assuming it is not a dependency`,\n );\n return true;\n }\n\n if (!satisfies(serverlessVersion, '4.x.x')) {\n log.warn(\n `Serverless version ${serverlessVersion} cannot be migrated; use Serverless 4.x to automatically migrate Serverless files`,\n );\n return false;\n }\n\n log.ok(\n `Proceeding with migration of Serverless version ${serverlessVersion}`,\n );\n return true;\n};\n\nexport const isPatchableSkubaType = async (): Promise<boolean> => {\n const { packageJson, packageJsonRelativePath } =\n await extractFromParentPackageJson(\n z.object({\n skuba: z.object({\n type: z.string().optional(),\n }),\n files: z.string().array().optional(),\n }),\n );\n\n if (!packageJson) {\n throw new Error(\n 'package.json not found, ensure it is in the correct location',\n );\n }\n\n if (packageJson.files) {\n log.warn(\n 'Migrations are not supported for packages; update manually to ensure major runtime deprecations are intended',\n );\n return false;\n }\n\n const type = packageJson?.skuba.type;\n\n if (!type) {\n log.warn(\n `skuba project type not found in ${packageJsonRelativePath}; add a package.json#/skuba/type to ensure the correct migration can be applied`,\n );\n return false;\n }\n if (type === 'package') {\n log.warn(\n 'Migrations are not supported for packages; update manually to ensure major runtime deprecations are intended',\n );\n return false;\n }\n\n log.ok(`Proceeding with migration of skuba project type ${type}`);\n return true;\n};\n\nexport const isPatchableNodeVersion = async (\n targetNodeVersion: number,\n): Promise<boolean> => {\n const nvmrcFile = await getParentFile('.nvmrc');\n const nodeVersionFile = await getParentFile('.node-version');\n const { packageJson } = await extractFromParentPackageJson(\n z.object({\n engines: z.object({\n node: z.string(),\n }),\n }),\n );\n\n const nvmrcNodeVersion = nvmrcFile?.fileContent;\n const 
nodeVersion = nodeVersionFile?.fileContent;\n const engineVersion = packageJson?.engines.node;\n\n const currentNodeVersion = nvmrcNodeVersion || nodeVersion || engineVersion;\n\n const coercedTargetVersion = coerce(targetNodeVersion.toString())?.version;\n const coercedCurrentVersion = coerce(currentNodeVersion)?.version;\n\n const isNodeVersionValid =\n coercedTargetVersion &&\n coercedCurrentVersion &&\n lte(coercedCurrentVersion, coercedTargetVersion);\n\n if (!isNodeVersionValid) {\n log.warn(\n `Node.js version ${coercedCurrentVersion ?? 'unknown'} cannot be migrated to ${coercedTargetVersion}`,\n );\n return false;\n }\n\n log.ok(\n `Proceeding with migration from Node.js ${coercedCurrentVersion} to ${coercedTargetVersion}`,\n );\n return true;\n};\n"],
+ "mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,qBAAmB;AACnB,sBAAe;AACf,oBAAuC;AACvC,iBAAoC;AAEpC,qBAAoB;AAEpB,MAAM,gBAAgB,OAAO,SAAiB;AAC5C,QAAM,OAAO,UAAM,eAAAA,SAAO,MAAM,EAAE,KAAK,QAAQ,IAAI,EAAE,CAAC;AACtD,MAAI,CAAC,MAAM;AACT,WAAO;AAAA,EACT;AACA,SAAO;AAAA,IACL,aAAa,MAAM,gBAAAC,QAAG,SAAS,MAAM,OAAO;AAAA,IAC5C;AAAA,EACF;AACF;AAEO,MAAM,+BAA+B,OAC1C,WACG;AACH,QAAM,OAAO,MAAM,cAAc,cAAc;AAC/C,MAAI,CAAC,MAAM;AACT,WAAO,EAAE,aAAa,QAAW,yBAAyB,OAAU;AAAA,EACtE;AACA,QAAM,EAAE,aAAa,aAAa,KAAK,IAAI;AAC3C,MAAI;AACJ,MAAI;AACF,cAAU,KAAK,MAAM,WAAW;AAAA,EAClC,QAAQ;AACN,UAAM,IAAI,MAAM,GAAG,IAAI,oBAAoB;AAAA,EAC7C;AACA,QAAM,SAAS,OAAO,UAAU,OAAO;AACvC,MAAI,CAAC,OAAO,SAAS;AACnB,WAAO,EAAE,aAAa,QAAW,yBAAyB,KAAK;AAAA,EACjE;AAEA,SAAO,EAAE,aAAa,OAAO,MAAM,yBAAyB,KAAK;AACnE;AAEO,MAAM,+BAA+B,YAA8B;AACxE,QAAM,EAAE,aAAa,wBAAwB,IAC3C,MAAM;AAAA,IACJ,aAAE,OAAO;AAAA,MACP,iBAAiB,aAAE,OAAO;AAAA,QACxB,YAAY,aAAE,OAAO,EAAE,SAAS;AAAA,MAClC,CAAC;AAAA,IACH,CAAC;AAAA,EACH;AACF,MAAI,CAAC,aAAa;AAChB,UAAM,IAAI;AAAA,MACR;AAAA,IACF;AAAA,EACF;AAEA,QAAM,oBAAoB,aAAa,gBAAgB;AAEvD,MAAI,CAAC,mBAAmB;AACtB,uBAAI;AAAA,MACF,mCAAmC,uBAAuB;AAAA,IAC5D;AACA,WAAO;AAAA,EACT;AAEA,MAAI,KAAC,yBAAU,mBAAmB,OAAO,GAAG;AAC1C,uBAAI;AAAA,MACF,sBAAsB,iBAAiB;AAAA,IACzC;AACA,WAAO;AAAA,EACT;AAEA,qBAAI;AAAA,IACF,mDAAmD,iBAAiB;AAAA,EACtE;AACA,SAAO;AACT;AAEO,MAAM,uBAAuB,YAA8B;AAChE,QAAM,EAAE,aAAa,wBAAwB,IAC3C,MAAM;AAAA,IACJ,aAAE,OAAO;AAAA,MACP,OAAO,aAAE,OAAO;AAAA,QACd,MAAM,aAAE,OAAO,EAAE,SAAS;AAAA,MAC5B,CAAC;AAAA,MACD,OAAO,aAAE,OAAO,EAAE,MAAM,EAAE,SAAS;AAAA,IACrC,CAAC;AAAA,EACH;AAEF,MAAI,CAAC,aAAa;AAChB,UAAM,IAAI;AAAA,MACR;AAAA,IACF;AAAA,EACF;AAEA,MAAI,YAAY,OAAO;AACrB,uBAAI;AAAA,MACF;AAAA,IACF;AACA,WAAO;AAAA,EACT;AAEA,QAAM,OAAO,aAAa,MAAM;AAEhC,MAAI,CAAC,MAAM;AACT,uBAAI;AAAA,MACF,mCAAmC,uBAAuB;AAAA,IAC5D;AACA,WAAO;AAAA,EACT;AACA,MAAI,SAAS,WAAW;AACtB,uBAAI;AAAA,MACF;AAAA,IACF;AACA,WAAO;AAAA,EACT;AAEA,qBAAI,GAAG,mDAAmD,IAAI,EAAE;AAChE,SAAO;AACT;AAEO,MAAM,yBAAyB,OACpC,sBACqB;AACrB,QAAM,YAAY,MAAM,cAAc,QAAQ;AAC9C,QAAM,kBAAkB,MAAM,cAAc,eAAe;AAC3D,QAAM,EAAE,YAAY,IAAI,MAAM;AAAA,IAC5B,aAAE,OAAO;AAAA,MACP,SAAS,aAAE,OAAO;AAAA,QAChB,MAAM,aAAE,OAAO;AAAA,MACjB,CAAC;AAAA,IACH,CAAC;AAAA,EACH;AAEA,QAAM,mBAAmB,WAAW;AACpC,QAAM,cAAc,iBAAiB;AACrC,QAAM,gBAAgB,aAAa,QAAQ;AAE3C,QAAM,qBAAqB,oBAAoB,eAAe;AAE9D,QAAM,2BAAuB,sBAAO,kBAAkB,SAAS,CAAC,GAAG;AACnE,QAAM,4BAAwB,sBAAO,kBAAkB,GAAG;AAE1D,QAAM,qBACJ,wBACA,6BACA,mBAAI,uBAAuB,oBAAoB;AAEjD,MAAI,CAAC,oBAAoB;AACvB,uBAAI;AAAA,MACF,mBAAmB,yBAAyB,SAAS,0BAA0B,oBAAoB;AAAA,IACrG;AACA,WAAO;AAAA,EACT;AAEA,qBAAI;AAAA,IACF,0CAA0C,qBAAqB,OAAO,oBAAoB;AAAA,EAC5F;AACA,SAAO;AACT;",
  "names": ["findUp", "fs"]
  }
@@ -1,9 +1,7 @@
  "use strict";
- var __create = Object.create;
  var __defProp = Object.defineProperty;
  var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
  var __getOwnPropNames = Object.getOwnPropertyNames;
- var __getProtoOf = Object.getPrototypeOf;
  var __hasOwnProp = Object.prototype.hasOwnProperty;
  var __export = (target, all) => {
  for (var name in all)
@@ -17,14 +15,6 @@ var __copyProps = (to, from, except, desc) => {
  }
  return to;
  };
- var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(
- // If the importer is in node compatibility mode or this is not an ESM
- // file that has been converted to a CommonJS file using a Babel-
- // compatible transform (i.e. "__esModule" has not been set), then set
- // "default" to the CommonJS "module.exports" for node compatibility.
- isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target,
- mod
- ));
  var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
  var getNodeTypesVersion_exports = {};
  __export(getNodeTypesVersion_exports, {
@@ -32,34 +22,24 @@ __export(getNodeTypesVersion_exports, {
  });
  module.exports = __toCommonJS(getNodeTypesVersion_exports);
  var import_util = require("util");
- var import_npm_registry_fetch = __toESM(require("npm-registry-fetch"));
  var import_semver = require("semver");
- var import_zod = require("zod");
  var import_logging = require("../../../utils/logging");
- const NpmFetchResponse = import_zod.z.object({
- versions: import_zod.z.record(
- import_zod.z.string(),
- import_zod.z.object({
- name: import_zod.z.string(),
- version: import_zod.z.string(),
- deprecated: import_zod.z.string().optional()
- })
- )
- });
+ var import_version = require("../../../utils/version");
  const getNodeTypesVersion = async (major, defaultVersion) => {
  try {
- const response = await import_npm_registry_fetch.default.json("@types/node", {
- headers: {
- Accept: "application/vnd.npm.install-v1+json; q=1.0, application/json; q=0.8, */*"
- }
- });
- const parsedVersions = NpmFetchResponse.safeParse(response);
- if (!parsedVersions.success) {
- throw new Error("Failed to parse @types/node response from npm");
- }
- const { version } = Object.values(parsedVersions.data.versions).filter(
+ const versions = await (0, import_version.getNpmVersions)("@types/node");
+ const matchingVersions = Object.values(versions ?? {}).filter(
  (v) => (0, import_semver.valid)(v.version) && (0, import_semver.satisfies)(v.version, `${major}.x.x`) && !v.deprecated
- ).reduce((a, b) => (0, import_semver.gt)(a.version, b.version) ? a : b);
+ );
+ if (!matchingVersions.length) {
+ return {
+ version: defaultVersion,
+ err: `No matching @types/node versions for Node.js ${major}`
+ };
+ }
+ const { version } = matchingVersions.reduce(
+ (a, b) => (0, import_semver.gt)(a.version, b.version) ? a : b
+ );
  return {
  version
  };
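For context, getNodeTypesVersion now delegates registry access to the shared getNpmVersions helper and selects the highest valid, non-deprecated @types/node release for the target major via semver, falling back to defaultVersion when nothing matches. The standalone sketch below mirrors that selection step under made-up version data; it is illustrative only and does not call the real registry helper.

    import { gt, satisfies, valid } from 'semver';

    type VersionRecord = { version: string; deprecated?: string };

    // Hypothetical registry data shaped like the "versions" map filtered above.
    const versions: Record<string, VersionRecord> = {
      '22.13.0': { version: '22.13.0' },
      '22.13.1': { version: '22.13.1' },
      '20.17.6': { version: '20.17.6', deprecated: 'example deprecation notice' },
    };

    const major = 22;

    // Keep valid, matching, non-deprecated versions, then reduce to the highest.
    const matching = Object.values(versions).filter(
      (v) => valid(v.version) && satisfies(v.version, `${major}.x.x`) && !v.deprecated,
    );

    const latest = matching.reduce((a, b) => (gt(a.version, b.version) ? a : b));
    console.log(latest.version); // "22.13.1"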
@@ -1,7 +1,7 @@
  {
  "version": 3,
  "sources": ["../../../../src/cli/migrate/nodeVersion/getNodeTypesVersion.ts"],
- "sourcesContent": ["import { inspect } from 'util';\n\nimport npmFetch from 'npm-registry-fetch';\nimport { gt, satisfies, valid } from 'semver';\nimport { z } from 'zod';\n\nimport { log } from '../../../utils/logging';\n\ntype VersionResult = {\n version: string;\n err?: string;\n};\n\nconst NpmFetchResponse = z.object({\n versions: z.record(\n z.string(),\n z.object({\n name: z.string(),\n version: z.string(),\n deprecated: z.string().optional(),\n }),\n ),\n});\n\nexport const getNodeTypesVersion = async (\n major: number,\n defaultVersion: string,\n): Promise<VersionResult> => {\n try {\n const response = await npmFetch.json('@types/node', {\n headers: {\n Accept:\n 'application/vnd.npm.install-v1+json; q=1.0, application/json; q=0.8, */*',\n },\n });\n\n const parsedVersions = NpmFetchResponse.safeParse(response);\n if (!parsedVersions.success) {\n throw new Error('Failed to parse @types/node response from npm');\n }\n\n const { version } = Object.values(parsedVersions.data.versions)\n .filter(\n (v) =>\n valid(v.version) &&\n satisfies(v.version, `${major}.x.x`) &&\n !v.deprecated,\n )\n .reduce((a, b) => (gt(a.version, b.version) ? a : b));\n\n return {\n version,\n };\n } catch (err) {\n log.subtle(inspect(err));\n return {\n version: defaultVersion,\n err: `Failed to fetch latest @types/node version, using fallback version ${defaultVersion}`,\n };\n }\n};\n"],
- "mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAwB;AAExB,gCAAqB;AACrB,oBAAqC;AACrC,iBAAkB;AAElB,qBAAoB;AAOpB,MAAM,mBAAmB,aAAE,OAAO;AAAA,EAChC,UAAU,aAAE;AAAA,IACV,aAAE,OAAO;AAAA,IACT,aAAE,OAAO;AAAA,MACP,MAAM,aAAE,OAAO;AAAA,MACf,SAAS,aAAE,OAAO;AAAA,MAClB,YAAY,aAAE,OAAO,EAAE,SAAS;AAAA,IAClC,CAAC;AAAA,EACH;AACF,CAAC;AAEM,MAAM,sBAAsB,OACjC,OACA,mBAC2B;AAC3B,MAAI;AACF,UAAM,WAAW,MAAM,0BAAAA,QAAS,KAAK,eAAe;AAAA,MAClD,SAAS;AAAA,QACP,QACE;AAAA,MACJ;AAAA,IACF,CAAC;AAED,UAAM,iBAAiB,iBAAiB,UAAU,QAAQ;AAC1D,QAAI,CAAC,eAAe,SAAS;AAC3B,YAAM,IAAI,MAAM,+CAA+C;AAAA,IACjE;AAEA,UAAM,EAAE,QAAQ,IAAI,OAAO,OAAO,eAAe,KAAK,QAAQ,EAC3D;AAAA,MACC,CAAC,UACC,qBAAM,EAAE,OAAO,SACf,yBAAU,EAAE,SAAS,GAAG,KAAK,MAAM,KACnC,CAAC,EAAE;AAAA,IACP,EACC,OAAO,CAAC,GAAG,UAAO,kBAAG,EAAE,SAAS,EAAE,OAAO,IAAI,IAAI,CAAE;AAEtD,WAAO;AAAA,MACL;AAAA,IACF;AAAA,EACF,SAAS,KAAK;AACZ,uBAAI,WAAO,qBAAQ,GAAG,CAAC;AACvB,WAAO;AAAA,MACL,SAAS;AAAA,MACT,KAAK,sEAAsE,cAAc;AAAA,IAC3F;AAAA,EACF;AACF;",
- "names": ["npmFetch"]
+ "sourcesContent": ["import { inspect } from 'util';\n\nimport { gt, satisfies, valid } from 'semver';\n\nimport { log } from '../../../utils/logging';\nimport { getNpmVersions } from '../../../utils/version';\n\ntype VersionResult = {\n version: string;\n err?: string;\n};\n\nexport const getNodeTypesVersion = async (\n major: number,\n defaultVersion: string,\n): Promise<VersionResult> => {\n try {\n const versions = await getNpmVersions('@types/node');\n\n const matchingVersions = Object.values(versions ?? {}).filter(\n (v) =>\n valid(v.version) &&\n satisfies(v.version, `${major}.x.x`) &&\n !v.deprecated,\n );\n\n if (!matchingVersions.length) {\n return {\n version: defaultVersion,\n err: `No matching @types/node versions for Node.js ${major}`,\n };\n }\n\n const { version } = matchingVersions.reduce((a, b) =>\n gt(a.version, b.version) ? a : b,\n );\n\n return {\n version,\n };\n } catch (err) {\n log.subtle(inspect(err));\n return {\n version: defaultVersion,\n err: `Failed to fetch latest @types/node version, using fallback version ${defaultVersion}`,\n };\n }\n};\n"],
+ "mappings": ";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAwB;AAExB,oBAAqC;AAErC,qBAAoB;AACpB,qBAA+B;AAOxB,MAAM,sBAAsB,OACjC,OACA,mBAC2B;AAC3B,MAAI;AACF,UAAM,WAAW,UAAM,+BAAe,aAAa;AAEnD,UAAM,mBAAmB,OAAO,OAAO,YAAY,CAAC,CAAC,EAAE;AAAA,MACrD,CAAC,UACC,qBAAM,EAAE,OAAO,SACf,yBAAU,EAAE,SAAS,GAAG,KAAK,MAAM,KACnC,CAAC,EAAE;AAAA,IACP;AAEA,QAAI,CAAC,iBAAiB,QAAQ;AAC5B,aAAO;AAAA,QACL,SAAS;AAAA,QACT,KAAK,gDAAgD,KAAK;AAAA,MAC5D;AAAA,IACF;AAEA,UAAM,EAAE,QAAQ,IAAI,iBAAiB;AAAA,MAAO,CAAC,GAAG,UAC9C,kBAAG,EAAE,SAAS,EAAE,OAAO,IAAI,IAAI;AAAA,IACjC;AAEA,WAAO;AAAA,MACL;AAAA,IACF;AAAA,EACF,SAAS,KAAK;AACZ,uBAAI,WAAO,qBAAQ,GAAG,CAAC;AACvB,WAAO;AAAA,MACL,SAAS;AAAA,MACT,KAAK,sEAAsE,cAAc;AAAA,IAC3F;AAAA,EACF;AACF;",
+ "names": []
  }
@@ -35,6 +35,7 @@ var import_util = require("util");
  var import_fast_glob = require("fast-glob");
  var import_fs_extra = __toESM(require("fs-extra"));
  var import_logging = require("../../../utils/logging");
+ var import_packageManager = require("../../../utils/packageManager");
  var import_project = require("../../configure/analysis/project");
  var import_checks = require("./checks");
  var import_getNodeTypesVersion = require("./getNodeTypesVersion");
@@ -89,13 +90,13 @@ const subPatches = ({
  },
  {
  files: "**/package.json",
- regex: /(\\?"@types\/node\\?": \\?")(\^)?[0-9.]+(\\?(",?)\\?n?)/gm,
+ regex: /("@types\/node":\s*")(\^)?(\d+\.\d+\.\d+)(")/gm,
  tests: [import_checks.isPatchableServerlessVersion],
  replace: `$1$2${nodeTypesVersion}$4`
  },
  {
  files: "**/package.json",
- regex: /(\\?"engines\\?":\s*{\\?n?[^}]*\\?"node\\?":\s*\\?">=)(\d+(?:\.\d+)*)\\?("[^}]*})(?![^}]*\\?"skuba\\?":\s*{\\?n?[^}]*\\?"type\\?":\s*\\?"package\\?")/gm,
+ regex: /(["']engines["']:\s*{[\s\S]*?["']node["']:\s*["']>=)(\d+(?:\.\d+)*)(['"]\s*})/gm,
  tests: [import_checks.isPatchableServerlessVersion, import_checks.isPatchableSkubaType],
  replace: `$1${nodeVersion}$3`
  },
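Both package.json patterns above were simplified in this version: the @types/node regex no longer has to cope with escaped quotes, and the engines regex drops the negative lookahead for skuba packages (that exclusion now appears to be handled by the new files check in checks.js earlier in this diff). As a quick sanity check, the sketch below applies the new regexes to a made-up package.json fragment; nodeVersion and nodeTypesVersion are placeholder values rather than anything skuba would actually resolve.

    // Hypothetical input; the real patch walks **/package.json files on disk.
    const pkg = `{
      "engines": { "node": ">=20" },
      "devDependencies": { "@types/node": "^20.14.0" }
    }`;

    const nodeVersion = 22;            // placeholder target major
    const nodeTypesVersion = '22.9.0'; // placeholder @types/node version

    const typesRegex = /("@types\/node":\s*")(\^)?(\d+\.\d+\.\d+)(")/gm;
    const enginesRegex = /(["']engines["']:\s*{[\s\S]*?["']node["']:\s*["']>=)(\d+(?:\.\d+)*)(['"]\s*})/gm;

    const patched = pkg
      .replace(typesRegex, `$1$2${nodeTypesVersion}$4`)
      .replace(enginesRegex, `$1${nodeVersion}$3`);

    console.log(patched); // "node": ">=22" and "@types/node": "^22.9.0"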
@@ -179,6 +180,7 @@ const nodeVersionMigration = async ({
  import_logging.log.warn(err);
  }
  await upgrade({ nodeVersion, nodeTypesVersion, ECMAScriptVersion }, dir);
+ await (0, import_packageManager.relock)(dir);
  import_logging.log.ok("Upgraded to Node.js", nodeVersion);
  } catch (error) {
  import_logging.log.err("Failed to upgrade");
@@ -1,7 +1,7 @@
  {
  "version": 3,
  "sources": ["../../../../src/cli/migrate/nodeVersion/index.ts"],
- "sourcesContent": ["import { inspect } from 'util';\n\nimport { glob } from 'fast-glob';\nimport fs from 'fs-extra';\n\nimport { log } from '../../../utils/logging';\nimport { createDestinationFileReader } from '../../configure/analysis/project';\n\nimport {\n isPatchableNodeVersion,\n isPatchableServerlessVersion,\n isPatchableSkubaType,\n} from './checks';\nimport { getNodeTypesVersion } from './getNodeTypesVersion';\n\ntype FileSelector =\n | { files: string; file?: never }\n | { file: string; files?: never };\n\ntype SubPatch = FileSelector & {\n tests?: Array<() => Promise<boolean>>;\n regex?: RegExp;\n replace: string;\n};\n\nconst subPatches = ({\n nodeVersion,\n nodeTypesVersion,\n ECMAScriptVersion,\n}: Versions): SubPatch[] => [\n { file: '.nvmrc', replace: `${nodeVersion}\\n` },\n {\n files: '**/Dockerfile*',\n\n regex:\n /^FROM(.*) (public.ecr.aws\\/docker\\/library\\/)?node:([0-9]+(?:\\.[0-9]+(?:\\.[0-9]+)?)?)(-[a-z0-9]+)?(@sha256:[a-f0-9]{64})?( .*)?$/gm,\n replace: `FROM$1 $2node:${nodeVersion}$4$6`,\n },\n {\n files: '**/Dockerfile*',\n regex:\n /^FROM(.*) gcr.io\\/distroless\\/nodejs\\d+-debian(\\d+)(@sha256:[a-f0-9]{64})?(\\.[^- \\n]+)?(-[^ \\n]+)?( .+|)$/gm,\n replace: `FROM$1 gcr.io/distroless/nodejs${nodeVersion}-debian$2$4$5$6`,\n },\n\n {\n files: '**/serverless*.y*ml',\n regex: /\\bnodejs\\d+.x\\b/gm,\n tests: [isPatchableServerlessVersion],\n replace: `nodejs${nodeVersion}.x`,\n },\n {\n files: '**/serverless*.y*ml',\n regex: /\\bnode\\d+\\b/gm,\n tests: [isPatchableServerlessVersion],\n replace: `node${nodeVersion}`,\n },\n\n {\n files: '**/infra/**/*.ts',\n regex: /NODEJS_\\d+_X/g,\n replace: `NODEJS_${nodeVersion}_X`,\n },\n {\n files: '**/infra/**/*.ts',\n regex: /(target:\\s*'node)(\\d+)(.+)$/gm,\n replace: `$1${nodeVersion}$3`,\n },\n\n {\n files: '**/.buildkite/*',\n regex:\n /(image: )(public.ecr.aws\\/docker\\/library\\/)?(node:)[0-9.]+(\\.[^- \\n]+)?(-[^ \\n]+)?$/gm,\n replace: `$1$2$3${nodeVersion}$5`,\n },\n {\n files: '.node-version*',\n regex: /(\\d+(?:\\.\\d+)*)/g,\n replace: `${nodeVersion}`,\n },\n\n {\n files: '**/package.json',\n regex: /(\\\\?\"@types\\/node\\\\?\": \\\\?\")(\\^)?[0-9.]+(\\\\?(\",?)\\\\?n?)/gm,\n tests: [isPatchableServerlessVersion],\n replace: `$1$2${nodeTypesVersion}$4`,\n },\n {\n files: '**/package.json',\n regex:\n /(\\\\?\"engines\\\\?\":\\s*{\\\\?n?[^}]*\\\\?\"node\\\\?\":\\s*\\\\?\">=)(\\d+(?:\\.\\d+)*)\\\\?(\"[^}]*})(?![^}]*\\\\?\"skuba\\\\?\":\\s*{\\\\?n?[^}]*\\\\?\"type\\\\?\":\\s*\\\\?\"package\\\\?\")/gm,\n tests: [isPatchableServerlessVersion, isPatchableSkubaType],\n replace: `$1${nodeVersion}$3`,\n },\n\n {\n files: '**/tsconfig*.json',\n regex: /(\"target\":\\s*\")(ES\\d+)\"/gim,\n tests: [isPatchableServerlessVersion, isPatchableSkubaType],\n replace: `$1${ECMAScriptVersion}\"`,\n },\n {\n files: '**/tsconfig*.json',\n regex: /(\"lib\":\\s*\\[)([\\S\\s]*?)(ES\\d+)([\\S\\s]*?)(\\])/gim,\n tests: [isPatchableServerlessVersion, isPatchableSkubaType],\n replace: `$1$2${ECMAScriptVersion}$4$5`,\n },\n\n {\n files: '**/docker-compose*.y*ml',\n regex:\n /(image: )(public.ecr.aws\\/docker\\/library\\/)?(node:)[0-9.]+(\\.[^- \\n]+)?(-[^ \\n]+)?$/gm,\n\n replace: `$1$2$3${nodeVersion}$5`,\n },\n];\n\ntype Versions = {\n nodeVersion: number;\n nodeTypesVersion: string;\n ECMAScriptVersion: string;\n};\n\nconst runSubPatch = async (dir: string, patch: SubPatch) => {\n const readFile = createDestinationFileReader(dir);\n const paths = patch.file\n ? [patch.file]\n : await glob(patch.files ?? 
[], { cwd: dir });\n\n await Promise.all(\n paths.map(async (path) => {\n if (path.includes('node_modules')) {\n return;\n }\n const contents = await readFile(path);\n if (!contents) {\n return;\n }\n\n if (patch.regex && !patch.regex.test(contents)) {\n return;\n }\n\n if (patch.tests) {\n const results = await Promise.all(patch.tests.map((test) => test()));\n if (!results.every(Boolean)) {\n return;\n }\n }\n\n await writePatchedContents({\n path,\n contents,\n templated: patch.replace,\n regex: patch.regex,\n });\n }),\n );\n};\n\nconst writePatchedContents = async ({\n path,\n contents,\n templated,\n regex,\n}: {\n path: string;\n contents: string;\n templated: string;\n regex?: RegExp;\n}) =>\n await fs.promises.writeFile(\n path,\n regex ? contents.replaceAll(regex, templated) : templated,\n );\n\nconst upgrade = async (versions: Versions, dir: string) => {\n for (const subPatch of subPatches(versions)) {\n await runSubPatch(dir, subPatch);\n }\n};\n\nexport const nodeVersionMigration = async (\n {\n nodeVersion,\n ECMAScriptVersion,\n defaultNodeTypesVersion,\n }: {\n nodeVersion: number;\n ECMAScriptVersion: string;\n defaultNodeTypesVersion: string;\n },\n dir = process.cwd(),\n) => {\n log.ok(`Upgrading to Node.js ${nodeVersion}`);\n try {\n if (!(await isPatchableNodeVersion(nodeVersion))) {\n throw new Error('Node.js version is not patchable');\n }\n\n const { version: nodeTypesVersion, err } = await getNodeTypesVersion(\n nodeVersion,\n defaultNodeTypesVersion,\n );\n if (err) {\n log.warn(err);\n }\n await upgrade({ nodeVersion, nodeTypesVersion, ECMAScriptVersion }, dir);\n log.ok('Upgraded to Node.js', nodeVersion);\n } catch (error) {\n log.err('Failed to upgrade');\n log.subtle(inspect(error));\n process.exitCode = 1;\n }\n};\n"],
5
- "mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAwB;AAExB,uBAAqB;AACrB,sBAAe;AAEf,qBAAoB;AACpB,qBAA4C;AAE5C,oBAIO;AACP,iCAAoC;AAYpC,MAAM,aAAa,CAAC;AAAA,EAClB;AAAA,EACA;AAAA,EACA;AACF,MAA4B;AAAA,EAC1B,EAAE,MAAM,UAAU,SAAS,GAAG,WAAW;AAAA,EAAK;AAAA,EAC9C;AAAA,IACE,OAAO;AAAA,IAEP,OACE;AAAA,IACF,SAAS,iBAAiB,WAAW;AAAA,EACvC;AAAA,EACA;AAAA,IACE,OAAO;AAAA,IACP,OACE;AAAA,IACF,SAAS,kCAAkC,WAAW;AAAA,EACxD;AAAA,EAEA;AAAA,IACE,OAAO;AAAA,IACP,OAAO;AAAA,IACP,OAAO,CAAC,0CAA4B;AAAA,IACpC,SAAS,SAAS,WAAW;AAAA,EAC/B;AAAA,EACA;AAAA,IACE,OAAO;AAAA,IACP,OAAO;AAAA,IACP,OAAO,CAAC,0CAA4B;AAAA,IACpC,SAAS,OAAO,WAAW;AAAA,EAC7B;AAAA,EAEA;AAAA,IACE,OAAO;AAAA,IACP,OAAO;AAAA,IACP,SAAS,UAAU,WAAW;AAAA,EAChC;AAAA,EACA;AAAA,IACE,OAAO;AAAA,IACP,OAAO;AAAA,IACP,SAAS,KAAK,WAAW;AAAA,EAC3B;AAAA,EAEA;AAAA,IACE,OAAO;AAAA,IACP,OACE;AAAA,IACF,SAAS,SAAS,WAAW;AAAA,EAC/B;AAAA,EACA;AAAA,IACE,OAAO;AAAA,IACP,OAAO;AAAA,IACP,SAAS,GAAG,WAAW;AAAA,EACzB;AAAA,EAEA;AAAA,IACE,OAAO;AAAA,IACP,OAAO;AAAA,IACP,OAAO,CAAC,0CAA4B;AAAA,IACpC,SAAS,OAAO,gBAAgB;AAAA,EAClC;AAAA,EACA;AAAA,IACE,OAAO;AAAA,IACP,OACE;AAAA,IACF,OAAO,CAAC,4CAA8B,kCAAoB;AAAA,IAC1D,SAAS,KAAK,WAAW;AAAA,EAC3B;AAAA,EAEA;AAAA,IACE,OAAO;AAAA,IACP,OAAO;AAAA,IACP,OAAO,CAAC,4CAA8B,kCAAoB;AAAA,IAC1D,SAAS,KAAK,iBAAiB;AAAA,EACjC;AAAA,EACA;AAAA,IACE,OAAO;AAAA,IACP,OAAO;AAAA,IACP,OAAO,CAAC,4CAA8B,kCAAoB;AAAA,IAC1D,SAAS,OAAO,iBAAiB;AAAA,EACnC;AAAA,EAEA;AAAA,IACE,OAAO;AAAA,IACP,OACE;AAAA,IAEF,SAAS,SAAS,WAAW;AAAA,EAC/B;AACF;AAQA,MAAM,cAAc,OAAO,KAAa,UAAoB;AAC1D,QAAM,eAAW,4CAA4B,GAAG;AAChD,QAAM,QAAQ,MAAM,OAChB,CAAC,MAAM,IAAI,IACX,UAAM,uBAAK,MAAM,SAAS,CAAC,GAAG,EAAE,KAAK,IAAI,CAAC;AAE9C,QAAM,QAAQ;AAAA,IACZ,MAAM,IAAI,OAAO,SAAS;AACxB,UAAI,KAAK,SAAS,cAAc,GAAG;AACjC;AAAA,MACF;AACA,YAAM,WAAW,MAAM,SAAS,IAAI;AACpC,UAAI,CAAC,UAAU;AACb;AAAA,MACF;AAEA,UAAI,MAAM,SAAS,CAAC,MAAM,MAAM,KAAK,QAAQ,GAAG;AAC9C;AAAA,MACF;AAEA,UAAI,MAAM,OAAO;AACf,cAAM,UAAU,MAAM,QAAQ,IAAI,MAAM,MAAM,IAAI,CAAC,SAAS,KAAK,CAAC,CAAC;AACnE,YAAI,CAAC,QAAQ,MAAM,OAAO,GAAG;AAC3B;AAAA,QACF;AAAA,MACF;AAEA,YAAM,qBAAqB;AAAA,QACzB;AAAA,QACA;AAAA,QACA,WAAW,MAAM;AAAA,QACjB,OAAO,MAAM;AAAA,MACf,CAAC;AAAA,IACH,CAAC;AAAA,EACH;AACF;AAEA,MAAM,uBAAuB,OAAO;AAAA,EAClC;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF,MAME,MAAM,gBAAAA,QAAG,SAAS;AAAA,EAChB;AAAA,EACA,QAAQ,SAAS,WAAW,OAAO,SAAS,IAAI;AAClD;AAEF,MAAM,UAAU,OAAO,UAAoB,QAAgB;AACzD,aAAW,YAAY,WAAW,QAAQ,GAAG;AAC3C,UAAM,YAAY,KAAK,QAAQ;AAAA,EACjC;AACF;AAEO,MAAM,uBAAuB,OAClC;AAAA,EACE;AAAA,EACA;AAAA,EACA;AACF,GAKA,MAAM,QAAQ,IAAI,MACf;AACH,qBAAI,GAAG,wBAAwB,WAAW,EAAE;AAC5C,MAAI;AACF,QAAI,CAAE,UAAM,sCAAuB,WAAW,GAAI;AAChD,YAAM,IAAI,MAAM,kCAAkC;AAAA,IACpD;AAEA,UAAM,EAAE,SAAS,kBAAkB,IAAI,IAAI,UAAM;AAAA,MAC/C;AAAA,MACA;AAAA,IACF;AACA,QAAI,KAAK;AACP,yBAAI,KAAK,GAAG;AAAA,IACd;AACA,UAAM,QAAQ,EAAE,aAAa,kBAAkB,kBAAkB,GAAG,GAAG;AACvE,uBAAI,GAAG,uBAAuB,WAAW;AAAA,EAC3C,SAAS,OAAO;AACd,uBAAI,IAAI,mBAAmB;AAC3B,uBAAI,WAAO,qBAAQ,KAAK,CAAC;AACzB,YAAQ,WAAW;AAAA,EACrB;AACF;",
4
+ "sourcesContent": ["import { inspect } from 'util';\n\nimport { glob } from 'fast-glob';\nimport fs from 'fs-extra';\n\nimport { log } from '../../../utils/logging';\nimport { relock } from '../../../utils/packageManager';\nimport { createDestinationFileReader } from '../../configure/analysis/project';\n\nimport {\n isPatchableNodeVersion,\n isPatchableServerlessVersion,\n isPatchableSkubaType,\n} from './checks';\nimport { getNodeTypesVersion } from './getNodeTypesVersion';\n\ntype FileSelector =\n | { files: string; file?: never }\n | { file: string; files?: never };\n\ntype SubPatch = FileSelector & {\n tests?: Array<() => Promise<boolean>>;\n regex?: RegExp;\n replace: string;\n};\n\nconst subPatches = ({\n nodeVersion,\n nodeTypesVersion,\n ECMAScriptVersion,\n}: Versions): SubPatch[] => [\n { file: '.nvmrc', replace: `${nodeVersion}\\n` },\n {\n files: '**/Dockerfile*',\n\n regex:\n /^FROM(.*) (public.ecr.aws\\/docker\\/library\\/)?node:([0-9]+(?:\\.[0-9]+(?:\\.[0-9]+)?)?)(-[a-z0-9]+)?(@sha256:[a-f0-9]{64})?( .*)?$/gm,\n replace: `FROM$1 $2node:${nodeVersion}$4$6`,\n },\n {\n files: '**/Dockerfile*',\n regex:\n /^FROM(.*) gcr.io\\/distroless\\/nodejs\\d+-debian(\\d+)(@sha256:[a-f0-9]{64})?(\\.[^- \\n]+)?(-[^ \\n]+)?( .+|)$/gm,\n replace: `FROM$1 gcr.io/distroless/nodejs${nodeVersion}-debian$2$4$5$6`,\n },\n\n {\n files: '**/serverless*.y*ml',\n regex: /\\bnodejs\\d+.x\\b/gm,\n tests: [isPatchableServerlessVersion],\n replace: `nodejs${nodeVersion}.x`,\n },\n {\n files: '**/serverless*.y*ml',\n regex: /\\bnode\\d+\\b/gm,\n tests: [isPatchableServerlessVersion],\n replace: `node${nodeVersion}`,\n },\n\n {\n files: '**/infra/**/*.ts',\n regex: /NODEJS_\\d+_X/g,\n replace: `NODEJS_${nodeVersion}_X`,\n },\n {\n files: '**/infra/**/*.ts',\n regex: /(target:\\s*'node)(\\d+)(.+)$/gm,\n replace: `$1${nodeVersion}$3`,\n },\n\n {\n files: '**/.buildkite/*',\n regex:\n /(image: )(public.ecr.aws\\/docker\\/library\\/)?(node:)[0-9.]+(\\.[^- \\n]+)?(-[^ \\n]+)?$/gm,\n replace: `$1$2$3${nodeVersion}$5`,\n },\n {\n files: '.node-version*',\n regex: /(\\d+(?:\\.\\d+)*)/g,\n replace: `${nodeVersion}`,\n },\n\n {\n files: '**/package.json',\n regex: /(\"@types\\/node\":\\s*\")(\\^)?(\\d+\\.\\d+\\.\\d+)(\")/gm,\n tests: [isPatchableServerlessVersion],\n replace: `$1$2${nodeTypesVersion}$4`,\n },\n {\n files: '**/package.json',\n regex:\n /([\"']engines[\"']:\\s*{[\\s\\S]*?[\"']node[\"']:\\s*[\"']>=)(\\d+(?:\\.\\d+)*)(['\"]\\s*})/gm,\n tests: [isPatchableServerlessVersion, isPatchableSkubaType],\n replace: `$1${nodeVersion}$3`,\n },\n\n {\n files: '**/tsconfig*.json',\n regex: /(\"target\":\\s*\")(ES\\d+)\"/gim,\n tests: [isPatchableServerlessVersion, isPatchableSkubaType],\n replace: `$1${ECMAScriptVersion}\"`,\n },\n {\n files: '**/tsconfig*.json',\n regex: /(\"lib\":\\s*\\[)([\\S\\s]*?)(ES\\d+)([\\S\\s]*?)(\\])/gim,\n tests: [isPatchableServerlessVersion, isPatchableSkubaType],\n replace: `$1$2${ECMAScriptVersion}$4$5`,\n },\n\n {\n files: '**/docker-compose*.y*ml',\n regex:\n /(image: )(public.ecr.aws\\/docker\\/library\\/)?(node:)[0-9.]+(\\.[^- \\n]+)?(-[^ \\n]+)?$/gm,\n\n replace: `$1$2$3${nodeVersion}$5`,\n },\n];\n\ntype Versions = {\n nodeVersion: number;\n nodeTypesVersion: string;\n ECMAScriptVersion: string;\n};\n\nconst runSubPatch = async (dir: string, patch: SubPatch) => {\n const readFile = createDestinationFileReader(dir);\n const paths = patch.file\n ? [patch.file]\n : await glob(patch.files ?? 
[], { cwd: dir });\n\n await Promise.all(\n paths.map(async (path) => {\n if (path.includes('node_modules')) {\n return;\n }\n const contents = await readFile(path);\n if (!contents) {\n return;\n }\n\n if (patch.regex && !patch.regex.test(contents)) {\n return;\n }\n\n if (patch.tests) {\n const results = await Promise.all(patch.tests.map((test) => test()));\n if (!results.every(Boolean)) {\n return;\n }\n }\n\n await writePatchedContents({\n path,\n contents,\n templated: patch.replace,\n regex: patch.regex,\n });\n }),\n );\n};\n\nconst writePatchedContents = async ({\n path,\n contents,\n templated,\n regex,\n}: {\n path: string;\n contents: string;\n templated: string;\n regex?: RegExp;\n}) =>\n await fs.promises.writeFile(\n path,\n regex ? contents.replaceAll(regex, templated) : templated,\n );\n\nconst upgrade = async (versions: Versions, dir: string) => {\n for (const subPatch of subPatches(versions)) {\n await runSubPatch(dir, subPatch);\n }\n};\n\nexport const nodeVersionMigration = async (\n {\n nodeVersion,\n ECMAScriptVersion,\n defaultNodeTypesVersion,\n }: {\n nodeVersion: number;\n ECMAScriptVersion: string;\n defaultNodeTypesVersion: string;\n },\n dir = process.cwd(),\n) => {\n log.ok(`Upgrading to Node.js ${nodeVersion}`);\n try {\n if (!(await isPatchableNodeVersion(nodeVersion))) {\n throw new Error('Node.js version is not patchable');\n }\n\n const { version: nodeTypesVersion, err } = await getNodeTypesVersion(\n nodeVersion,\n defaultNodeTypesVersion,\n );\n if (err) {\n log.warn(err);\n }\n await upgrade({ nodeVersion, nodeTypesVersion, ECMAScriptVersion }, dir);\n await relock(dir);\n\n log.ok('Upgraded to Node.js', nodeVersion);\n } catch (error) {\n log.err('Failed to upgrade');\n log.subtle(inspect(error));\n process.exitCode = 1;\n }\n};\n"],
5
+ "mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAwB;AAExB,uBAAqB;AACrB,sBAAe;AAEf,qBAAoB;AACpB,4BAAuB;AACvB,qBAA4C;AAE5C,oBAIO;AACP,iCAAoC;AAYpC,MAAM,aAAa,CAAC;AAAA,EAClB;AAAA,EACA;AAAA,EACA;AACF,MAA4B;AAAA,EAC1B,EAAE,MAAM,UAAU,SAAS,GAAG,WAAW;AAAA,EAAK;AAAA,EAC9C;AAAA,IACE,OAAO;AAAA,IAEP,OACE;AAAA,IACF,SAAS,iBAAiB,WAAW;AAAA,EACvC;AAAA,EACA;AAAA,IACE,OAAO;AAAA,IACP,OACE;AAAA,IACF,SAAS,kCAAkC,WAAW;AAAA,EACxD;AAAA,EAEA;AAAA,IACE,OAAO;AAAA,IACP,OAAO;AAAA,IACP,OAAO,CAAC,0CAA4B;AAAA,IACpC,SAAS,SAAS,WAAW;AAAA,EAC/B;AAAA,EACA;AAAA,IACE,OAAO;AAAA,IACP,OAAO;AAAA,IACP,OAAO,CAAC,0CAA4B;AAAA,IACpC,SAAS,OAAO,WAAW;AAAA,EAC7B;AAAA,EAEA;AAAA,IACE,OAAO;AAAA,IACP,OAAO;AAAA,IACP,SAAS,UAAU,WAAW;AAAA,EAChC;AAAA,EACA;AAAA,IACE,OAAO;AAAA,IACP,OAAO;AAAA,IACP,SAAS,KAAK,WAAW;AAAA,EAC3B;AAAA,EAEA;AAAA,IACE,OAAO;AAAA,IACP,OACE;AAAA,IACF,SAAS,SAAS,WAAW;AAAA,EAC/B;AAAA,EACA;AAAA,IACE,OAAO;AAAA,IACP,OAAO;AAAA,IACP,SAAS,GAAG,WAAW;AAAA,EACzB;AAAA,EAEA;AAAA,IACE,OAAO;AAAA,IACP,OAAO;AAAA,IACP,OAAO,CAAC,0CAA4B;AAAA,IACpC,SAAS,OAAO,gBAAgB;AAAA,EAClC;AAAA,EACA;AAAA,IACE,OAAO;AAAA,IACP,OACE;AAAA,IACF,OAAO,CAAC,4CAA8B,kCAAoB;AAAA,IAC1D,SAAS,KAAK,WAAW;AAAA,EAC3B;AAAA,EAEA;AAAA,IACE,OAAO;AAAA,IACP,OAAO;AAAA,IACP,OAAO,CAAC,4CAA8B,kCAAoB;AAAA,IAC1D,SAAS,KAAK,iBAAiB;AAAA,EACjC;AAAA,EACA;AAAA,IACE,OAAO;AAAA,IACP,OAAO;AAAA,IACP,OAAO,CAAC,4CAA8B,kCAAoB;AAAA,IAC1D,SAAS,OAAO,iBAAiB;AAAA,EACnC;AAAA,EAEA;AAAA,IACE,OAAO;AAAA,IACP,OACE;AAAA,IAEF,SAAS,SAAS,WAAW;AAAA,EAC/B;AACF;AAQA,MAAM,cAAc,OAAO,KAAa,UAAoB;AAC1D,QAAM,eAAW,4CAA4B,GAAG;AAChD,QAAM,QAAQ,MAAM,OAChB,CAAC,MAAM,IAAI,IACX,UAAM,uBAAK,MAAM,SAAS,CAAC,GAAG,EAAE,KAAK,IAAI,CAAC;AAE9C,QAAM,QAAQ;AAAA,IACZ,MAAM,IAAI,OAAO,SAAS;AACxB,UAAI,KAAK,SAAS,cAAc,GAAG;AACjC;AAAA,MACF;AACA,YAAM,WAAW,MAAM,SAAS,IAAI;AACpC,UAAI,CAAC,UAAU;AACb;AAAA,MACF;AAEA,UAAI,MAAM,SAAS,CAAC,MAAM,MAAM,KAAK,QAAQ,GAAG;AAC9C;AAAA,MACF;AAEA,UAAI,MAAM,OAAO;AACf,cAAM,UAAU,MAAM,QAAQ,IAAI,MAAM,MAAM,IAAI,CAAC,SAAS,KAAK,CAAC,CAAC;AACnE,YAAI,CAAC,QAAQ,MAAM,OAAO,GAAG;AAC3B;AAAA,QACF;AAAA,MACF;AAEA,YAAM,qBAAqB;AAAA,QACzB;AAAA,QACA;AAAA,QACA,WAAW,MAAM;AAAA,QACjB,OAAO,MAAM;AAAA,MACf,CAAC;AAAA,IACH,CAAC;AAAA,EACH;AACF;AAEA,MAAM,uBAAuB,OAAO;AAAA,EAClC;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF,MAME,MAAM,gBAAAA,QAAG,SAAS;AAAA,EAChB;AAAA,EACA,QAAQ,SAAS,WAAW,OAAO,SAAS,IAAI;AAClD;AAEF,MAAM,UAAU,OAAO,UAAoB,QAAgB;AACzD,aAAW,YAAY,WAAW,QAAQ,GAAG;AAC3C,UAAM,YAAY,KAAK,QAAQ;AAAA,EACjC;AACF;AAEO,MAAM,uBAAuB,OAClC;AAAA,EACE;AAAA,EACA;AAAA,EACA;AACF,GAKA,MAAM,QAAQ,IAAI,MACf;AACH,qBAAI,GAAG,wBAAwB,WAAW,EAAE;AAC5C,MAAI;AACF,QAAI,CAAE,UAAM,sCAAuB,WAAW,GAAI;AAChD,YAAM,IAAI,MAAM,kCAAkC;AAAA,IACpD;AAEA,UAAM,EAAE,SAAS,kBAAkB,IAAI,IAAI,UAAM;AAAA,MAC/C;AAAA,MACA;AAAA,IACF;AACA,QAAI,KAAK;AACP,yBAAI,KAAK,GAAG;AAAA,IACd;AACA,UAAM,QAAQ,EAAE,aAAa,kBAAkB,kBAAkB,GAAG,GAAG;AACvE,cAAM,8BAAO,GAAG;AAEhB,uBAAI,GAAG,uBAAuB,WAAW;AAAA,EAC3C,SAAS,OAAO;AACd,uBAAI,IAAI,mBAAmB;AAC3B,uBAAI,WAAO,qBAAQ,KAAK,CAAC;AACzB,YAAQ,WAAW;AAAA,EACrB;AACF;",
6
6
  "names": ["fs"]
7
7
  }
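The compiled `nodeVersion` migration above now finishes by re-running the detected package manager's install step (`relock`) so the lockfile reflects the patched `package.json`. A minimal usage sketch of the exported `nodeVersionMigration` entry point follows; the deep import path and the option values are illustrative assumptions, not a documented API.

```ts
// Hedged sketch: the import path and values are assumptions for illustration only.
import { nodeVersionMigration } from 'skuba/lib/cli/migrate/nodeVersion';

await nodeVersionMigration(
  {
    nodeVersion: 22, // target Node.js major written into Dockerfiles, serverless configs, etc.
    ECMAScriptVersion: 'ES2024', // substituted into tsconfig target/lib entries
    defaultNodeTypesVersion: '22.9.0', // fallback passed to the @types/node version lookup
  },
  process.cwd(), // directory to patch; the lockfile here is re-generated afterwards
);
```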
package/lib/utils/copy.d.ts CHANGED
@@ -1,4 +1,4 @@
1
- export type TextProcessor = (contents: string) => string;
1
+ export type TextProcessor = (sourcePath: string, contents: string) => string;
2
2
  export declare const copyFile: (sourcePath: string, destinationPath: string, { overwrite, processors, }: Pick<CopyFilesOptions, "overwrite" | "processors">) => Promise<void>;
3
3
  interface CopyFilesOptions {
4
4
  sourceRoot: string;
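The `TextProcessor` type above now receives the source path alongside the file contents, letting a processor vary its behaviour (or its error reporting) per file. A self-contained illustrative processor under the new signature; the type alias is copied from the declaration, the logic is made up:

```ts
type TextProcessor = (sourcePath: string, contents: string) => string;

// Illustrative only: tag Markdown templates with the path they were rendered from.
const annotateMarkdown: TextProcessor = (sourcePath, contents) =>
  sourcePath.endsWith('.md')
    ? `<!-- rendered from ${sourcePath} -->\n${contents}`
    : contents;

console.log(annotateMarkdown('template/README.md', '# Hello\n'));
```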
package/lib/utils/copy.js CHANGED
@@ -45,7 +45,7 @@ const copyFile = async (sourcePath, destinationPath, {
45
45
  }) => {
46
46
  const oldContents = await import_fs_extra.default.promises.readFile(sourcePath, "utf8");
47
47
  const newContents = processors.reduce(
48
- (contents, process) => process(contents),
48
+ (contents, process) => process(sourcePath, contents),
49
49
  oldContents
50
50
  );
51
51
  if (oldContents === newContents && sourcePath === destinationPath) {
@@ -62,8 +62,16 @@ const copyFile = async (sourcePath, destinationPath, {
62
62
  throw err;
63
63
  }
64
64
  };
65
- const createEjsRenderer = (templateData) => (contents) => import_ejs.default.render(contents, templateData);
66
- const createStringReplacer = (replacements) => (contents) => replacements.reduce(
65
+ const createEjsRenderer = (templateData) => (sourcePath, contents) => {
66
+ try {
67
+ return import_ejs.default.render(contents, templateData, { strict: false });
68
+ } catch (err) {
69
+ import_logging.log.err("Failed to render", import_logging.log.bold(sourcePath));
70
+ import_logging.log.subtle(err);
71
+ return contents;
72
+ }
73
+ };
74
+ const createStringReplacer = (replacements) => (_sourcePath, contents) => replacements.reduce(
67
75
  (newContents, { input, output }) => newContents.replace(input, output),
68
76
  contents
69
77
  );
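In the compiled `copy.js` above, `createEjsRenderer` now catches EJS rendering failures, logs the offending source path, and falls back to the unrendered contents instead of throwing. A standalone sketch of that fallback pattern, substituting `console.error` for skuba's internal logger:

```ts
import ejs from 'ejs';

// Sketch of the new behaviour: a broken template no longer aborts the copy.
const renderWithFallback = (
  sourcePath: string,
  contents: string,
  templateData: Record<string, unknown>,
): string => {
  try {
    return ejs.render(contents, templateData, { strict: false });
  } catch (err) {
    console.error('Failed to render', sourcePath, err);
    return contents; // keep the original text rather than failing the whole copy
  }
};

// The reference to a value missing from templateData throws inside EJS and is swallowed.
renderWithFallback('template/broken.ts', '<%= missingValue.toUpperCase() %>', {});
```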
package/lib/utils/copy.js.map CHANGED
@@ -1,7 +1,7 @@
1
1
  {
2
2
  "version": 3,
3
3
  "sources": ["../../src/utils/copy.ts"],
4
- "sourcesContent": ["import path from 'path';\n\nimport ejs from 'ejs';\nimport fs from 'fs-extra';\n\nimport { isErrorWithCode } from './error';\nimport { log } from './logging';\n\nexport type TextProcessor = (contents: string) => string;\n\nexport const copyFile = async (\n sourcePath: string,\n destinationPath: string,\n {\n overwrite = true,\n processors,\n }: Pick<CopyFilesOptions, 'overwrite' | 'processors'>,\n) => {\n const oldContents = await fs.promises.readFile(sourcePath, 'utf8');\n\n const newContents = processors.reduce(\n (contents, process) => process(contents),\n oldContents,\n );\n\n if (oldContents === newContents && sourcePath === destinationPath) {\n return;\n }\n\n try {\n await fs.promises.writeFile(destinationPath, newContents, {\n flag: overwrite ? 'w' : 'wx',\n });\n } catch (err) {\n if (isErrorWithCode(err, 'EEXIST')) {\n return;\n }\n\n throw err;\n }\n};\n\ninterface CopyFilesOptions {\n sourceRoot: string;\n destinationRoot: string;\n\n include: (pathname: string) => boolean;\n overwrite?: boolean;\n processors: TextProcessor[];\n stripUnderscorePrefix?: boolean;\n}\n\nexport const createEjsRenderer =\n (templateData: Record<string, unknown>): TextProcessor =>\n (contents) =>\n ejs.render(contents, templateData);\n\nexport const createStringReplacer =\n (\n replacements: Array<{\n input: RegExp;\n output: string;\n }>,\n ): TextProcessor =>\n (contents) =>\n replacements.reduce(\n (newContents, { input, output }) => newContents.replace(input, output),\n contents,\n );\n\nexport const copyFiles = async (\n opts: CopyFilesOptions,\n currentSourceDir: string = opts.sourceRoot,\n currentDestinationDir: string = opts.destinationRoot,\n) => {\n const filenames = await fs.promises.readdir(currentSourceDir);\n\n const toDestinationPath = (filename: string) =>\n path.join(\n currentDestinationDir,\n opts.stripUnderscorePrefix\n ? filename\n .replace(/^_\\./, '.')\n .replace(/^_package\\.json/, 'package.json')\n .replace(/^_eslint\\.config\\.js/, 'eslint.config.js')\n : filename,\n );\n\n const filteredFilenames = filenames.filter((filename) =>\n opts.include(\n path.relative(opts.destinationRoot, toDestinationPath(filename)),\n ),\n );\n\n await Promise.all(\n filteredFilenames.map(async (filename) => {\n const sourcePath = path.join(currentSourceDir, filename);\n const destinationPath = toDestinationPath(filename);\n\n try {\n await copyFile(sourcePath, destinationPath, opts);\n } catch (err) {\n if (isErrorWithCode(err, 'EISDIR')) {\n await fs.promises.mkdir(destinationPath, { recursive: true });\n return copyFiles(opts, sourcePath, destinationPath);\n }\n\n log.err('Failed to render', log.bold(sourcePath));\n\n throw err;\n }\n }),\n );\n};\n"],
5
- "mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAiB;AAEjB,iBAAgB;AAChB,sBAAe;AAEf,mBAAgC;AAChC,qBAAoB;AAIb,MAAM,WAAW,OACtB,YACA,iBACA;AAAA,EACE,YAAY;AAAA,EACZ;AACF,MACG;AACH,QAAM,cAAc,MAAM,gBAAAA,QAAG,SAAS,SAAS,YAAY,MAAM;AAEjE,QAAM,cAAc,WAAW;AAAA,IAC7B,CAAC,UAAU,YAAY,QAAQ,QAAQ;AAAA,IACvC;AAAA,EACF;AAEA,MAAI,gBAAgB,eAAe,eAAe,iBAAiB;AACjE;AAAA,EACF;AAEA,MAAI;AACF,UAAM,gBAAAA,QAAG,SAAS,UAAU,iBAAiB,aAAa;AAAA,MACxD,MAAM,YAAY,MAAM;AAAA,IAC1B,CAAC;AAAA,EACH,SAAS,KAAK;AACZ,YAAI,8BAAgB,KAAK,QAAQ,GAAG;AAClC;AAAA,IACF;AAEA,UAAM;AAAA,EACR;AACF;AAYO,MAAM,oBACX,CAAC,iBACD,CAAC,aACC,WAAAC,QAAI,OAAO,UAAU,YAAY;AAE9B,MAAM,uBACX,CACE,iBAKF,CAAC,aACC,aAAa;AAAA,EACX,CAAC,aAAa,EAAE,OAAO,OAAO,MAAM,YAAY,QAAQ,OAAO,MAAM;AAAA,EACrE;AACF;AAEG,MAAM,YAAY,OACvB,MACA,mBAA2B,KAAK,YAChC,wBAAgC,KAAK,oBAClC;AACH,QAAM,YAAY,MAAM,gBAAAD,QAAG,SAAS,QAAQ,gBAAgB;AAE5D,QAAM,oBAAoB,CAAC,aACzB,YAAAE,QAAK;AAAA,IACH;AAAA,IACA,KAAK,wBACD,SACG,QAAQ,QAAQ,GAAG,EACnB,QAAQ,mBAAmB,cAAc,EACzC,QAAQ,wBAAwB,kBAAkB,IACrD;AAAA,EACN;AAEF,QAAM,oBAAoB,UAAU;AAAA,IAAO,CAAC,aAC1C,KAAK;AAAA,MACH,YAAAA,QAAK,SAAS,KAAK,iBAAiB,kBAAkB,QAAQ,CAAC;AAAA,IACjE;AAAA,EACF;AAEA,QAAM,QAAQ;AAAA,IACZ,kBAAkB,IAAI,OAAO,aAAa;AACxC,YAAM,aAAa,YAAAA,QAAK,KAAK,kBAAkB,QAAQ;AACvD,YAAM,kBAAkB,kBAAkB,QAAQ;AAElD,UAAI;AACF,cAAM,SAAS,YAAY,iBAAiB,IAAI;AAAA,MAClD,SAAS,KAAK;AACZ,gBAAI,8BAAgB,KAAK,QAAQ,GAAG;AAClC,gBAAM,gBAAAF,QAAG,SAAS,MAAM,iBAAiB,EAAE,WAAW,KAAK,CAAC;AAC5D,iBAAO,UAAU,MAAM,YAAY,eAAe;AAAA,QACpD;AAEA,2BAAI,IAAI,oBAAoB,mBAAI,KAAK,UAAU,CAAC;AAEhD,cAAM;AAAA,MACR;AAAA,IACF,CAAC;AAAA,EACH;AACF;",
4
+ "sourcesContent": ["import path from 'path';\n\nimport ejs from 'ejs';\nimport fs from 'fs-extra';\n\nimport { isErrorWithCode } from './error';\nimport { log } from './logging';\n\nexport type TextProcessor = (sourcePath: string, contents: string) => string;\n\nexport const copyFile = async (\n sourcePath: string,\n destinationPath: string,\n {\n overwrite = true,\n processors,\n }: Pick<CopyFilesOptions, 'overwrite' | 'processors'>,\n) => {\n const oldContents = await fs.promises.readFile(sourcePath, 'utf8');\n\n const newContents = processors.reduce(\n (contents, process) => process(sourcePath, contents),\n oldContents,\n );\n\n if (oldContents === newContents && sourcePath === destinationPath) {\n return;\n }\n\n try {\n await fs.promises.writeFile(destinationPath, newContents, {\n flag: overwrite ? 'w' : 'wx',\n });\n } catch (err) {\n if (isErrorWithCode(err, 'EEXIST')) {\n return;\n }\n\n throw err;\n }\n};\n\ninterface CopyFilesOptions {\n sourceRoot: string;\n destinationRoot: string;\n\n include: (pathname: string) => boolean;\n overwrite?: boolean;\n processors: TextProcessor[];\n stripUnderscorePrefix?: boolean;\n}\n\nexport const createEjsRenderer =\n (templateData: Record<string, unknown>): TextProcessor =>\n (sourcePath: string, contents) => {\n try {\n return ejs.render(contents, templateData, { strict: false });\n } catch (err) {\n log.err('Failed to render', log.bold(sourcePath));\n log.subtle(err);\n return contents;\n }\n };\n\nexport const createStringReplacer =\n (\n replacements: Array<{\n input: RegExp;\n output: string;\n }>,\n ): TextProcessor =>\n (_sourcePath: string, contents) =>\n replacements.reduce(\n (newContents, { input, output }) => newContents.replace(input, output),\n contents,\n );\n\nexport const copyFiles = async (\n opts: CopyFilesOptions,\n currentSourceDir: string = opts.sourceRoot,\n currentDestinationDir: string = opts.destinationRoot,\n) => {\n const filenames = await fs.promises.readdir(currentSourceDir);\n\n const toDestinationPath = (filename: string) =>\n path.join(\n currentDestinationDir,\n opts.stripUnderscorePrefix\n ? filename\n .replace(/^_\\./, '.')\n .replace(/^_package\\.json/, 'package.json')\n .replace(/^_eslint\\.config\\.js/, 'eslint.config.js')\n : filename,\n );\n\n const filteredFilenames = filenames.filter((filename) =>\n opts.include(\n path.relative(opts.destinationRoot, toDestinationPath(filename)),\n ),\n );\n\n await Promise.all(\n filteredFilenames.map(async (filename) => {\n const sourcePath = path.join(currentSourceDir, filename);\n const destinationPath = toDestinationPath(filename);\n\n try {\n await copyFile(sourcePath, destinationPath, opts);\n } catch (err) {\n if (isErrorWithCode(err, 'EISDIR')) {\n await fs.promises.mkdir(destinationPath, { recursive: true });\n return copyFiles(opts, sourcePath, destinationPath);\n }\n\n log.err('Failed to render', log.bold(sourcePath));\n\n throw err;\n }\n }),\n );\n};\n"],
5
+ "mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAiB;AAEjB,iBAAgB;AAChB,sBAAe;AAEf,mBAAgC;AAChC,qBAAoB;AAIb,MAAM,WAAW,OACtB,YACA,iBACA;AAAA,EACE,YAAY;AAAA,EACZ;AACF,MACG;AACH,QAAM,cAAc,MAAM,gBAAAA,QAAG,SAAS,SAAS,YAAY,MAAM;AAEjE,QAAM,cAAc,WAAW;AAAA,IAC7B,CAAC,UAAU,YAAY,QAAQ,YAAY,QAAQ;AAAA,IACnD;AAAA,EACF;AAEA,MAAI,gBAAgB,eAAe,eAAe,iBAAiB;AACjE;AAAA,EACF;AAEA,MAAI;AACF,UAAM,gBAAAA,QAAG,SAAS,UAAU,iBAAiB,aAAa;AAAA,MACxD,MAAM,YAAY,MAAM;AAAA,IAC1B,CAAC;AAAA,EACH,SAAS,KAAK;AACZ,YAAI,8BAAgB,KAAK,QAAQ,GAAG;AAClC;AAAA,IACF;AAEA,UAAM;AAAA,EACR;AACF;AAYO,MAAM,oBACX,CAAC,iBACD,CAAC,YAAoB,aAAa;AAChC,MAAI;AACF,WAAO,WAAAC,QAAI,OAAO,UAAU,cAAc,EAAE,QAAQ,MAAM,CAAC;AAAA,EAC7D,SAAS,KAAK;AACZ,uBAAI,IAAI,oBAAoB,mBAAI,KAAK,UAAU,CAAC;AAChD,uBAAI,OAAO,GAAG;AACd,WAAO;AAAA,EACT;AACF;AAEK,MAAM,uBACX,CACE,iBAKF,CAAC,aAAqB,aACpB,aAAa;AAAA,EACX,CAAC,aAAa,EAAE,OAAO,OAAO,MAAM,YAAY,QAAQ,OAAO,MAAM;AAAA,EACrE;AACF;AAEG,MAAM,YAAY,OACvB,MACA,mBAA2B,KAAK,YAChC,wBAAgC,KAAK,oBAClC;AACH,QAAM,YAAY,MAAM,gBAAAD,QAAG,SAAS,QAAQ,gBAAgB;AAE5D,QAAM,oBAAoB,CAAC,aACzB,YAAAE,QAAK;AAAA,IACH;AAAA,IACA,KAAK,wBACD,SACG,QAAQ,QAAQ,GAAG,EACnB,QAAQ,mBAAmB,cAAc,EACzC,QAAQ,wBAAwB,kBAAkB,IACrD;AAAA,EACN;AAEF,QAAM,oBAAoB,UAAU;AAAA,IAAO,CAAC,aAC1C,KAAK;AAAA,MACH,YAAAA,QAAK,SAAS,KAAK,iBAAiB,kBAAkB,QAAQ,CAAC;AAAA,IACjE;AAAA,EACF;AAEA,QAAM,QAAQ;AAAA,IACZ,kBAAkB,IAAI,OAAO,aAAa;AACxC,YAAM,aAAa,YAAAA,QAAK,KAAK,kBAAkB,QAAQ;AACvD,YAAM,kBAAkB,kBAAkB,QAAQ;AAElD,UAAI;AACF,cAAM,SAAS,YAAY,iBAAiB,IAAI;AAAA,MAClD,SAAS,KAAK;AACZ,gBAAI,8BAAgB,KAAK,QAAQ,GAAG;AAClC,gBAAM,gBAAAF,QAAG,SAAS,MAAM,iBAAiB,EAAE,WAAW,KAAK,CAAC;AAC5D,iBAAO,UAAU,MAAM,YAAY,eAAe;AAAA,QACpD;AAEA,2BAAI,IAAI,oBAAoB,mBAAI,KAAK,UAAU,CAAC;AAEhD,cAAM;AAAA,MACR;AAAA,IACF,CAAC;AAAA,EACH;AACF;",
6
6
  "names": ["fs", "ejs", "path"]
7
7
  }
package/lib/utils/packageManager.d.ts CHANGED
@@ -19,6 +19,7 @@ declare const PACKAGE_MANAGERS: {
19
19
  };
20
20
  export declare const configForPackageManager: (packageManager: PackageManager) => PackageManagerConfig;
21
21
  export declare const detectPackageManager: (cwd?: string) => Promise<PackageManagerConfig>;
22
+ export declare const relock: (cwd?: string) => Promise<void>;
22
23
  export type PackageManager = z.infer<typeof packageManagerSchema>;
23
24
  export declare const packageManagerSchema: z.ZodDefault<z.ZodEnum<["pnpm", "yarn"]>>;
24
25
  export {};
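The newly declared `relock` helper above detects the project's package manager and re-runs its install command to refresh the lockfile. A hypothetical call site; the deep import path is an assumption about the compiled layout rather than a documented entry point:

```ts
import { relock } from 'skuba/lib/utils/packageManager';

// Re-generate yarn.lock or pnpm-lock.yaml after programmatic package.json edits.
await relock(process.cwd());
```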
package/lib/utils/packageManager.js CHANGED
@@ -31,12 +31,14 @@ __export(packageManager_exports, {
31
31
  DEFAULT_PACKAGE_MANAGER: () => DEFAULT_PACKAGE_MANAGER,
32
32
  configForPackageManager: () => configForPackageManager,
33
33
  detectPackageManager: () => detectPackageManager,
34
- packageManagerSchema: () => packageManagerSchema
34
+ packageManagerSchema: () => packageManagerSchema,
35
+ relock: () => relock
35
36
  });
36
37
  module.exports = __toCommonJS(packageManager_exports);
37
38
  var import_find_up = __toESM(require("find-up"));
38
39
  var import_is_installed_globally = __toESM(require("is-installed-globally"));
39
40
  var import_zod = require("zod");
41
+ var import_exec = require("./exec");
40
42
  var import_logging = require("./logging");
41
43
  const DEFAULT_PACKAGE_MANAGER = "yarn";
42
44
  const PACKAGE_MANAGERS = {
@@ -87,6 +89,10 @@ const detectPackageManager = async (cwd) => {
87
89
  }
88
90
  return configForPackageManager(packageManager);
89
91
  };
92
+ const relock = async (cwd) => {
93
+ const packageManager = await detectPackageManager(cwd);
94
+ await (0, import_exec.exec)(packageManager.install);
95
+ };
90
96
  const findDepth = async (filename, cwd) => {
91
97
  const path = await (0, import_find_up.default)(filename, { cwd });
92
98
  return path ? path.split("/").length : void 0;
@@ -97,6 +103,7 @@ const packageManagerSchema = import_zod.z.enum(["pnpm", "yarn"]).default(DEFAULT
97
103
  DEFAULT_PACKAGE_MANAGER,
98
104
  configForPackageManager,
99
105
  detectPackageManager,
100
- packageManagerSchema
106
+ packageManagerSchema,
107
+ relock
101
108
  });
102
109
  //# sourceMappingURL=packageManager.js.map
package/lib/utils/packageManager.js.map CHANGED
@@ -1,7 +1,7 @@
1
1
  {
2
2
  "version": 3,
3
3
  "sources": ["../../src/utils/packageManager.ts"],
4
- "sourcesContent": ["import findUp from 'find-up';\nimport isInstalledGlobally from 'is-installed-globally';\nimport { z } from 'zod';\n\nimport { log } from './logging';\n\n// TODO: consider changing to this to `pnpm` in a future major version.\nexport const DEFAULT_PACKAGE_MANAGER = 'yarn';\n\nexport type PackageManagerConfig =\n (typeof PACKAGE_MANAGERS)[keyof typeof PACKAGE_MANAGERS] & {\n command: PackageManager;\n };\n\nconst PACKAGE_MANAGERS = {\n pnpm: {\n exec: 'pnpm exec',\n install: 'pnpm install',\n runSilent: 'pnpm --silent run',\n update: isInstalledGlobally ? 'pnpm update --global' : 'pnpm update',\n },\n yarn: {\n exec: 'yarn',\n install: 'yarn install',\n runSilent: 'yarn -s',\n update: isInstalledGlobally ? 'yarn global upgrade' : 'yarn upgrade',\n },\n};\n\nexport const configForPackageManager = (\n packageManager: PackageManager,\n): PackageManagerConfig => ({\n ...PACKAGE_MANAGERS[packageManager],\n command: packageManager,\n});\n\nexport const detectPackageManager = async (\n cwd?: string,\n): Promise<PackageManagerConfig> => {\n let packageManager: PackageManager = DEFAULT_PACKAGE_MANAGER;\n\n try {\n const [yarnDepth, pnpmDepth] = await Promise.all([\n findDepth('yarn.lock', cwd),\n findDepth('pnpm-lock.yaml', cwd),\n ]);\n\n if (yarnDepth === undefined && pnpmDepth === undefined) {\n throw new Error('No package manager lockfile found.');\n }\n\n packageManager = (pnpmDepth ?? -1) > (yarnDepth ?? -1) ? 'pnpm' : 'yarn';\n } catch (err) {\n log.warn(\n `Failed to detect package manager; defaulting to ${log.bold(\n DEFAULT_PACKAGE_MANAGER,\n )}.`,\n );\n log.subtle(\n (() => {\n switch (true) {\n case err instanceof Error:\n return err.message;\n\n default:\n return String(err);\n }\n })(),\n );\n }\n\n return configForPackageManager(packageManager);\n};\n\nconst findDepth = async (filename: string, cwd?: string) => {\n const path = await findUp(filename, { cwd });\n return path ? path.split('/').length : undefined;\n};\n\nexport type PackageManager = z.infer<typeof packageManagerSchema>;\n\nexport const packageManagerSchema = z\n .enum(['pnpm', 'yarn'])\n .default(DEFAULT_PACKAGE_MANAGER);\n"],
5
- "mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,qBAAmB;AACnB,mCAAgC;AAChC,iBAAkB;AAElB,qBAAoB;AAGb,MAAM,0BAA0B;AAOvC,MAAM,mBAAmB;AAAA,EACvB,MAAM;AAAA,IACJ,MAAM;AAAA,IACN,SAAS;AAAA,IACT,WAAW;AAAA,IACX,QAAQ,6BAAAA,UAAsB,yBAAyB;AAAA,EACzD;AAAA,EACA,MAAM;AAAA,IACJ,MAAM;AAAA,IACN,SAAS;AAAA,IACT,WAAW;AAAA,IACX,QAAQ,6BAAAA,UAAsB,wBAAwB;AAAA,EACxD;AACF;AAEO,MAAM,0BAA0B,CACrC,oBAC0B;AAAA,EAC1B,GAAG,iBAAiB,cAAc;AAAA,EAClC,SAAS;AACX;AAEO,MAAM,uBAAuB,OAClC,QACkC;AAClC,MAAI,iBAAiC;AAErC,MAAI;AACF,UAAM,CAAC,WAAW,SAAS,IAAI,MAAM,QAAQ,IAAI;AAAA,MAC/C,UAAU,aAAa,GAAG;AAAA,MAC1B,UAAU,kBAAkB,GAAG;AAAA,IACjC,CAAC;AAED,QAAI,cAAc,UAAa,cAAc,QAAW;AACtD,YAAM,IAAI,MAAM,oCAAoC;AAAA,IACtD;AAEA,sBAAkB,aAAa,OAAO,aAAa,MAAM,SAAS;AAAA,EACpE,SAAS,KAAK;AACZ,uBAAI;AAAA,MACF,mDAAmD,mBAAI;AAAA,QACrD;AAAA,MACF,CAAC;AAAA,IACH;AACA,uBAAI;AAAA,OACD,MAAM;AACL,gBAAQ,MAAM;AAAA,UACZ,KAAK,eAAe;AAClB,mBAAO,IAAI;AAAA,UAEb;AACE,mBAAO,OAAO,GAAG;AAAA,QACrB;AAAA,MACF,GAAG;AAAA,IACL;AAAA,EACF;AAEA,SAAO,wBAAwB,cAAc;AAC/C;AAEA,MAAM,YAAY,OAAO,UAAkB,QAAiB;AAC1D,QAAM,OAAO,UAAM,eAAAC,SAAO,UAAU,EAAE,IAAI,CAAC;AAC3C,SAAO,OAAO,KAAK,MAAM,GAAG,EAAE,SAAS;AACzC;AAIO,MAAM,uBAAuB,aACjC,KAAK,CAAC,QAAQ,MAAM,CAAC,EACrB,QAAQ,uBAAuB;",
4
+ "sourcesContent": ["import findUp from 'find-up';\nimport isInstalledGlobally from 'is-installed-globally';\nimport { z } from 'zod';\n\nimport { exec } from './exec';\nimport { log } from './logging';\n\n// TODO: consider changing to this to `pnpm` in a future major version.\nexport const DEFAULT_PACKAGE_MANAGER = 'yarn';\n\nexport type PackageManagerConfig =\n (typeof PACKAGE_MANAGERS)[keyof typeof PACKAGE_MANAGERS] & {\n command: PackageManager;\n };\n\nconst PACKAGE_MANAGERS = {\n pnpm: {\n exec: 'pnpm exec',\n install: 'pnpm install',\n runSilent: 'pnpm --silent run',\n update: isInstalledGlobally ? 'pnpm update --global' : 'pnpm update',\n },\n yarn: {\n exec: 'yarn',\n install: 'yarn install',\n runSilent: 'yarn -s',\n update: isInstalledGlobally ? 'yarn global upgrade' : 'yarn upgrade',\n },\n};\n\nexport const configForPackageManager = (\n packageManager: PackageManager,\n): PackageManagerConfig => ({\n ...PACKAGE_MANAGERS[packageManager],\n command: packageManager,\n});\n\nexport const detectPackageManager = async (\n cwd?: string,\n): Promise<PackageManagerConfig> => {\n let packageManager: PackageManager = DEFAULT_PACKAGE_MANAGER;\n\n try {\n const [yarnDepth, pnpmDepth] = await Promise.all([\n findDepth('yarn.lock', cwd),\n findDepth('pnpm-lock.yaml', cwd),\n ]);\n\n if (yarnDepth === undefined && pnpmDepth === undefined) {\n throw new Error('No package manager lockfile found.');\n }\n\n packageManager = (pnpmDepth ?? -1) > (yarnDepth ?? -1) ? 'pnpm' : 'yarn';\n } catch (err) {\n log.warn(\n `Failed to detect package manager; defaulting to ${log.bold(\n DEFAULT_PACKAGE_MANAGER,\n )}.`,\n );\n log.subtle(\n (() => {\n switch (true) {\n case err instanceof Error:\n return err.message;\n\n default:\n return String(err);\n }\n })(),\n );\n }\n\n return configForPackageManager(packageManager);\n};\n\nexport const relock = async (cwd?: string) => {\n const packageManager = await detectPackageManager(cwd);\n await exec(packageManager.install);\n};\n\nconst findDepth = async (filename: string, cwd?: string) => {\n const path = await findUp(filename, { cwd });\n return path ? path.split('/').length : undefined;\n};\n\nexport type PackageManager = z.infer<typeof packageManagerSchema>;\n\nexport const packageManagerSchema = z\n .enum(['pnpm', 'yarn'])\n .default(DEFAULT_PACKAGE_MANAGER);\n"],
5
+ "mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,qBAAmB;AACnB,mCAAgC;AAChC,iBAAkB;AAElB,kBAAqB;AACrB,qBAAoB;AAGb,MAAM,0BAA0B;AAOvC,MAAM,mBAAmB;AAAA,EACvB,MAAM;AAAA,IACJ,MAAM;AAAA,IACN,SAAS;AAAA,IACT,WAAW;AAAA,IACX,QAAQ,6BAAAA,UAAsB,yBAAyB;AAAA,EACzD;AAAA,EACA,MAAM;AAAA,IACJ,MAAM;AAAA,IACN,SAAS;AAAA,IACT,WAAW;AAAA,IACX,QAAQ,6BAAAA,UAAsB,wBAAwB;AAAA,EACxD;AACF;AAEO,MAAM,0BAA0B,CACrC,oBAC0B;AAAA,EAC1B,GAAG,iBAAiB,cAAc;AAAA,EAClC,SAAS;AACX;AAEO,MAAM,uBAAuB,OAClC,QACkC;AAClC,MAAI,iBAAiC;AAErC,MAAI;AACF,UAAM,CAAC,WAAW,SAAS,IAAI,MAAM,QAAQ,IAAI;AAAA,MAC/C,UAAU,aAAa,GAAG;AAAA,MAC1B,UAAU,kBAAkB,GAAG;AAAA,IACjC,CAAC;AAED,QAAI,cAAc,UAAa,cAAc,QAAW;AACtD,YAAM,IAAI,MAAM,oCAAoC;AAAA,IACtD;AAEA,sBAAkB,aAAa,OAAO,aAAa,MAAM,SAAS;AAAA,EACpE,SAAS,KAAK;AACZ,uBAAI;AAAA,MACF,mDAAmD,mBAAI;AAAA,QACrD;AAAA,MACF,CAAC;AAAA,IACH;AACA,uBAAI;AAAA,OACD,MAAM;AACL,gBAAQ,MAAM;AAAA,UACZ,KAAK,eAAe;AAClB,mBAAO,IAAI;AAAA,UAEb;AACE,mBAAO,OAAO,GAAG;AAAA,QACrB;AAAA,MACF,GAAG;AAAA,IACL;AAAA,EACF;AAEA,SAAO,wBAAwB,cAAc;AAC/C;AAEO,MAAM,SAAS,OAAO,QAAiB;AAC5C,QAAM,iBAAiB,MAAM,qBAAqB,GAAG;AACrD,YAAM,kBAAK,eAAe,OAAO;AACnC;AAEA,MAAM,YAAY,OAAO,UAAkB,QAAiB;AAC1D,QAAM,OAAO,UAAM,eAAAC,SAAO,UAAU,EAAE,IAAI,CAAC;AAC3C,SAAO,OAAO,KAAK,MAAM,GAAG,EAAE,SAAS;AACzC;AAIO,MAAM,uBAAuB,aACjC,KAAK,CAAC,QAAQ,MAAM,CAAC,EACrB,QAAQ,uBAAuB;",
6
6
  "names": ["isInstalledGlobally", "findUp"]
7
7
  }
package/lib/utils/version.d.ts CHANGED
@@ -1,4 +1,20 @@
1
- export declare const latestNpmVersion: (packageName: string) => Promise<string>;
1
+ import { z } from 'zod';
2
+ declare const NpmVersions: z.ZodRecord<z.ZodString, z.ZodObject<{
3
+ name: z.ZodString;
4
+ version: z.ZodString;
5
+ deprecated: z.ZodOptional<z.ZodString>;
6
+ }, "strip", z.ZodTypeAny, {
7
+ name: string;
8
+ version: string;
9
+ deprecated?: string | undefined;
10
+ }, {
11
+ name: string;
12
+ version: string;
13
+ deprecated?: string | undefined;
14
+ }>>;
15
+ export type NpmVersions = z.infer<typeof NpmVersions>;
16
+ export declare const getNpmVersions: (packageName: string) => Promise<NpmVersions | null>;
17
+ export declare const getLatestNpmVersion: (packageName: string) => Promise<string | null>;
2
18
  export declare const getSkubaVersion: () => Promise<string>;
3
19
  type SkubaVersionInfo = {
4
20
  isStale: true;
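Per the declarations above, `getLatestNpmVersion` and `getNpmVersions` resolve to `null` when the package cannot be retrieved, unlike the removed `latestNpmVersion`, which threw. A sketch of guarding for that; the deep import path is an assumption:

```ts
import { getLatestNpmVersion, getNpmVersions } from 'skuba/lib/utils/version';

const latest = await getLatestNpmVersion('skuba');
if (latest === null) {
  throw new Error('Could not resolve the latest skuba version from the npm registry');
}

// Each entry in the versions record may carry an optional deprecation notice.
const versions = await getNpmVersions('@types/node');
const deprecated = Object.values(versions ?? {}).filter((v) => v.deprecated);

console.log(`latest skuba: ${latest}; deprecated @types/node releases: ${deprecated.length}`);
```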
package/lib/utils/version.js CHANGED
@@ -28,31 +28,60 @@ var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__ge
28
28
  var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
29
29
  var version_exports = {};
30
30
  __export(version_exports, {
31
+ getLatestNpmVersion: () => getLatestNpmVersion,
32
+ getNpmVersions: () => getNpmVersions,
31
33
  getSkubaVersion: () => getSkubaVersion,
32
- getSkubaVersionInfo: () => getSkubaVersionInfo,
33
- latestNpmVersion: () => latestNpmVersion
34
+ getSkubaVersionInfo: () => getSkubaVersionInfo
34
35
  });
35
36
  module.exports = __toCommonJS(version_exports);
36
- var import_libnpmsearch = __toESM(require("libnpmsearch"));
37
- var import_validate_npm_package_name = __toESM(require("validate-npm-package-name"));
37
+ var import_npm_registry_fetch = __toESM(require("npm-registry-fetch"));
38
+ var import_zod = require("zod");
38
39
  var import_manifest = require("./manifest");
39
40
  var import_wait = require("./wait");
40
- const latestNpmVersion = async (packageName) => {
41
- const { validForNewPackages } = (0, import_validate_npm_package_name.default)(packageName);
42
- if (!validForNewPackages) {
43
- throw new Error(`Package "${packageName}" does not have a valid name`);
44
- }
45
- const [result] = await (0, import_libnpmsearch.default)(packageName, { limit: 1, timeout: 5e3 });
46
- if (result?.name !== packageName) {
47
- throw new Error(
48
- `Package "${packageName}" does not exist on the npm registry`
49
- );
41
+ const NpmVersions = import_zod.z.record(
42
+ import_zod.z.string(),
43
+ import_zod.z.object({
44
+ name: import_zod.z.string(),
45
+ version: import_zod.z.string(),
46
+ deprecated: import_zod.z.string().optional()
47
+ })
48
+ );
49
+ const PackageResponse = import_zod.z.object({
50
+ "dist-tags": import_zod.z.record(import_zod.z.string(), import_zod.z.string()).optional(),
51
+ versions: NpmVersions
52
+ });
53
+ const getNpmPackage = async (packageName) => {
54
+ try {
55
+ const response = await import_npm_registry_fetch.default.json(packageName, {
56
+ headers: {
57
+ Accept: "application/vnd.npm.install-v1+json; q=1.0, application/json; q=0.8, */*"
58
+ }
59
+ });
60
+ const parsedResponse = PackageResponse.safeParse(response);
61
+ if (!parsedResponse.success) {
62
+ throw new Error(
63
+ `Failed to parse package response from npm for package ${packageName}`
64
+ );
65
+ }
66
+ return parsedResponse.data;
67
+ } catch (error) {
68
+ if (error instanceof Error && "statusCode" in error && error.statusCode === 404) {
69
+ return null;
70
+ }
71
+ throw error;
50
72
  }
51
- return result.version;
73
+ };
74
+ const getNpmVersions = async (packageName) => {
75
+ const response = await getNpmPackage(packageName);
76
+ return response?.versions ?? null;
77
+ };
78
+ const getLatestNpmVersion = async (packageName) => {
79
+ const response = await getNpmPackage(packageName);
80
+ return response?.["dist-tags"]?.latest ?? null;
52
81
  };
53
82
  const latestSkubaVersion = async () => {
54
83
  try {
55
- const result = await (0, import_wait.withTimeout)(latestNpmVersion("skuba"), { s: 2 });
84
+ const result = await (0, import_wait.withTimeout)(getLatestNpmVersion("skuba"), { s: 2 });
56
85
  return result.ok ? result.value : null;
57
86
  } catch {
58
87
  return null;
@@ -82,8 +111,9 @@ const getSkubaVersionInfo = async () => {
82
111
  };
83
112
  // Annotate the CommonJS export names for ESM import in node:
84
113
  0 && (module.exports = {
114
+ getLatestNpmVersion,
115
+ getNpmVersions,
85
116
  getSkubaVersion,
86
- getSkubaVersionInfo,
87
- latestNpmVersion
117
+ getSkubaVersionInfo
88
118
  });
89
119
  //# sourceMappingURL=version.js.map
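The compiled `version.js` above replaces the `libnpmsearch` lookup with a direct packument fetch via `npm-registry-fetch`, requesting the abbreviated install metadata. A minimal sketch of that underlying call, narrowing the response by hand rather than with the zod schema used in the module:

```ts
import npmFetch from 'npm-registry-fetch';

const packument = (await npmFetch.json('skuba', {
  headers: {
    // Prefer the abbreviated "install" metadata; fall back to the full document.
    Accept:
      'application/vnd.npm.install-v1+json; q=1.0, application/json; q=0.8, */*',
  },
})) as { 'dist-tags'?: Record<string, string> };

console.log(packument['dist-tags']?.latest ?? 'not found');
```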
package/lib/utils/version.js.map CHANGED
@@ -1,7 +1,7 @@
1
1
  {
2
2
  "version": 3,
3
3
  "sources": ["../../src/utils/version.ts"],
4
- "sourcesContent": ["import searchNpm from 'libnpmsearch';\nimport validatePackageName from 'validate-npm-package-name';\n\nimport { getSkubaManifest } from './manifest';\nimport { withTimeout } from './wait';\n\nexport const latestNpmVersion = async (\n packageName: string,\n): Promise<string> => {\n const { validForNewPackages } = validatePackageName(packageName);\n\n if (!validForNewPackages) {\n throw new Error(`Package \"${packageName}\" does not have a valid name`);\n }\n\n const [result] = await searchNpm(packageName, { limit: 1, timeout: 5_000 });\n\n if (result?.name !== packageName) {\n throw new Error(\n `Package \"${packageName}\" does not exist on the npm registry`,\n );\n }\n\n return result.version;\n};\n\nconst latestSkubaVersion = async (): Promise<string | null> => {\n try {\n const result = await withTimeout(latestNpmVersion('skuba'), { s: 2 });\n\n return result.ok ? result.value : null;\n } catch {\n return null;\n }\n};\n\nexport const getSkubaVersion = async (): Promise<string> => {\n const { version } = await getSkubaManifest();\n\n return version;\n};\n\ntype SkubaVersionInfo =\n | {\n isStale: true;\n\n local: string;\n latest: string;\n }\n | {\n isStale: false;\n\n local: string;\n latest: string | null;\n };\n\nexport const getSkubaVersionInfo = async (): Promise<SkubaVersionInfo> => {\n const [local, latest] = await Promise.all([\n getSkubaVersion(),\n latestSkubaVersion(),\n ]);\n\n if (latest === null) {\n // Assume we're up to date if we can't reach the npm registry\n return {\n isStale: false,\n local,\n latest,\n };\n }\n\n return {\n isStale: latest !== local,\n local,\n latest,\n };\n};\n"],
5
- "mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,0BAAsB;AACtB,uCAAgC;AAEhC,sBAAiC;AACjC,kBAA4B;AAErB,MAAM,mBAAmB,OAC9B,gBACoB;AACpB,QAAM,EAAE,oBAAoB,QAAI,iCAAAA,SAAoB,WAAW;AAE/D,MAAI,CAAC,qBAAqB;AACxB,UAAM,IAAI,MAAM,YAAY,WAAW,8BAA8B;AAAA,EACvE;AAEA,QAAM,CAAC,MAAM,IAAI,UAAM,oBAAAC,SAAU,aAAa,EAAE,OAAO,GAAG,SAAS,IAAM,CAAC;AAE1E,MAAI,QAAQ,SAAS,aAAa;AAChC,UAAM,IAAI;AAAA,MACR,YAAY,WAAW;AAAA,IACzB;AAAA,EACF;AAEA,SAAO,OAAO;AAChB;AAEA,MAAM,qBAAqB,YAAoC;AAC7D,MAAI;AACF,UAAM,SAAS,UAAM,yBAAY,iBAAiB,OAAO,GAAG,EAAE,GAAG,EAAE,CAAC;AAEpE,WAAO,OAAO,KAAK,OAAO,QAAQ;AAAA,EACpC,QAAQ;AACN,WAAO;AAAA,EACT;AACF;AAEO,MAAM,kBAAkB,YAA6B;AAC1D,QAAM,EAAE,QAAQ,IAAI,UAAM,kCAAiB;AAE3C,SAAO;AACT;AAgBO,MAAM,sBAAsB,YAAuC;AACxE,QAAM,CAAC,OAAO,MAAM,IAAI,MAAM,QAAQ,IAAI;AAAA,IACxC,gBAAgB;AAAA,IAChB,mBAAmB;AAAA,EACrB,CAAC;AAED,MAAI,WAAW,MAAM;AAEnB,WAAO;AAAA,MACL,SAAS;AAAA,MACT;AAAA,MACA;AAAA,IACF;AAAA,EACF;AAEA,SAAO;AAAA,IACL,SAAS,WAAW;AAAA,IACpB;AAAA,IACA;AAAA,EACF;AACF;",
6
- "names": ["validatePackageName", "searchNpm"]
4
+ "sourcesContent": ["import npmFetch from 'npm-registry-fetch';\nimport { z } from 'zod';\n\nimport { getSkubaManifest } from './manifest';\nimport { withTimeout } from './wait';\n\nconst NpmVersions = z.record(\n z.string(),\n z.object({\n name: z.string(),\n version: z.string(),\n deprecated: z.string().optional(),\n }),\n);\n\nexport type NpmVersions = z.infer<typeof NpmVersions>;\n\nconst PackageResponse = z.object({\n 'dist-tags': z.record(z.string(), z.string()).optional(),\n versions: NpmVersions,\n});\n\nconst getNpmPackage = async (packageName: string) => {\n try {\n const response = await npmFetch.json(packageName, {\n headers: {\n Accept:\n 'application/vnd.npm.install-v1+json; q=1.0, application/json; q=0.8, */*',\n },\n });\n\n const parsedResponse = PackageResponse.safeParse(response);\n if (!parsedResponse.success) {\n throw new Error(\n `Failed to parse package response from npm for package ${packageName}`,\n );\n }\n\n return parsedResponse.data;\n } catch (error) {\n if (\n error instanceof Error &&\n 'statusCode' in error &&\n error.statusCode === 404\n ) {\n return null;\n }\n throw error;\n }\n};\n\nexport const getNpmVersions = async (\n packageName: string,\n): Promise<NpmVersions | null> => {\n const response = await getNpmPackage(packageName);\n return response?.versions ?? null;\n};\n\nexport const getLatestNpmVersion = async (\n packageName: string,\n): Promise<string | null> => {\n const response = await getNpmPackage(packageName);\n return response?.['dist-tags']?.latest ?? null;\n};\n\nconst latestSkubaVersion = async (): Promise<string | null> => {\n try {\n const result = await withTimeout(getLatestNpmVersion('skuba'), { s: 2 });\n\n return result.ok ? result.value : null;\n } catch {\n return null;\n }\n};\n\nexport const getSkubaVersion = async (): Promise<string> => {\n const { version } = await getSkubaManifest();\n\n return version;\n};\n\ntype SkubaVersionInfo =\n | {\n isStale: true;\n\n local: string;\n latest: string;\n }\n | {\n isStale: false;\n\n local: string;\n latest: string | null;\n };\n\nexport const getSkubaVersionInfo = async (): Promise<SkubaVersionInfo> => {\n const [local, latest] = await Promise.all([\n getSkubaVersion(),\n latestSkubaVersion(),\n ]);\n\n if (latest === null) {\n // Assume we're up to date if we can't reach the npm registry\n return {\n isStale: false,\n local,\n latest,\n };\n }\n\n return {\n isStale: latest !== local,\n local,\n latest,\n };\n};\n"],
5
+ "mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,gCAAqB;AACrB,iBAAkB;AAElB,sBAAiC;AACjC,kBAA4B;AAE5B,MAAM,cAAc,aAAE;AAAA,EACpB,aAAE,OAAO;AAAA,EACT,aAAE,OAAO;AAAA,IACP,MAAM,aAAE,OAAO;AAAA,IACf,SAAS,aAAE,OAAO;AAAA,IAClB,YAAY,aAAE,OAAO,EAAE,SAAS;AAAA,EAClC,CAAC;AACH;AAIA,MAAM,kBAAkB,aAAE,OAAO;AAAA,EAC/B,aAAa,aAAE,OAAO,aAAE,OAAO,GAAG,aAAE,OAAO,CAAC,EAAE,SAAS;AAAA,EACvD,UAAU;AACZ,CAAC;AAED,MAAM,gBAAgB,OAAO,gBAAwB;AACnD,MAAI;AACF,UAAM,WAAW,MAAM,0BAAAA,QAAS,KAAK,aAAa;AAAA,MAChD,SAAS;AAAA,QACP,QACE;AAAA,MACJ;AAAA,IACF,CAAC;AAED,UAAM,iBAAiB,gBAAgB,UAAU,QAAQ;AACzD,QAAI,CAAC,eAAe,SAAS;AAC3B,YAAM,IAAI;AAAA,QACR,yDAAyD,WAAW;AAAA,MACtE;AAAA,IACF;AAEA,WAAO,eAAe;AAAA,EACxB,SAAS,OAAO;AACd,QACE,iBAAiB,SACjB,gBAAgB,SAChB,MAAM,eAAe,KACrB;AACA,aAAO;AAAA,IACT;AACA,UAAM;AAAA,EACR;AACF;AAEO,MAAM,iBAAiB,OAC5B,gBACgC;AAChC,QAAM,WAAW,MAAM,cAAc,WAAW;AAChD,SAAO,UAAU,YAAY;AAC/B;AAEO,MAAM,sBAAsB,OACjC,gBAC2B;AAC3B,QAAM,WAAW,MAAM,cAAc,WAAW;AAChD,SAAO,WAAW,WAAW,GAAG,UAAU;AAC5C;AAEA,MAAM,qBAAqB,YAAoC;AAC7D,MAAI;AACF,UAAM,SAAS,UAAM,yBAAY,oBAAoB,OAAO,GAAG,EAAE,GAAG,EAAE,CAAC;AAEvE,WAAO,OAAO,KAAK,OAAO,QAAQ;AAAA,EACpC,QAAQ;AACN,WAAO;AAAA,EACT;AACF;AAEO,MAAM,kBAAkB,YAA6B;AAC1D,QAAM,EAAE,QAAQ,IAAI,UAAM,kCAAiB;AAE3C,SAAO;AACT;AAgBO,MAAM,sBAAsB,YAAuC;AACxE,QAAM,CAAC,OAAO,MAAM,IAAI,MAAM,QAAQ,IAAI;AAAA,IACxC,gBAAgB;AAAA,IAChB,mBAAmB;AAAA,EACrB,CAAC;AAED,MAAI,WAAW,MAAM;AAEnB,WAAO;AAAA,MACL,SAAS;AAAA,MACT;AAAA,MACA;AAAA,IACF;AAAA,EACF;AAEA,SAAO;AAAA,IACL,SAAS,WAAW;AAAA,IACpB;AAAA,IACA;AAAA,EACF;AACF;",
6
+ "names": ["npmFetch"]
7
7
  }
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "skuba",
3
- "version": "10.0.0-node-22-20250226231811",
3
+ "version": "10.0.1-sub-fixes-20250314061613",
4
4
  "private": false,
5
5
  "description": "SEEK development toolkit for backend applications and packages",
6
6
  "homepage": "https://github.com/seek-oss/skuba#readme",
@@ -78,7 +78,6 @@
78
78
  "isomorphic-git": "^1.11.1",
79
79
  "jest": "^29.0.1",
80
80
  "jest-watch-typeahead": "^2.1.1",
81
- "libnpmsearch": "^8.0.0",
82
81
  "lodash.mergewith": "^4.6.2",
83
82
  "minimist": "^1.2.6",
84
83
  "normalize-package-data": "^7.0.0",
@@ -97,13 +96,12 @@
97
96
  "tsconfig-paths": "^4.0.0",
98
97
  "tsconfig-seek": "2.0.0",
99
98
  "tsx": "^4.16.2",
100
- "typescript": "~5.7.0",
101
- "validate-npm-package-name": "^6.0.0",
99
+ "typescript": "~5.8.0",
102
100
  "zod": "^3.22.4",
103
- "eslint-config-skuba": "5.1.0-node-22-20250226231811"
101
+ "eslint-config-skuba": "5.1.0"
104
102
  },
105
103
  "devDependencies": {
106
- "@changesets/cli": "2.27.12",
104
+ "@changesets/cli": "2.28.1",
107
105
  "@changesets/get-github-info": "0.6.0",
108
106
  "@jest/reporters": "29.7.0",
109
107
  "@jest/test-result": "29.7.0",
@@ -111,7 +109,6 @@
111
109
  "@types/express": "5.0.0",
112
110
  "@types/fs-extra": "11.0.4",
113
111
  "@types/koa": "2.15.0",
114
- "@types/libnpmsearch": "2.0.7",
115
112
  "@types/lodash.mergewith": "4.6.9",
116
113
  "@types/minimist": "1.2.5",
117
114
  "@types/module-alias": "2.0.4",
@@ -120,13 +117,12 @@
120
117
  "@types/picomatch": "3.0.2",
121
118
  "@types/semver": "7.5.8",
122
119
  "@types/supertest": "6.0.2",
123
- "@types/validate-npm-package-name": "4.0.2",
124
120
  "enhanced-resolve": "5.18.1",
125
121
  "express": "5.0.1",
126
122
  "fastify": "5.2.1",
127
123
  "jest-diff": "29.7.0",
128
124
  "jsonfile": "6.1.0",
129
- "koa": "2.15.4",
125
+ "koa": "2.16.0",
130
126
  "memfs": "4.17.0",
131
127
  "remark-cli": "12.0.1",
132
128
  "remark-preset-lint-recommended": "7.0.1",
@@ -153,7 +149,7 @@
153
149
  "entryPoint": "src/index.ts",
154
150
  "template": null,
155
151
  "type": "package",
156
- "version": "9.1.0"
152
+ "version": "10.0.0"
157
153
  },
158
154
  "scripts": {
159
155
  "build": "scripts/build.sh",
package/template/express-rest-api/.gantry/common.yml CHANGED
@@ -9,5 +9,4 @@ image: '{{values "prodAccountId"}}.dkr.ecr.<%- region %>.amazonaws.com/{{values
9
9
  # datadogSecretId: arn:aws:secretsmanager:<%- region %>:<aws-account-id>:secret:<secret-name>
10
10
 
11
11
  tags:
12
- seek:source:url: 'https://github.com/SEEK-Jobs/<%- repoName %>'
13
12
  # seek:system:name: 'TODO: https://rfc.skinfra.xyz/RFC051-AWS-Tagging-Standard.html#tagging-schema'
package/template/express-rest-api/Dockerfile.dev-deps CHANGED
@@ -1,4 +1,4 @@
1
- # syntax=docker/dockerfile:1.13
1
+ # syntax=docker/dockerfile:1.14
2
2
 
3
3
  FROM public.ecr.aws/docker/library/node:22-alpine AS dev-deps
4
4
 
package/template/express-rest-api/package.json CHANGED
@@ -21,14 +21,14 @@
21
21
  },
22
22
  "devDependencies": {
23
23
  "@types/express": "^5.0.0",
24
- "@types/node": "^22.13.1",
24
+ "@types/node": "^22.13.10",
25
25
  "@types/supertest": "^6.0.0",
26
26
  "mime": "^4.0.1",
27
27
  "pino-pretty": "^13.0.0",
28
28
  "skuba": "*",
29
29
  "supertest": "^7.0.0"
30
30
  },
31
- "packageManager": "pnpm@9.15.4",
31
+ "packageManager": "pnpm@10.6.2",
32
32
  "engines": {
33
33
  "node": ">=22"
34
34
  }
package/template/greeter/Dockerfile CHANGED
@@ -1,4 +1,4 @@
1
- # syntax=docker/dockerfile:1.13
1
+ # syntax=docker/dockerfile:1.14
2
2
 
3
3
  FROM public.ecr.aws/docker/library/node:22-alpine AS dev-deps
4
4
 
package/template/greeter/package.json CHANGED
@@ -16,10 +16,10 @@
16
16
  "skuba-dive": "^2.0.0"
17
17
  },
18
18
  "devDependencies": {
19
- "@types/node": "^22.13.1",
20
- "skuba": "10.0.0-node-22-20250226231811"
19
+ "@types/node": "^22.13.10",
20
+ "skuba": "10.0.1-sub-fixes-20250314061613"
21
21
  },
22
- "packageManager": "pnpm@9.15.4",
22
+ "packageManager": "pnpm@10.6.2",
23
23
  "engines": {
24
24
  "node": ">=22"
25
25
  }
package/template/koa-rest-api/.gantry/common.yml CHANGED
@@ -9,5 +9,4 @@ image: '{{values "prodAccountId"}}.dkr.ecr.<%- region %>.amazonaws.com/{{values
9
9
  # datadogSecretId: arn:aws:secretsmanager:<%- region %>:<aws-account-id>:secret:<secret-name>
10
10
 
11
11
  tags:
12
- seek:source:url: 'https://github.com/SEEK-Jobs/<%- repoName %>'
13
12
  # seek:system:name: 'TODO: https://rfc.skinfra.xyz/RFC051-AWS-Tagging-Standard.html#tagging-schema'
package/template/koa-rest-api/Dockerfile.dev-deps CHANGED
@@ -1,4 +1,4 @@
1
- # syntax=docker/dockerfile:1.13
1
+ # syntax=docker/dockerfile:1.14
2
2
 
3
3
  FROM public.ecr.aws/docker/library/node:22-alpine AS dev-deps
4
4
 
package/template/koa-rest-api/package.json CHANGED
@@ -36,7 +36,7 @@
36
36
  "@types/co-body": "^6.1.3",
37
37
  "@types/koa": "^2.13.4",
38
38
  "@types/koa__router": "^12.0.0",
39
- "@types/node": "^22.13.1",
39
+ "@types/node": "^22.13.10",
40
40
  "@types/supertest": "^6.0.0",
41
41
  "chance": "^1.1.8",
42
42
  "mime": "^4.0.1",
@@ -44,7 +44,7 @@
44
44
  "skuba": "*",
45
45
  "supertest": "^7.0.0"
46
46
  },
47
- "packageManager": "pnpm@9.15.4",
47
+ "packageManager": "pnpm@10.6.2",
48
48
  "engines": {
49
49
  "node": ">=22"
50
50
  }
package/template/lambda-sqs-worker-cdk/Dockerfile CHANGED
@@ -1,4 +1,4 @@
1
- # syntax=docker/dockerfile:1.13
1
+ # syntax=docker/dockerfile:1.14
2
2
 
3
3
  FROM public.ecr.aws/docker/library/node:22-alpine AS dev-deps
4
4
 
package/template/lambda-sqs-worker-cdk/infra/appStack.ts CHANGED
@@ -13,7 +13,7 @@ import {
13
13
  aws_sqs,
14
14
  } from 'aws-cdk-lib';
15
15
  import type { Construct } from 'constructs';
16
- import { Datadog } from 'datadog-cdk-constructs-v2';
16
+ import { DatadogLambda } from 'datadog-cdk-constructs-v2';
17
17
 
18
18
  import { config } from './config';
19
19
 
@@ -117,7 +117,7 @@ export class AppStack extends Stack {
117
117
  config.datadogApiKeySecretArn,
118
118
  );
119
119
 
120
- const datadog = new Datadog(this, 'datadog', {
120
+ const datadog = new DatadogLambda(this, 'datadog', {
121
121
  apiKeySecret: datadogSecret,
122
122
  addLayers: false,
123
123
  enableDatadogLogs: false,
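The `lambda-sqs-worker-cdk` template's `infra/appStack.ts` above moves to v2 of `datadog-cdk-constructs-v2`, where the `Datadog` construct is renamed `DatadogLambda`. A stripped-down sketch of the renamed construct in an otherwise hypothetical stack; the secret ARN is a placeholder and only the props visible in this diff are shown:

```ts
import { Stack, aws_secretsmanager } from 'aws-cdk-lib';
import type { Construct } from 'constructs';
import { DatadogLambda } from 'datadog-cdk-constructs-v2';

export class SketchStack extends Stack {
  constructor(scope: Construct, id: string) {
    super(scope, id);

    // Placeholder ARN for illustration only.
    const datadogSecret = aws_secretsmanager.Secret.fromSecretCompleteArn(
      this,
      'datadog-api-key-secret',
      'arn:aws:secretsmanager:us-east-1:111111111111:secret:datadog-api-key-AbCdEf',
    );

    // Same props as the template diff; Datadog (v1) becomes DatadogLambda (v2).
    new DatadogLambda(this, 'datadog', {
      apiKeySecret: datadogSecret,
      addLayers: false,
      enableDatadogLogs: false,
    });
  }
}
```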
package/template/lambda-sqs-worker-cdk/package.json CHANGED
@@ -18,7 +18,7 @@
18
18
  "@aws-sdk/client-lambda": "^3.363.0",
19
19
  "@aws-sdk/client-sns": "^3.363.0",
20
20
  "@seek/logger": "^9.0.0",
21
- "datadog-lambda-js": "^9.0.0",
21
+ "datadog-lambda-js": "^10.0.0",
22
22
  "dd-trace": "^5.0.0",
23
23
  "skuba-dive": "^2.0.0",
24
24
  "zod": "^3.19.1"
@@ -27,18 +27,18 @@
27
27
  "@seek/aws-codedeploy-infra": "^2.1.0",
28
28
  "@types/aws-lambda": "^8.10.82",
29
29
  "@types/chance": "^1.1.3",
30
- "@types/node": "^22.13.1",
30
+ "@types/node": "^22.13.10",
31
31
  "aws-cdk": "^2.167.1",
32
32
  "aws-cdk-lib": "^2.167.1",
33
33
  "aws-sdk-client-mock": "^4.0.0",
34
34
  "aws-sdk-client-mock-jest": "^4.0.0",
35
35
  "chance": "^1.1.8",
36
36
  "constructs": "^10.0.17",
37
- "datadog-cdk-constructs-v2": "^1.18.0",
37
+ "datadog-cdk-constructs-v2": "^2.0.0",
38
38
  "pino-pretty": "^13.0.0",
39
- "skuba": "10.0.0-node-22-20250226231811"
39
+ "skuba": "10.0.1-sub-fixes-20250314061613"
40
40
  },
41
- "packageManager": "pnpm@9.15.4",
41
+ "packageManager": "pnpm@10.6.2",
42
42
  "engines": {
43
43
  "node": ">=22"
44
44
  }
package/template/oss-npm-package/_package.json CHANGED
@@ -34,7 +34,7 @@
34
34
  },
35
35
  "dependencies": {},
36
36
  "devDependencies": {
37
- "@types/node": "^22.13.1",
37
+ "@types/node": "^22.13.10",
38
38
  "commitizen": "^4.2.4",
39
39
  "skuba": "*"
40
40
  },
package/template/private-npm-package/_package.json CHANGED
@@ -34,7 +34,7 @@
34
34
  },
35
35
  "dependencies": {},
36
36
  "devDependencies": {
37
- "@types/node": "^22.13.1",
37
+ "@types/node": "^22.13.10",
38
38
  "commitizen": "^4.2.4",
39
39
  "skuba": "*"
40
40
  },