skuba 10.0.0-node-22-20250218222641 → 10.0.0-node-22-20250218232801
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/lib/cli/init/index.js +2 -2
- package/lib/cli/init/index.js.map +2 -2
- package/lib/cli/migrate/nodeVersion/{packageJsonChecks.d.ts → checks.d.ts} +1 -0
- package/lib/cli/migrate/nodeVersion/{packageJsonChecks.js → checks.js} +26 -10
- package/lib/cli/migrate/nodeVersion/checks.js.map +7 -0
- package/lib/cli/migrate/nodeVersion/index.js +11 -8
- package/lib/cli/migrate/nodeVersion/index.js.map +2 -2
- package/package.json +2 -2
- package/template/greeter/package.json +1 -1
- package/template/koa-rest-api/Dockerfile.dev-deps +1 -1
- package/template/lambda-sqs-worker-cdk/package.json +1 -1
- package/lib/cli/migrate/nodeVersion/packageJsonChecks.js.map +0 -7
package/lib/cli/init/index.js
CHANGED
@@ -102,8 +102,8 @@ const init = async (args = process.argv.slice(2)) => {
   import_logging.log.newline();
   await (0, import_git2.initialiseRepo)(destinationDir, templateData);
   const [manifest, packageManagerConfig] = await Promise.all([
-    (0, import_manifest.getConsumerManifest)(),
-    (0, import_packageManager.detectPackageManager)()
+    (0, import_manifest.getConsumerManifest)(destinationDir),
+    (0, import_packageManager.detectPackageManager)(destinationDir)
   ]);
   if (!manifest) {
     throw new Error("Repository doesn't contain a package.json file.");
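The change above scopes manifest and package-manager detection to the freshly cloned destinationDir rather than the process working directory. As a rough illustration of that pattern only, here is a minimal directory-scoped manifest lookup; readManifestFrom is a hypothetical name, not skuba's getConsumerManifest implementation:

```typescript
import { promises as fs } from 'fs';
import path from 'path';

// Hypothetical helper (not skuba's actual API): resolve a package.json
// relative to an explicit directory instead of process.cwd(), mirroring
// the intent of passing destinationDir to the detection calls above.
const readManifestFrom = async (
  dir: string,
): Promise<Record<string, unknown> | undefined> => {
  const manifestPath = path.join(dir, 'package.json');
  try {
    return JSON.parse(await fs.readFile(manifestPath, 'utf-8')) as Record<string, unknown>;
  } catch {
    return undefined; // caller decides how to report a missing manifest
  }
};

// Usage mirroring the diff: pin detection to the clone target, not the cwd.
// const manifest = await readManifestFrom(destinationDir);
```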
package/lib/cli/init/index.js.map
CHANGED
@@ -1,7 +1,7 @@
 {
   "version": 3,
   "sources": ["../../../src/cli/init/index.ts"],
-
"sourcesContent": ["import path from 'path';\nimport { inspect } from 'util';\n\nimport { commitAllChanges } from '../../api/git';\nimport { hasDebugFlag } from '../../utils/args';\nimport { copyFiles, createEjsRenderer } from '../../utils/copy';\nimport { createInclusionFilter } from '../../utils/dir';\nimport { createExec, ensureCommands } from '../../utils/exec';\nimport { createLogger, log } from '../../utils/logging';\nimport { showLogoAndVersionInfo } from '../../utils/logo';\nimport { getConsumerManifest } from '../../utils/manifest';\nimport { detectPackageManager } from '../../utils/packageManager';\nimport {\n BASE_TEMPLATE_DIR,\n ensureTemplateConfigDeletion,\n} from '../../utils/template';\nimport { runPrettier } from '../adapter/prettier';\nimport { tryPatchRenovateConfig } from '../lint/internalLints/patchRenovateConfig';\n\nimport { getConfig } from './getConfig';\nimport { initialiseRepo } from './git';\nimport type { Input } from './types';\nimport { writePackageJson } from './writePackageJson';\n\nexport const init = async (args = process.argv.slice(2)) => {\n const opts: Input = {\n debug: hasDebugFlag(args),\n };\n\n const skubaVersionInfo = await showLogoAndVersionInfo();\n\n const {\n destinationDir,\n entryPoint,\n packageManager,\n templateComplete,\n templateData,\n templateName,\n type,\n } = await getConfig();\n\n await ensureCommands(packageManager);\n\n const include = await createInclusionFilter([\n path.join(destinationDir, '.gitignore'),\n path.join(BASE_TEMPLATE_DIR, '_.gitignore'),\n ]);\n\n const processors = [createEjsRenderer(templateData)];\n\n await copyFiles({\n sourceRoot: BASE_TEMPLATE_DIR,\n destinationRoot: destinationDir,\n include,\n // prefer template-specific files\n overwrite: false,\n processors,\n // base template has files like _eslint.config.js\n stripUnderscorePrefix: true,\n });\n\n await copyFiles({\n sourceRoot: destinationDir,\n destinationRoot: destinationDir,\n include,\n processors,\n });\n\n await Promise.all([\n templateComplete\n ? 
ensureTemplateConfigDeletion(destinationDir)\n : Promise.resolve(),\n\n writePackageJson({\n cwd: destinationDir,\n entryPoint,\n template: templateName,\n type,\n version: skubaVersionInfo.local,\n }),\n ]);\n\n const exec = createExec({\n cwd: destinationDir,\n stdio: 'pipe',\n streamStdio: packageManager,\n });\n\n log.newline();\n await initialiseRepo(destinationDir, templateData);\n\n const [manifest, packageManagerConfig] = await Promise.all([\n getConsumerManifest(),\n detectPackageManager(),\n ]);\n\n if (!manifest) {\n throw new Error(\"Repository doesn't contain a package.json file.\");\n }\n\n // Patch in a baseline Renovate preset based on the configured Git owner.\n await tryPatchRenovateConfig({\n mode: 'format',\n dir: destinationDir,\n manifest,\n packageManager: packageManagerConfig,\n });\n\n const skubaSlug = `skuba@${skubaVersionInfo.local}`;\n\n let depsInstalled = false;\n try {\n // The `-D` shorthand is portable across our package managers.\n await exec(packageManager, 'add', '-D', skubaSlug);\n\n // Templating can initially leave certain files in an unformatted state;\n // consider a Markdown table with columns sized based on content length.\n await runPrettier('format', createLogger(opts.debug), destinationDir);\n\n depsInstalled = true;\n } catch (err) {\n log.warn(inspect(err));\n }\n\n await commitAllChanges({\n dir: destinationDir,\n message: `Clone ${templateName}`,\n });\n\n const logGitHubRepoCreation = () => {\n log.plain(\n 'Next, create an empty',\n log.bold(`${templateData.orgName}/${templateData.repoName}`),\n 'repository:',\n );\n log.ok('https://github.com/new');\n };\n\n if (!depsInstalled) {\n log.newline();\n log.warn(log.bold('\u2717 Failed to install dependencies.'));\n\n log.newline();\n logGitHubRepoCreation();\n\n log.newline();\n log.plain('Then, resume initialisation:');\n log.ok('cd', destinationDir);\n // The `-D` shorthand is portable across our package managers.\n log.ok(packageManager, 'add', '-D', skubaSlug);\n log.ok(packageManager, 'run', 'format');\n log.ok('git add --all');\n log.ok('git commit --message', `'Pin ${skubaSlug}'`);\n log.ok(`git push --set-upstream origin ${templateData.defaultBranch}`);\n\n log.newline();\n process.exitCode = 1;\n return;\n }\n\n log.newline();\n log.ok(log.bold('\u2714 Project initialised!'));\n\n log.newline();\n logGitHubRepoCreation();\n\n log.newline();\n log.plain('Then, push your local changes:');\n log.ok('cd', destinationDir);\n log.ok(`git push --set-upstream origin ${templateData.defaultBranch}`);\n\n log.newline();\n};\n"],
-
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAiB;AACjB,kBAAwB;AAExB,iBAAiC;AACjC,kBAA6B;AAC7B,kBAA6C;AAC7C,iBAAsC;AACtC,kBAA2C;AAC3C,qBAAkC;AAClC,kBAAuC;AACvC,sBAAoC;AACpC,4BAAqC;AACrC,sBAGO;AACP,sBAA4B;AAC5B,iCAAuC;AAEvC,uBAA0B;AAC1B,IAAAA,cAA+B;AAE/B,8BAAiC;AAE1B,MAAM,OAAO,OAAO,OAAO,QAAQ,KAAK,MAAM,CAAC,MAAM;AAC1D,QAAM,OAAc;AAAA,IAClB,WAAO,0BAAa,IAAI;AAAA,EAC1B;AAEA,QAAM,mBAAmB,UAAM,oCAAuB;AAEtD,QAAM;AAAA,IACJ;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,IAAI,UAAM,4BAAU;AAEpB,YAAM,4BAAe,cAAc;AAEnC,QAAM,UAAU,UAAM,kCAAsB;AAAA,IAC1C,YAAAC,QAAK,KAAK,gBAAgB,YAAY;AAAA,IACtC,YAAAA,QAAK,KAAK,mCAAmB,aAAa;AAAA,EAC5C,CAAC;AAED,QAAM,aAAa,KAAC,+BAAkB,YAAY,CAAC;AAEnD,YAAM,uBAAU;AAAA,IACd,YAAY;AAAA,IACZ,iBAAiB;AAAA,IACjB;AAAA;AAAA,IAEA,WAAW;AAAA,IACX;AAAA;AAAA,IAEA,uBAAuB;AAAA,EACzB,CAAC;AAED,YAAM,uBAAU;AAAA,IACd,YAAY;AAAA,IACZ,iBAAiB;AAAA,IACjB;AAAA,IACA;AAAA,EACF,CAAC;AAED,QAAM,QAAQ,IAAI;AAAA,IAChB,uBACI,8CAA6B,cAAc,IAC3C,QAAQ,QAAQ;AAAA,QAEpB,0CAAiB;AAAA,MACf,KAAK;AAAA,MACL;AAAA,MACA,UAAU;AAAA,MACV;AAAA,MACA,SAAS,iBAAiB;AAAA,IAC5B,CAAC;AAAA,EACH,CAAC;AAED,QAAM,WAAO,wBAAW;AAAA,IACtB,KAAK;AAAA,IACL,OAAO;AAAA,IACP,aAAa;AAAA,EACf,CAAC;AAED,qBAAI,QAAQ;AACZ,YAAM,4BAAe,gBAAgB,YAAY;AAEjD,QAAM,CAAC,UAAU,oBAAoB,IAAI,MAAM,QAAQ,IAAI;AAAA,QACzD,qCAAoB;AAAA,
+
"sourcesContent": ["import path from 'path';\nimport { inspect } from 'util';\n\nimport { commitAllChanges } from '../../api/git';\nimport { hasDebugFlag } from '../../utils/args';\nimport { copyFiles, createEjsRenderer } from '../../utils/copy';\nimport { createInclusionFilter } from '../../utils/dir';\nimport { createExec, ensureCommands } from '../../utils/exec';\nimport { createLogger, log } from '../../utils/logging';\nimport { showLogoAndVersionInfo } from '../../utils/logo';\nimport { getConsumerManifest } from '../../utils/manifest';\nimport { detectPackageManager } from '../../utils/packageManager';\nimport {\n BASE_TEMPLATE_DIR,\n ensureTemplateConfigDeletion,\n} from '../../utils/template';\nimport { runPrettier } from '../adapter/prettier';\nimport { tryPatchRenovateConfig } from '../lint/internalLints/patchRenovateConfig';\n\nimport { getConfig } from './getConfig';\nimport { initialiseRepo } from './git';\nimport type { Input } from './types';\nimport { writePackageJson } from './writePackageJson';\n\nexport const init = async (args = process.argv.slice(2)) => {\n const opts: Input = {\n debug: hasDebugFlag(args),\n };\n\n const skubaVersionInfo = await showLogoAndVersionInfo();\n\n const {\n destinationDir,\n entryPoint,\n packageManager,\n templateComplete,\n templateData,\n templateName,\n type,\n } = await getConfig();\n\n await ensureCommands(packageManager);\n\n const include = await createInclusionFilter([\n path.join(destinationDir, '.gitignore'),\n path.join(BASE_TEMPLATE_DIR, '_.gitignore'),\n ]);\n\n const processors = [createEjsRenderer(templateData)];\n\n await copyFiles({\n sourceRoot: BASE_TEMPLATE_DIR,\n destinationRoot: destinationDir,\n include,\n // prefer template-specific files\n overwrite: false,\n processors,\n // base template has files like _eslint.config.js\n stripUnderscorePrefix: true,\n });\n\n await copyFiles({\n sourceRoot: destinationDir,\n destinationRoot: destinationDir,\n include,\n processors,\n });\n\n await Promise.all([\n templateComplete\n ? 
ensureTemplateConfigDeletion(destinationDir)\n : Promise.resolve(),\n\n writePackageJson({\n cwd: destinationDir,\n entryPoint,\n template: templateName,\n type,\n version: skubaVersionInfo.local,\n }),\n ]);\n\n const exec = createExec({\n cwd: destinationDir,\n stdio: 'pipe',\n streamStdio: packageManager,\n });\n\n log.newline();\n await initialiseRepo(destinationDir, templateData);\n\n const [manifest, packageManagerConfig] = await Promise.all([\n getConsumerManifest(destinationDir),\n detectPackageManager(destinationDir),\n ]);\n\n if (!manifest) {\n throw new Error(\"Repository doesn't contain a package.json file.\");\n }\n\n // Patch in a baseline Renovate preset based on the configured Git owner.\n await tryPatchRenovateConfig({\n mode: 'format',\n dir: destinationDir,\n manifest,\n packageManager: packageManagerConfig,\n });\n\n const skubaSlug = `skuba@${skubaVersionInfo.local}`;\n\n let depsInstalled = false;\n try {\n // The `-D` shorthand is portable across our package managers.\n await exec(packageManager, 'add', '-D', skubaSlug);\n\n // Templating can initially leave certain files in an unformatted state;\n // consider a Markdown table with columns sized based on content length.\n await runPrettier('format', createLogger(opts.debug), destinationDir);\n\n depsInstalled = true;\n } catch (err) {\n log.warn(inspect(err));\n }\n\n await commitAllChanges({\n dir: destinationDir,\n message: `Clone ${templateName}`,\n });\n\n const logGitHubRepoCreation = () => {\n log.plain(\n 'Next, create an empty',\n log.bold(`${templateData.orgName}/${templateData.repoName}`),\n 'repository:',\n );\n log.ok('https://github.com/new');\n };\n\n if (!depsInstalled) {\n log.newline();\n log.warn(log.bold('\u2717 Failed to install dependencies.'));\n\n log.newline();\n logGitHubRepoCreation();\n\n log.newline();\n log.plain('Then, resume initialisation:');\n log.ok('cd', destinationDir);\n // The `-D` shorthand is portable across our package managers.\n log.ok(packageManager, 'add', '-D', skubaSlug);\n log.ok(packageManager, 'run', 'format');\n log.ok('git add --all');\n log.ok('git commit --message', `'Pin ${skubaSlug}'`);\n log.ok(`git push --set-upstream origin ${templateData.defaultBranch}`);\n\n log.newline();\n process.exitCode = 1;\n return;\n }\n\n log.newline();\n log.ok(log.bold('\u2714 Project initialised!'));\n\n log.newline();\n logGitHubRepoCreation();\n\n log.newline();\n log.plain('Then, push your local changes:');\n log.ok('cd', destinationDir);\n log.ok(`git push --set-upstream origin ${templateData.defaultBranch}`);\n\n log.newline();\n};\n"],
+
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAiB;AACjB,kBAAwB;AAExB,iBAAiC;AACjC,kBAA6B;AAC7B,kBAA6C;AAC7C,iBAAsC;AACtC,kBAA2C;AAC3C,qBAAkC;AAClC,kBAAuC;AACvC,sBAAoC;AACpC,4BAAqC;AACrC,sBAGO;AACP,sBAA4B;AAC5B,iCAAuC;AAEvC,uBAA0B;AAC1B,IAAAA,cAA+B;AAE/B,8BAAiC;AAE1B,MAAM,OAAO,OAAO,OAAO,QAAQ,KAAK,MAAM,CAAC,MAAM;AAC1D,QAAM,OAAc;AAAA,IAClB,WAAO,0BAAa,IAAI;AAAA,EAC1B;AAEA,QAAM,mBAAmB,UAAM,oCAAuB;AAEtD,QAAM;AAAA,IACJ;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,IAAI,UAAM,4BAAU;AAEpB,YAAM,4BAAe,cAAc;AAEnC,QAAM,UAAU,UAAM,kCAAsB;AAAA,IAC1C,YAAAC,QAAK,KAAK,gBAAgB,YAAY;AAAA,IACtC,YAAAA,QAAK,KAAK,mCAAmB,aAAa;AAAA,EAC5C,CAAC;AAED,QAAM,aAAa,KAAC,+BAAkB,YAAY,CAAC;AAEnD,YAAM,uBAAU;AAAA,IACd,YAAY;AAAA,IACZ,iBAAiB;AAAA,IACjB;AAAA;AAAA,IAEA,WAAW;AAAA,IACX;AAAA;AAAA,IAEA,uBAAuB;AAAA,EACzB,CAAC;AAED,YAAM,uBAAU;AAAA,IACd,YAAY;AAAA,IACZ,iBAAiB;AAAA,IACjB;AAAA,IACA;AAAA,EACF,CAAC;AAED,QAAM,QAAQ,IAAI;AAAA,IAChB,uBACI,8CAA6B,cAAc,IAC3C,QAAQ,QAAQ;AAAA,QAEpB,0CAAiB;AAAA,MACf,KAAK;AAAA,MACL;AAAA,MACA,UAAU;AAAA,MACV;AAAA,MACA,SAAS,iBAAiB;AAAA,IAC5B,CAAC;AAAA,EACH,CAAC;AAED,QAAM,WAAO,wBAAW;AAAA,IACtB,KAAK;AAAA,IACL,OAAO;AAAA,IACP,aAAa;AAAA,EACf,CAAC;AAED,qBAAI,QAAQ;AACZ,YAAM,4BAAe,gBAAgB,YAAY;AAEjD,QAAM,CAAC,UAAU,oBAAoB,IAAI,MAAM,QAAQ,IAAI;AAAA,QACzD,qCAAoB,cAAc;AAAA,QAClC,4CAAqB,cAAc;AAAA,EACrC,CAAC;AAED,MAAI,CAAC,UAAU;AACb,UAAM,IAAI,MAAM,iDAAiD;AAAA,EACnE;AAGA,YAAM,mDAAuB;AAAA,IAC3B,MAAM;AAAA,IACN,KAAK;AAAA,IACL;AAAA,IACA,gBAAgB;AAAA,EAClB,CAAC;AAED,QAAM,YAAY,SAAS,iBAAiB,KAAK;AAEjD,MAAI,gBAAgB;AACpB,MAAI;AAEF,UAAM,KAAK,gBAAgB,OAAO,MAAM,SAAS;AAIjD,cAAM,6BAAY,cAAU,6BAAa,KAAK,KAAK,GAAG,cAAc;AAEpE,oBAAgB;AAAA,EAClB,SAAS,KAAK;AACZ,uBAAI,SAAK,qBAAQ,GAAG,CAAC;AAAA,EACvB;AAEA,YAAM,6BAAiB;AAAA,IACrB,KAAK;AAAA,IACL,SAAS,SAAS,YAAY;AAAA,EAChC,CAAC;AAED,QAAM,wBAAwB,MAAM;AAClC,uBAAI;AAAA,MACF;AAAA,MACA,mBAAI,KAAK,GAAG,aAAa,OAAO,IAAI,aAAa,QAAQ,EAAE;AAAA,MAC3D;AAAA,IACF;AACA,uBAAI,GAAG,wBAAwB;AAAA,EACjC;AAEA,MAAI,CAAC,eAAe;AAClB,uBAAI,QAAQ;AACZ,uBAAI,KAAK,mBAAI,KAAK,wCAAmC,CAAC;AAEtD,uBAAI,QAAQ;AACZ,0BAAsB;AAEtB,uBAAI,QAAQ;AACZ,uBAAI,MAAM,8BAA8B;AACxC,uBAAI,GAAG,MAAM,cAAc;AAE3B,uBAAI,GAAG,gBAAgB,OAAO,MAAM,SAAS;AAC7C,uBAAI,GAAG,gBAAgB,OAAO,QAAQ;AACtC,uBAAI,GAAG,eAAe;AACtB,uBAAI,GAAG,wBAAwB,QAAQ,SAAS,GAAG;AACnD,uBAAI,GAAG,kCAAkC,aAAa,aAAa,EAAE;AAErE,uBAAI,QAAQ;AACZ,YAAQ,WAAW;AACnB;AAAA,EACF;AAEA,qBAAI,QAAQ;AACZ,qBAAI,GAAG,mBAAI,KAAK,6BAAwB,CAAC;AAEzC,qBAAI,QAAQ;AACZ,wBAAsB;AAEtB,qBAAI,QAAQ;AACZ,qBAAI,MAAM,gCAAgC;AAC1C,qBAAI,GAAG,MAAM,cAAc;AAC3B,qBAAI,GAAG,kCAAkC,aAAa,aAAa,EAAE;AAErE,qBAAI,QAAQ;AACd;",
   "names": ["import_git", "path"]
 }
package/lib/cli/migrate/nodeVersion/{packageJsonChecks.d.ts → checks.d.ts}
@@ -2,3 +2,4 @@ import { type ZodRawShape, z } from 'zod';
 export declare const extractFromParentPackageJson: <T extends ZodRawShape>(schema: z.ZodObject<T>) => Promise<z.infer<typeof schema> | undefined>;
 export declare const isPatchableServerlessVersion: () => Promise<boolean>;
 export declare const isPatchableSkubaType: () => Promise<boolean>;
+export declare const isPatchableNodeVersion: (targetNodeVersion: number) => Promise<boolean>;
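The new declaration is an async predicate keyed on the target Node.js major version. A caller awaits it before applying any patches, roughly as the migration entry point does later in this diff (illustrative snippet only):

```typescript
import { isPatchableNodeVersion } from './checks';

// Abort the migration early when the current .nvmrc already exceeds the target.
const ensurePatchable = async (targetNodeVersion: number): Promise<void> => {
  if (!(await isPatchableNodeVersion(targetNodeVersion))) {
    throw new Error('Node version is not patchable');
  }
};
```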
package/lib/cli/migrate/nodeVersion/{packageJsonChecks.js → checks.js}
@@ -26,27 +26,28 @@ var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__ge
   mod
 ));
 var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
-var
-__export(
+var checks_exports = {};
+__export(checks_exports, {
   extractFromParentPackageJson: () => extractFromParentPackageJson,
+  isPatchableNodeVersion: () => isPatchableNodeVersion,
   isPatchableServerlessVersion: () => isPatchableServerlessVersion,
   isPatchableSkubaType: () => isPatchableSkubaType
 });
-module.exports = __toCommonJS(
+module.exports = __toCommonJS(checks_exports);
 var import_find_up = __toESM(require("find-up"));
 var import_fs_extra = __toESM(require("fs-extra"));
 var import_semver = require("semver");
 var import_zod = require("zod");
 var import_logging = require("../../../utils/logging");
-const
-const
-if (!
-throw new Error(
+const getParentFile = async (file) => {
+  const path = await (0, import_find_up.default)(file, { cwd: process.cwd() });
+  if (!path) {
+    throw new Error(`${file} not found`);
   }
-return import_fs_extra.default.readFile(
+  return import_fs_extra.default.readFile(path, "utf-8");
 };
 const extractFromParentPackageJson = async (schema) => {
-const packageJson = await
+  const packageJson = await getParentFile("package.json");
   let rawJSON;
   try {
     rawJSON = JSON.parse(packageJson);
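The rename generalises the old getParentPackageJson helper into getParentFile, which walks up from the working directory to find any named file (package.json or .nvmrc). Reconstructed from the TypeScript source embedded in the source map below, the helper is roughly:

```typescript
import findUp from 'find-up';
import fs from 'fs-extra';

// Locate the nearest ancestor copy of `file` and return its contents.
const getParentFile = async (file: string): Promise<string> => {
  const filePath = await findUp(file, { cwd: process.cwd() });
  if (!filePath) {
    throw new Error(`${file} not found`);
  }
  return fs.readFile(filePath, 'utf-8');
};

// e.g. const nvmrc = await getParentFile('.nvmrc');
```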
@@ -103,10 +104,25 @@ const isPatchableSkubaType = async () => {
   import_logging.log.ok("skuba type supported, proceeding with migration");
   return true;
 };
+const isPatchableNodeVersion = async (targetNodeVersion) => {
+  const currentNodeVersion = await getParentFile(".nvmrc");
+  const coercedTargetVersion = (0, import_semver.coerce)(targetNodeVersion.toString())?.version;
+  const coercedCurrentVersion = (0, import_semver.coerce)(currentNodeVersion)?.version;
+  const isNodeVersionValid = coercedTargetVersion && coercedCurrentVersion && (0, import_semver.lte)(coercedCurrentVersion, coercedTargetVersion);
+  if (!isNodeVersionValid) {
+    import_logging.log.warn(
+      `Node version in .nvmrc is higher than the target version ${coercedTargetVersion}, please ensure the target version is greater than the current version ${coercedCurrentVersion}`
+    );
+    return false;
+  }
+  import_logging.log.ok("Valid node version found, proceeding with migration");
+  return true;
+};
 // Annotate the CommonJS export names for ESM import in node:
 0 && (module.exports = {
   extractFromParentPackageJson,
+  isPatchableNodeVersion,
   isPatchableServerlessVersion,
   isPatchableSkubaType
 });
-//# sourceMappingURL=
+//# sourceMappingURL=checks.js.map
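The new check coerces both versions and only treats the migration as patchable when the version pinned in .nvmrc is less than or equal to the target. A standalone sketch of that comparison with the semver package (example values are illustrative):

```typescript
import { coerce, lte } from 'semver';

// True when the currently pinned version (e.g. the .nvmrc contents) is at or
// below the target major, i.e. the migration moves the project forward.
const isForwardUpgrade = (current: string, target: number): boolean => {
  const coercedCurrent = coerce(current)?.version; // '20' -> '20.0.0'
  const coercedTarget = coerce(target.toString())?.version; // 22 -> '22.0.0'
  return Boolean(
    coercedCurrent && coercedTarget && lte(coercedCurrent, coercedTarget),
  );
};

isForwardUpgrade('20.9.0', 22); // true  - patchable
isForwardUpgrade('23', 22); // false - .nvmrc is already ahead of the target
```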
package/lib/cli/migrate/nodeVersion/checks.js.map
@@ -0,0 +1,7 @@
+{
+  "version": 3,
+  "sources": ["../../../../src/cli/migrate/nodeVersion/checks.ts"],
+
"sourcesContent": ["import findUp from 'find-up';\nimport fs from 'fs-extra';\nimport { coerce, lte, satisfies } from 'semver';\nimport { type ZodRawShape, z } from 'zod';\n\nimport { log } from '../../../utils/logging';\n\nconst getParentFile = async (file: string) => {\n const path = await findUp(file, { cwd: process.cwd() });\n if (!path) {\n throw new Error(`${file} not found`);\n }\n return fs.readFile(path, 'utf-8');\n};\n\nexport const extractFromParentPackageJson = async <T extends ZodRawShape>(\n schema: z.ZodObject<T>,\n): Promise<z.infer<typeof schema> | undefined> => {\n const packageJson = await getParentFile('package.json');\n let rawJSON;\n try {\n rawJSON = JSON.parse(packageJson) as unknown;\n } catch {\n throw new Error('package.json is not valid JSON');\n }\n const result = schema.safeParse(rawJSON);\n if (!result.success) {\n return undefined;\n }\n\n return result.data;\n};\n\nexport const isPatchableServerlessVersion = async (): Promise<boolean> => {\n const serverlessVersion = (\n await extractFromParentPackageJson(\n z.object({\n devDependencies: z.object({\n serverless: z.string(),\n }),\n }),\n )\n )?.devDependencies.serverless;\n\n if (!serverlessVersion) {\n log.subtle('Serverless version not found, assuming it is not a dependency');\n return true;\n }\n\n if (!satisfies(serverlessVersion, '4.x.x')) {\n log.warn(\n 'Serverless version not supported, please upgrade to 4.x to automatically update serverless files',\n );\n return false;\n }\n\n log.ok('Serverless version is supported, proceeding with migration');\n return true;\n};\n\nexport const isPatchableSkubaType = async (): Promise<boolean> => {\n const type = (\n await extractFromParentPackageJson(\n z.object({\n skuba: z.object({\n type: z.string(),\n }),\n }),\n )\n )?.skuba.type;\n\n if (!type) {\n log.warn(\n \"skuba type couldn't be found, please specify the type of project in the package.json, to ensure the correct migration is applied\",\n );\n return false;\n }\n if (type === 'package') {\n log.warn(\n 'skuba type package is not supported, packages should be updated manually to ensure major runtime deprecations are intended',\n );\n return false;\n }\n\n log.ok('skuba type supported, proceeding with migration');\n return true;\n};\n\nexport const isPatchableNodeVersion = async (\n targetNodeVersion: number,\n): Promise<boolean> => {\n const currentNodeVersion = await getParentFile('.nvmrc');\n\n const coercedTargetVersion = coerce(targetNodeVersion.toString())?.version;\n const coercedCurrentVersion = coerce(currentNodeVersion)?.version;\n\n const isNodeVersionValid =\n coercedTargetVersion &&\n coercedCurrentVersion &&\n lte(coercedCurrentVersion, coercedTargetVersion);\n\n if (!isNodeVersionValid) {\n log.warn(\n `Node version in .nvmrc is higher than the target version ${coercedTargetVersion}, please ensure the target version is greater than the current version ${coercedCurrentVersion}`,\n );\n return false;\n }\n\n log.ok('Valid node version found, proceeding with migration');\n return true;\n};\n"],
+
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,qBAAmB;AACnB,sBAAe;AACf,oBAAuC;AACvC,iBAAoC;AAEpC,qBAAoB;AAEpB,MAAM,gBAAgB,OAAO,SAAiB;AAC5C,QAAM,OAAO,UAAM,eAAAA,SAAO,MAAM,EAAE,KAAK,QAAQ,IAAI,EAAE,CAAC;AACtD,MAAI,CAAC,MAAM;AACT,UAAM,IAAI,MAAM,GAAG,IAAI,YAAY;AAAA,EACrC;AACA,SAAO,gBAAAC,QAAG,SAAS,MAAM,OAAO;AAClC;AAEO,MAAM,+BAA+B,OAC1C,WACgD;AAChD,QAAM,cAAc,MAAM,cAAc,cAAc;AACtD,MAAI;AACJ,MAAI;AACF,cAAU,KAAK,MAAM,WAAW;AAAA,EAClC,QAAQ;AACN,UAAM,IAAI,MAAM,gCAAgC;AAAA,EAClD;AACA,QAAM,SAAS,OAAO,UAAU,OAAO;AACvC,MAAI,CAAC,OAAO,SAAS;AACnB,WAAO;AAAA,EACT;AAEA,SAAO,OAAO;AAChB;AAEO,MAAM,+BAA+B,YAA8B;AACxE,QAAM,qBACJ,MAAM;AAAA,IACJ,aAAE,OAAO;AAAA,MACP,iBAAiB,aAAE,OAAO;AAAA,QACxB,YAAY,aAAE,OAAO;AAAA,MACvB,CAAC;AAAA,IACH,CAAC;AAAA,EACH,IACC,gBAAgB;AAEnB,MAAI,CAAC,mBAAmB;AACtB,uBAAI,OAAO,+DAA+D;AAC1E,WAAO;AAAA,EACT;AAEA,MAAI,KAAC,yBAAU,mBAAmB,OAAO,GAAG;AAC1C,uBAAI;AAAA,MACF;AAAA,IACF;AACA,WAAO;AAAA,EACT;AAEA,qBAAI,GAAG,4DAA4D;AACnE,SAAO;AACT;AAEO,MAAM,uBAAuB,YAA8B;AAChE,QAAM,QACJ,MAAM;AAAA,IACJ,aAAE,OAAO;AAAA,MACP,OAAO,aAAE,OAAO;AAAA,QACd,MAAM,aAAE,OAAO;AAAA,MACjB,CAAC;AAAA,IACH,CAAC;AAAA,EACH,IACC,MAAM;AAET,MAAI,CAAC,MAAM;AACT,uBAAI;AAAA,MACF;AAAA,IACF;AACA,WAAO;AAAA,EACT;AACA,MAAI,SAAS,WAAW;AACtB,uBAAI;AAAA,MACF;AAAA,IACF;AACA,WAAO;AAAA,EACT;AAEA,qBAAI,GAAG,iDAAiD;AACxD,SAAO;AACT;AAEO,MAAM,yBAAyB,OACpC,sBACqB;AACrB,QAAM,qBAAqB,MAAM,cAAc,QAAQ;AAEvD,QAAM,2BAAuB,sBAAO,kBAAkB,SAAS,CAAC,GAAG;AACnE,QAAM,4BAAwB,sBAAO,kBAAkB,GAAG;AAE1D,QAAM,qBACJ,wBACA,6BACA,mBAAI,uBAAuB,oBAAoB;AAEjD,MAAI,CAAC,oBAAoB;AACvB,uBAAI;AAAA,MACF,4DAA4D,oBAAoB,0EAA0E,qBAAqB;AAAA,IACjL;AACA,WAAO;AAAA,EACT;AAEA,qBAAI,GAAG,qDAAqD;AAC5D,SAAO;AACT;",
+  "names": ["findUp", "fs"]
+}
package/lib/cli/migrate/nodeVersion/index.js
CHANGED
@@ -36,8 +36,8 @@ var import_fast_glob = require("fast-glob");
 var import_fs_extra = __toESM(require("fs-extra"));
 var import_logging = require("../../../utils/logging");
 var import_project = require("../../configure/analysis/project");
+var import_checks = require("./checks");
 var import_getNodeTypesVersion = require("./getNodeTypesVersion");
-var import_packageJsonChecks = require("./packageJsonChecks");
 const subPatches = ({
   nodeVersion,
   nodeTypesVersion,
@@ -58,7 +58,7 @@ const subPatches = ({
   {
     files: "**/serverless*.y*ml",
     regex: /nodejs\d+.x/gm,
-    tests: [
+    tests: [import_checks.isPatchableServerlessVersion],
     replace: `nodejs${nodeVersion}.x`
   },
   [
@@ -87,13 +87,13 @@ const subPatches = ({
     {
       files: "**/package.json",
       regex: /(\\?"@types\/node\\?": \\?")(\^)?[0-9.]+(\\?(",?)\\?n?)/gm,
-      tests: [
+      tests: [import_checks.isPatchableServerlessVersion],
       replace: `$1$2${nodeTypesVersion}$4`
     },
     {
       files: "**/package.json",
       regex: /(\\?"engines\\?":\s*{\\?n?[^}]*\\?"node\\?":\s*\\?">=)(\d+)\\?("[^}]*})(?![^}]*\\?"skuba\\?":\s*{\\?n?[^}]*\\?"type\\?":\s*\\?"package\\?")/gm,
-      tests: [
+      tests: [import_checks.isPatchableServerlessVersion, import_checks.isPatchableSkubaType],
       replace: `$1${nodeVersion}$3`
     }
   ],
@@ -101,13 +101,13 @@ const subPatches = ({
     {
       files: "**/tsconfig*.json",
       regex: /("target":\s*")(ES\d+)"/gim,
-      tests: [
+      tests: [import_checks.isPatchableServerlessVersion, import_checks.isPatchableSkubaType],
       replace: `$1${ECMAScriptVersion}"`
     },
     {
       files: "**/tsconfig*.json",
       regex: /("lib":\s*\[)([\S\s]*?)(ES\d+)([\S\s]*?)(\])/gim,
-      tests: [
+      tests: [import_checks.isPatchableServerlessVersion, import_checks.isPatchableSkubaType],
       replace: `$1$2${ECMAScriptVersion}$4$5`
     }
   ],
@@ -174,6 +174,9 @@ const nodeVersionMigration = async ({
 }, dir = process.cwd()) => {
   import_logging.log.ok(`Upgrading to Node.js ${nodeVersion}`);
   try {
+    if (!await (0, import_checks.isPatchableNodeVersion)(nodeVersion)) {
+      throw new Error("Node version is not patchable");
+    }
     const { version: nodeTypesVersion, err } = await (0, import_getNodeTypesVersion.getNodeTypesVersion)(
       nodeVersion,
       defaultNodeTypesVersion
@@ -183,9 +186,9 @@ const nodeVersionMigration = async ({
     }
     await upgrade({ nodeVersion, nodeTypesVersion, ECMAScriptVersion }, dir);
     import_logging.log.ok("Upgraded to Node.js", nodeVersion);
-  } catch (
+  } catch (error) {
     import_logging.log.err("Failed to upgrade");
-    import_logging.log.subtle((0, import_util.inspect)(
+    import_logging.log.subtle((0, import_util.inspect)(error));
     process.exitCode = 1;
   }
 };
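Each sub-patch now carries an optional tests array of async guards (the checks above), and a patch is only applied when the file matches its regex and every guard resolves to true. A condensed sketch of that gating, simplified from the runSubPatch logic visible in the source map below:

```typescript
type SubPatch = {
  regex?: RegExp;
  replace: string;
  tests?: Array<() => Promise<boolean>>;
};

// Apply a patch to file contents only when its regex matches and all guards pass.
const applyPatch = async (contents: string, patch: SubPatch): Promise<string> => {
  if (patch.regex && !patch.regex.test(contents)) {
    return contents; // nothing to patch in this file
  }
  if (patch.tests) {
    const results = await Promise.all(patch.tests.map((test) => test()));
    if (!results.every(Boolean)) {
      return contents; // a guard such as isPatchableSkubaType vetoed the patch
    }
  }
  return patch.regex ? contents.replaceAll(patch.regex, patch.replace) : patch.replace;
};
```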
package/lib/cli/migrate/nodeVersion/index.js.map
CHANGED
@@ -1,7 +1,7 @@
 {
   "version": 3,
   "sources": ["../../../../src/cli/migrate/nodeVersion/index.ts"],
-
"sourcesContent": ["import { inspect } from 'util';\n\nimport { glob } from 'fast-glob';\nimport fs from 'fs-extra';\n\nimport { log } from '../../../utils/logging';\nimport { createDestinationFileReader } from '../../configure/analysis/project';\n\nimport {
-
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAwB;AAExB,uBAAqB;AACrB,sBAAe;AAEf,qBAAoB;AACpB,qBAA4C;AAE5C,iCAAoC;
+
"sourcesContent": ["import { inspect } from 'util';\n\nimport { glob } from 'fast-glob';\nimport fs from 'fs-extra';\n\nimport { log } from '../../../utils/logging';\nimport { createDestinationFileReader } from '../../configure/analysis/project';\n\nimport {\n isPatchableNodeVersion,\n isPatchableServerlessVersion,\n isPatchableSkubaType,\n} from './checks';\nimport { getNodeTypesVersion } from './getNodeTypesVersion';\n\ntype FileSelector =\n | { files: string; file?: never }\n | { file: string; files?: never };\n\ntype SubPatch = FileSelector & {\n tests?: Array<() => Promise<boolean>>;\n regex?: RegExp;\n replace: string;\n};\n\ntype SubPatches = SubPatch | SubPatch[];\n\nconst subPatches = ({\n nodeVersion,\n nodeTypesVersion,\n ECMAScriptVersion,\n}: Versions): SubPatches[] => [\n { file: '.nvmrc', replace: `${nodeVersion}\\n` },\n {\n files: '**/Dockerfile*',\n\n regex:\n /^FROM(.*) (public.ecr.aws\\/docker\\/library\\/)?node:([0-9]+(?:\\.[0-9]+(?:\\.[0-9]+)?)?)(-[a-z0-9]+)?(@sha256:[a-f0-9]{64})?( .*)?$/gm,\n replace: `FROM$1 $2node:${nodeVersion}$4$6`,\n },\n {\n files: '**/Dockerfile*',\n regex:\n /^FROM(.*) gcr.io\\/distroless\\/nodejs\\d+-debian(\\d+)(@sha256:[a-f0-9]{64})?(\\.[^- \\n]+)?(-[^ \\n]+)?( .+|)$/gm,\n replace: `FROM$1 gcr.io/distroless/nodejs${nodeVersion}-debian$2$4$5$6`,\n },\n {\n files: '**/serverless*.y*ml',\n regex: /nodejs\\d+.x/gm,\n tests: [isPatchableServerlessVersion],\n replace: `nodejs${nodeVersion}.x`,\n },\n [\n {\n files: '**/infra/**/*.ts',\n regex: /NODEJS_\\d+_X/g,\n replace: `NODEJS_${nodeVersion}_X`,\n },\n {\n files: '**/infra/**/*.ts',\n regex: /(target:\\s*'node)(\\d+)(.+)$/gm,\n replace: `$1${nodeVersion}$3`,\n },\n ],\n {\n files: '**/.buildkite/*',\n regex:\n /(image: )(public.ecr.aws\\/docker\\/library\\/)?(node:)[0-9.]+(\\.[^- \\n]+)?(-[^ \\n]+)?$/gm,\n replace: `$1$2$3${nodeVersion}$5`,\n },\n {\n files: '.node-version*',\n regex: /(v)?\\d+\\.\\d+\\.\\d+(.+)?/gm,\n replace: `$1${nodeVersion}$2`,\n },\n [\n {\n files: '**/package.json',\n regex: /(\\\\?\"@types\\/node\\\\?\": \\\\?\")(\\^)?[0-9.]+(\\\\?(\",?)\\\\?n?)/gm,\n tests: [isPatchableServerlessVersion],\n replace: `$1$2${nodeTypesVersion}$4`,\n },\n {\n files: '**/package.json',\n regex:\n /(\\\\?\"engines\\\\?\":\\s*{\\\\?n?[^}]*\\\\?\"node\\\\?\":\\s*\\\\?\">=)(\\d+)\\\\?(\"[^}]*})(?![^}]*\\\\?\"skuba\\\\?\":\\s*{\\\\?n?[^}]*\\\\?\"type\\\\?\":\\s*\\\\?\"package\\\\?\")/gm,\n tests: [isPatchableServerlessVersion, isPatchableSkubaType],\n replace: `$1${nodeVersion}$3`,\n },\n ],\n [\n {\n files: '**/tsconfig*.json',\n regex: /(\"target\":\\s*\")(ES\\d+)\"/gim,\n tests: [isPatchableServerlessVersion, isPatchableSkubaType],\n replace: `$1${ECMAScriptVersion}\"`,\n },\n {\n files: '**/tsconfig*.json',\n regex: /(\"lib\":\\s*\\[)([\\S\\s]*?)(ES\\d+)([\\S\\s]*?)(\\])/gim,\n tests: [isPatchableServerlessVersion, isPatchableSkubaType],\n replace: `$1$2${ECMAScriptVersion}$4$5`,\n },\n ],\n {\n files: '**/docker-compose*.y*ml',\n regex:\n /(image: )(public.ecr.aws\\/docker\\/library\\/)?(node:)[0-9.]+(\\.[^- \\n]+)?(-[^ \\n]+)?$/gm,\n\n replace: `$1$2$3${nodeVersion}$5`,\n },\n];\n\ntype Versions = {\n nodeVersion: number;\n nodeTypesVersion: string;\n ECMAScriptVersion: string;\n};\n\nconst runSubPatch = async (dir: string, patch: SubPatches) => {\n if (Array.isArray(patch)) {\n for (const subPatch of patch) {\n await runSubPatch(dir, subPatch);\n }\n return;\n }\n const readFile = createDestinationFileReader(dir);\n const paths = patch.file\n ? 
[patch.file]\n : await glob(patch.files ?? [], { cwd: dir });\n\n await Promise.all(\n paths.map(async (path) => {\n if (path.includes('node_modules')) {\n return;\n }\n const contents = await readFile(path);\n if (!contents) {\n return;\n }\n\n if (patch.regex && !patch.regex.test(contents)) {\n return;\n }\n\n if (patch.tests) {\n const results = await Promise.all(patch.tests.map((test) => test()));\n if (!results.every(Boolean)) {\n return;\n }\n }\n\n await writePatchedContents({\n path,\n contents,\n templated: patch.replace,\n regex: patch.regex,\n });\n }),\n );\n};\n\nconst writePatchedContents = async ({\n path,\n contents,\n templated,\n regex,\n}: {\n path: string;\n contents: string;\n templated: string;\n regex?: RegExp;\n}) =>\n await fs.promises.writeFile(\n path,\n regex ? contents.replaceAll(regex, templated) : templated,\n );\n\nconst upgrade = async (versions: Versions, dir: string) => {\n await Promise.all(\n subPatches(versions).map((subPatch) => runSubPatch(dir, subPatch)),\n );\n};\n\nexport const nodeVersionMigration = async (\n {\n nodeVersion,\n ECMAScriptVersion,\n defaultNodeTypesVersion,\n }: {\n nodeVersion: number;\n ECMAScriptVersion: string;\n defaultNodeTypesVersion: string;\n },\n dir = process.cwd(),\n) => {\n log.ok(`Upgrading to Node.js ${nodeVersion}`);\n try {\n if (!(await isPatchableNodeVersion(nodeVersion))) {\n throw new Error('Node version is not patchable');\n }\n\n const { version: nodeTypesVersion, err } = await getNodeTypesVersion(\n nodeVersion,\n defaultNodeTypesVersion,\n );\n if (err) {\n log.warn(err);\n }\n await upgrade({ nodeVersion, nodeTypesVersion, ECMAScriptVersion }, dir);\n log.ok('Upgraded to Node.js', nodeVersion);\n } catch (error) {\n log.err('Failed to upgrade');\n log.subtle(inspect(error));\n process.exitCode = 1;\n }\n};\n"],
+
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAwB;AAExB,uBAAqB;AACrB,sBAAe;AAEf,qBAAoB;AACpB,qBAA4C;AAE5C,oBAIO;AACP,iCAAoC;AAcpC,MAAM,aAAa,CAAC;AAAA,EAClB;AAAA,EACA;AAAA,EACA;AACF,MAA8B;AAAA,EAC5B,EAAE,MAAM,UAAU,SAAS,GAAG,WAAW;AAAA,EAAK;AAAA,EAC9C;AAAA,IACE,OAAO;AAAA,IAEP,OACE;AAAA,IACF,SAAS,iBAAiB,WAAW;AAAA,EACvC;AAAA,EACA;AAAA,IACE,OAAO;AAAA,IACP,OACE;AAAA,IACF,SAAS,kCAAkC,WAAW;AAAA,EACxD;AAAA,EACA;AAAA,IACE,OAAO;AAAA,IACP,OAAO;AAAA,IACP,OAAO,CAAC,0CAA4B;AAAA,IACpC,SAAS,SAAS,WAAW;AAAA,EAC/B;AAAA,EACA;AAAA,IACE;AAAA,MACE,OAAO;AAAA,MACP,OAAO;AAAA,MACP,SAAS,UAAU,WAAW;AAAA,IAChC;AAAA,IACA;AAAA,MACE,OAAO;AAAA,MACP,OAAO;AAAA,MACP,SAAS,KAAK,WAAW;AAAA,IAC3B;AAAA,EACF;AAAA,EACA;AAAA,IACE,OAAO;AAAA,IACP,OACE;AAAA,IACF,SAAS,SAAS,WAAW;AAAA,EAC/B;AAAA,EACA;AAAA,IACE,OAAO;AAAA,IACP,OAAO;AAAA,IACP,SAAS,KAAK,WAAW;AAAA,EAC3B;AAAA,EACA;AAAA,IACE;AAAA,MACE,OAAO;AAAA,MACP,OAAO;AAAA,MACP,OAAO,CAAC,0CAA4B;AAAA,MACpC,SAAS,OAAO,gBAAgB;AAAA,IAClC;AAAA,IACA;AAAA,MACE,OAAO;AAAA,MACP,OACE;AAAA,MACF,OAAO,CAAC,4CAA8B,kCAAoB;AAAA,MAC1D,SAAS,KAAK,WAAW;AAAA,IAC3B;AAAA,EACF;AAAA,EACA;AAAA,IACE;AAAA,MACE,OAAO;AAAA,MACP,OAAO;AAAA,MACP,OAAO,CAAC,4CAA8B,kCAAoB;AAAA,MAC1D,SAAS,KAAK,iBAAiB;AAAA,IACjC;AAAA,IACA;AAAA,MACE,OAAO;AAAA,MACP,OAAO;AAAA,MACP,OAAO,CAAC,4CAA8B,kCAAoB;AAAA,MAC1D,SAAS,OAAO,iBAAiB;AAAA,IACnC;AAAA,EACF;AAAA,EACA;AAAA,IACE,OAAO;AAAA,IACP,OACE;AAAA,IAEF,SAAS,SAAS,WAAW;AAAA,EAC/B;AACF;AAQA,MAAM,cAAc,OAAO,KAAa,UAAsB;AAC5D,MAAI,MAAM,QAAQ,KAAK,GAAG;AACxB,eAAW,YAAY,OAAO;AAC5B,YAAM,YAAY,KAAK,QAAQ;AAAA,IACjC;AACA;AAAA,EACF;AACA,QAAM,eAAW,4CAA4B,GAAG;AAChD,QAAM,QAAQ,MAAM,OAChB,CAAC,MAAM,IAAI,IACX,UAAM,uBAAK,MAAM,SAAS,CAAC,GAAG,EAAE,KAAK,IAAI,CAAC;AAE9C,QAAM,QAAQ;AAAA,IACZ,MAAM,IAAI,OAAO,SAAS;AACxB,UAAI,KAAK,SAAS,cAAc,GAAG;AACjC;AAAA,MACF;AACA,YAAM,WAAW,MAAM,SAAS,IAAI;AACpC,UAAI,CAAC,UAAU;AACb;AAAA,MACF;AAEA,UAAI,MAAM,SAAS,CAAC,MAAM,MAAM,KAAK,QAAQ,GAAG;AAC9C;AAAA,MACF;AAEA,UAAI,MAAM,OAAO;AACf,cAAM,UAAU,MAAM,QAAQ,IAAI,MAAM,MAAM,IAAI,CAAC,SAAS,KAAK,CAAC,CAAC;AACnE,YAAI,CAAC,QAAQ,MAAM,OAAO,GAAG;AAC3B;AAAA,QACF;AAAA,MACF;AAEA,YAAM,qBAAqB;AAAA,QACzB;AAAA,QACA;AAAA,QACA,WAAW,MAAM;AAAA,QACjB,OAAO,MAAM;AAAA,MACf,CAAC;AAAA,IACH,CAAC;AAAA,EACH;AACF;AAEA,MAAM,uBAAuB,OAAO;AAAA,EAClC;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF,MAME,MAAM,gBAAAA,QAAG,SAAS;AAAA,EAChB;AAAA,EACA,QAAQ,SAAS,WAAW,OAAO,SAAS,IAAI;AAClD;AAEF,MAAM,UAAU,OAAO,UAAoB,QAAgB;AACzD,QAAM,QAAQ;AAAA,IACZ,WAAW,QAAQ,EAAE,IAAI,CAAC,aAAa,YAAY,KAAK,QAAQ,CAAC;AAAA,EACnE;AACF;AAEO,MAAM,uBAAuB,OAClC;AAAA,EACE;AAAA,EACA;AAAA,EACA;AACF,GAKA,MAAM,QAAQ,IAAI,MACf;AACH,qBAAI,GAAG,wBAAwB,WAAW,EAAE;AAC5C,MAAI;AACF,QAAI,CAAE,UAAM,sCAAuB,WAAW,GAAI;AAChD,YAAM,IAAI,MAAM,+BAA+B;AAAA,IACjD;AAEA,UAAM,EAAE,SAAS,kBAAkB,IAAI,IAAI,UAAM;AAAA,MAC/C;AAAA,MACA;AAAA,IACF;AACA,QAAI,KAAK;AACP,yBAAI,KAAK,GAAG;AAAA,IACd;AACA,UAAM,QAAQ,EAAE,aAAa,kBAAkB,kBAAkB,GAAG,GAAG;AACvE,uBAAI,GAAG,uBAAuB,WAAW;AAAA,EAC3C,SAAS,OAAO;AACd,uBAAI,IAAI,mBAAmB;AAC3B,uBAAI,WAAO,qBAAQ,KAAK,CAAC;AACzB,YAAQ,WAAW;AAAA,EACrB;AACF;",
   "names": ["fs"]
 }
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "skuba",
-  "version": "10.0.0-node-22-
+  "version": "10.0.0-node-22-20250218232801",
   "private": false,
   "description": "SEEK development toolkit for backend applications and packages",
   "homepage": "https://github.com/seek-oss/skuba#readme",
@@ -100,7 +100,7 @@
     "typescript": "~5.7.0",
     "validate-npm-package-name": "^6.0.0",
     "zod": "^3.22.4",
-    "eslint-config-skuba": "5.1.0-node-22-
+    "eslint-config-skuba": "5.1.0-node-22-20250218232801"
   },
   "devDependencies": {
     "@changesets/cli": "2.27.12",
package/template/koa-rest-api/Dockerfile.dev-deps
CHANGED
@@ -1,6 +1,6 @@
 # syntax=docker/dockerfile:1.13
 
-FROM public.ecr.aws/docker/library/node:
+FROM public.ecr.aws/docker/library/node:22-alpine AS dev-deps
 
 RUN --mount=type=bind,source=package.json,target=package.json \
   corepack enable pnpm && corepack install
package/lib/cli/migrate/nodeVersion/packageJsonChecks.js.map
@@ -1,7 +0,0 @@
-{
-  "version": 3,
-  "sources": ["../../../../src/cli/migrate/nodeVersion/packageJsonChecks.ts"],
-
"sourcesContent": ["import findUp from 'find-up';\nimport fs from 'fs-extra';\nimport { satisfies } from 'semver';\nimport { type ZodRawShape, z } from 'zod';\n\nimport { log } from '../../../utils/logging';\n\nconst getParentPackageJson = async () => {\n const packageJsonPath = await findUp('package.json', { cwd: process.cwd() });\n if (!packageJsonPath) {\n throw new Error('package.json not found');\n }\n return fs.readFile(packageJsonPath, 'utf-8');\n};\n\nexport const extractFromParentPackageJson = async <T extends ZodRawShape>(\n schema: z.ZodObject<T>,\n): Promise<z.infer<typeof schema> | undefined> => {\n const packageJson = await getParentPackageJson();\n let rawJSON;\n try {\n rawJSON = JSON.parse(packageJson) as unknown;\n } catch {\n throw new Error('package.json is not valid JSON');\n }\n const result = schema.safeParse(rawJSON);\n if (!result.success) {\n return undefined;\n }\n\n return result.data;\n};\n\nexport const isPatchableServerlessVersion = async (): Promise<boolean> => {\n const serverlessVersion = (\n await extractFromParentPackageJson(\n z.object({\n devDependencies: z.object({\n serverless: z.string(),\n }),\n }),\n )\n )?.devDependencies.serverless;\n\n if (!serverlessVersion) {\n log.subtle('Serverless version not found, assuming it is not a dependency');\n return true;\n }\n\n if (!satisfies(serverlessVersion, '4.x.x')) {\n log.warn(\n 'Serverless version not supported, please upgrade to 4.x to automatically update serverless files',\n );\n return false;\n }\n\n log.ok('Serverless version is supported, proceeding with migration');\n return true;\n};\n\nexport const isPatchableSkubaType = async () => {\n const type = (\n await extractFromParentPackageJson(\n z.object({\n skuba: z.object({\n type: z.string(),\n }),\n }),\n )\n )?.skuba.type;\n\n if (!type) {\n log.warn(\n \"skuba type couldn't be found, please specify the type of project in the package.json, to ensure the correct migration is applied\",\n );\n return false;\n }\n if (type === 'package') {\n log.warn(\n 'skuba type package is not supported, packages should be updated manually to ensure major runtime deprecations are intended',\n );\n return false;\n }\n\n log.ok('skuba type supported, proceeding with migration');\n return true;\n};\n"],
-
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,qBAAmB;AACnB,sBAAe;AACf,oBAA0B;AAC1B,iBAAoC;AAEpC,qBAAoB;AAEpB,MAAM,uBAAuB,YAAY;AACvC,QAAM,kBAAkB,UAAM,eAAAA,SAAO,gBAAgB,EAAE,KAAK,QAAQ,IAAI,EAAE,CAAC;AAC3E,MAAI,CAAC,iBAAiB;AACpB,UAAM,IAAI,MAAM,wBAAwB;AAAA,EAC1C;AACA,SAAO,gBAAAC,QAAG,SAAS,iBAAiB,OAAO;AAC7C;AAEO,MAAM,+BAA+B,OAC1C,WACgD;AAChD,QAAM,cAAc,MAAM,qBAAqB;AAC/C,MAAI;AACJ,MAAI;AACF,cAAU,KAAK,MAAM,WAAW;AAAA,EAClC,QAAQ;AACN,UAAM,IAAI,MAAM,gCAAgC;AAAA,EAClD;AACA,QAAM,SAAS,OAAO,UAAU,OAAO;AACvC,MAAI,CAAC,OAAO,SAAS;AACnB,WAAO;AAAA,EACT;AAEA,SAAO,OAAO;AAChB;AAEO,MAAM,+BAA+B,YAA8B;AACxE,QAAM,qBACJ,MAAM;AAAA,IACJ,aAAE,OAAO;AAAA,MACP,iBAAiB,aAAE,OAAO;AAAA,QACxB,YAAY,aAAE,OAAO;AAAA,MACvB,CAAC;AAAA,IACH,CAAC;AAAA,EACH,IACC,gBAAgB;AAEnB,MAAI,CAAC,mBAAmB;AACtB,uBAAI,OAAO,+DAA+D;AAC1E,WAAO;AAAA,EACT;AAEA,MAAI,KAAC,yBAAU,mBAAmB,OAAO,GAAG;AAC1C,uBAAI;AAAA,MACF;AAAA,IACF;AACA,WAAO;AAAA,EACT;AAEA,qBAAI,GAAG,4DAA4D;AACnE,SAAO;AACT;AAEO,MAAM,uBAAuB,YAAY;AAC9C,QAAM,QACJ,MAAM;AAAA,IACJ,aAAE,OAAO;AAAA,MACP,OAAO,aAAE,OAAO;AAAA,QACd,MAAM,aAAE,OAAO;AAAA,MACjB,CAAC;AAAA,IACH,CAAC;AAAA,EACH,IACC,MAAM;AAET,MAAI,CAAC,MAAM;AACT,uBAAI;AAAA,MACF;AAAA,IACF;AACA,WAAO;AAAA,EACT;AACA,MAAI,SAAS,WAAW;AACtB,uBAAI;AAAA,MACF;AAAA,IACF;AACA,WAAO;AAAA,EACT;AAEA,qBAAI,GAAG,iDAAiD;AACxD,SAAO;AACT;",
-  "names": ["findUp", "fs"]
-}