skuba 10.0.1-sub-fixes-20250314063621 → 10.0.2-aaron-test-20250315070234
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/lib/cli/configure/index.js +1 -1
- package/lib/cli/configure/index.js.map +2 -2
- package/lib/cli/configure/modules/package.js +4 -1
- package/lib/cli/configure/modules/package.js.map +2 -2
- package/lib/cli/init/getConfig.js +2 -2
- package/lib/cli/init/getConfig.js.map +2 -2
- package/lib/cli/lint/index.js +1 -1
- package/lib/cli/lint/index.js.map +2 -2
- package/lib/cli/lint/internalLints/noSkubaTemplateJs.js +2 -2
- package/lib/cli/lint/internalLints/noSkubaTemplateJs.js.map +2 -2
- package/lib/cli/lint/internalLints/refreshConfigFiles.js +1 -1
- package/lib/cli/lint/internalLints/refreshConfigFiles.js.map +2 -2
- package/lib/cli/lint/internalLints/upgrade/index.js +2 -2
- package/lib/cli/lint/internalLints/upgrade/index.js.map +2 -2
- package/lib/cli/migrate/nodeVersion/checks.d.ts +4 -4
- package/lib/cli/migrate/nodeVersion/checks.js +17 -14
- package/lib/cli/migrate/nodeVersion/checks.js.map +2 -2
- package/lib/cli/migrate/nodeVersion/index.js +4 -2
- package/lib/cli/migrate/nodeVersion/index.js.map +2 -2
- package/lib/utils/logo.js +3 -1
- package/lib/utils/logo.js.map +2 -2
- package/lib/utils/packageManager.d.ts +11 -9
- package/lib/utils/packageManager.js +23 -10
- package/lib/utils/packageManager.js.map +2 -2
- package/package.json +1 -1
- package/template/express-rest-api/package.json +1 -1
- package/template/greeter/package.json +2 -2
- package/template/koa-rest-api/package.json +1 -1
- package/template/lambda-sqs-worker-cdk/package.json +2 -2
@@ -128,7 +128,7 @@ const configure = async () => {
 import_logging.log.newline();
 import_logging.log.warn(import_logging.log.bold("\u2717 Failed to install dependencies. Resume with:"));
 import_logging.log.newline();
-import_logging.log.plain(import_logging.log.bold(packageManager.install));
+import_logging.log.plain(import_logging.log.bold(packageManager.command, "install"));
 import_logging.log.plain(import_logging.log.bold(packageManager.command, "format"));
 import_logging.log.newline();
 process.exitCode = 1;
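
The change above, and every remaining hunk in this diff, replaces flat fields such as packageManager.install and packageManager.exec with packageManager.command plus a nested print group (print.exec, print.runSilent). A minimal sketch of the config shape these call sites imply; anything beyond the three fields visible in the diff is an assumption, not skuba's published API:

```ts
// Sketch only: a config shape inferred from the call sites in this diff
// (command, print.exec, print.runSilent). Field values are assumed examples.
type PackageManagerConfig = {
  command: string; // e.g. 'pnpm'; other managers assumed
  print: {
    exec: string; // used in printed hints such as `<exec> skuba configure`
    runSilent: string; // used when rewriting package.json scripts
  };
};

// Call sites now compose printed commands from parts rather than reading a
// pre-baked string like the old packageManager.install:
const installHint = (pm: PackageManagerConfig) => [pm.command, 'install'].join(' ');
const configureHint = (pm: PackageManagerConfig) =>
  [pm.print.exec, 'skuba', 'configure'].join(' ');
```
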
@@ -1,7 +1,7 @@
{
"version": 3,
"sources": ["../../../src/cli/configure/index.ts"],
-
"sourcesContent": ["import path from 'path';\n\nimport { Select } from 'enquirer';\n\nimport { createInclusionFilter } from '../../utils/dir';\nimport { createExec, ensureCommands } from '../../utils/exec';\nimport { log } from '../../utils/logging';\nimport { showLogoAndVersionInfo } from '../../utils/logo';\nimport { detectPackageManager } from '../../utils/packageManager';\nimport { BASE_TEMPLATE_DIR } from '../../utils/template';\nimport { hasProp } from '../../utils/validation';\n\nimport { analyseConfiguration } from './analyseConfiguration';\nimport { analyseDependencies } from './analyseDependencies';\nimport { auditWorkingTree } from './analysis/git';\nimport { getDestinationManifest } from './analysis/package';\nimport { ensureTemplateCompletion } from './ensureTemplateCompletion';\nimport { getEntryPoint } from './getEntryPoint';\nimport { getProjectType } from './getProjectType';\n\nconst shouldApply = async (name: string) => {\n if (!process.stdin.isTTY) {\n return 'yes';\n }\n const prompt = new Select({\n choices: ['yes', 'no'] as const,\n message: 'Apply changes?',\n name,\n });\n\n const result = await prompt.run();\n\n return result === 'yes';\n};\n\nexport const configure = async () => {\n await showLogoAndVersionInfo();\n\n const [manifest, packageManager] = await Promise.all([\n getDestinationManifest(),\n detectPackageManager(),\n ]);\n\n await ensureCommands(packageManager.command);\n\n const destinationRoot = path.dirname(manifest.path);\n\n log.plain('Detected project root:', log.bold(destinationRoot));\n\n const [include] = await Promise.all([\n createInclusionFilter([\n path.join(destinationRoot, '.gitignore'),\n path.join(BASE_TEMPLATE_DIR, '_.gitignore'),\n ]),\n\n auditWorkingTree(destinationRoot),\n ]);\n\n const templateConfig = await ensureTemplateCompletion({\n destinationRoot,\n include,\n manifest,\n });\n\n const type = await getProjectType({\n manifest,\n templateConfig,\n });\n\n const entryPoint = await getEntryPoint({\n destinationRoot,\n manifest,\n templateConfig,\n type,\n });\n\n const fixDependencies = await analyseDependencies({\n destinationRoot,\n include,\n manifest,\n type,\n });\n\n if (fixDependencies) {\n log.newline();\n\n if (await shouldApply('fixDependencies')) {\n await fixDependencies();\n }\n }\n\n const firstRun = hasProp(manifest.packageJson, 'skuba');\n\n const fixConfiguration = await analyseConfiguration({\n destinationRoot,\n entryPoint,\n firstRun,\n packageManager,\n type,\n });\n\n if (fixConfiguration) {\n log.newline();\n\n if (await shouldApply('fixConfiguration')) {\n await fixConfiguration();\n }\n }\n\n if (fixDependencies) {\n const exec = createExec({\n stdio: 'pipe',\n streamStdio: packageManager.command,\n });\n\n log.newline();\n try {\n await exec(packageManager.command, 'install');\n } catch {\n log.newline();\n log.warn(log.bold('\u2717 Failed to install dependencies. Resume with:'));\n\n log.newline();\n log.plain(log.bold(packageManager.install));\n log.plain(log.bold(packageManager.command, 'format'));\n\n log.newline();\n process.exitCode = 1;\n return;\n }\n }\n\n if (fixConfiguration ?? fixDependencies) {\n log.newline();\n log.ok(log.bold('\u2714 All done! Try running:'));\n\n log.newline();\n log.plain(log.bold(packageManager.command, 'format'));\n }\n\n log.newline();\n};\n"],
-
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAiB;AAEjB,sBAAuB;AAEvB,iBAAsC;AACtC,kBAA2C;AAC3C,qBAAoB;AACpB,kBAAuC;AACvC,4BAAqC;AACrC,sBAAkC;AAClC,wBAAwB;AAExB,kCAAqC;AACrC,iCAAoC;AACpC,iBAAiC;AACjC,qBAAuC;AACvC,sCAAyC;AACzC,2BAA8B;AAC9B,4BAA+B;AAE/B,MAAM,cAAc,OAAO,SAAiB;AAC1C,MAAI,CAAC,QAAQ,MAAM,OAAO;AACxB,WAAO;AAAA,EACT;AACA,QAAM,SAAS,IAAI,uBAAO;AAAA,IACxB,SAAS,CAAC,OAAO,IAAI;AAAA,IACrB,SAAS;AAAA,IACT;AAAA,EACF,CAAC;AAED,QAAM,SAAS,MAAM,OAAO,IAAI;AAEhC,SAAO,WAAW;AACpB;AAEO,MAAM,YAAY,YAAY;AACnC,YAAM,oCAAuB;AAE7B,QAAM,CAAC,UAAU,cAAc,IAAI,MAAM,QAAQ,IAAI;AAAA,QACnD,uCAAuB;AAAA,QACvB,4CAAqB;AAAA,EACvB,CAAC;AAED,YAAM,4BAAe,eAAe,OAAO;AAE3C,QAAM,kBAAkB,YAAAA,QAAK,QAAQ,SAAS,IAAI;AAElD,qBAAI,MAAM,0BAA0B,mBAAI,KAAK,eAAe,CAAC;AAE7D,QAAM,CAAC,OAAO,IAAI,MAAM,QAAQ,IAAI;AAAA,QAClC,kCAAsB;AAAA,MACpB,YAAAA,QAAK,KAAK,iBAAiB,YAAY;AAAA,MACvC,YAAAA,QAAK,KAAK,mCAAmB,aAAa;AAAA,IAC5C,CAAC;AAAA,QAED,6BAAiB,eAAe;AAAA,EAClC,CAAC;AAED,QAAM,iBAAiB,UAAM,0DAAyB;AAAA,IACpD;AAAA,IACA;AAAA,IACA;AAAA,EACF,CAAC;AAED,QAAM,OAAO,UAAM,sCAAe;AAAA,IAChC;AAAA,IACA;AAAA,EACF,CAAC;AAED,QAAM,aAAa,UAAM,oCAAc;AAAA,IACrC;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,CAAC;AAED,QAAM,kBAAkB,UAAM,gDAAoB;AAAA,IAChD;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,CAAC;AAED,MAAI,iBAAiB;AACnB,uBAAI,QAAQ;AAEZ,QAAI,MAAM,YAAY,iBAAiB,GAAG;AACxC,YAAM,gBAAgB;AAAA,IACxB;AAAA,EACF;AAEA,QAAM,eAAW,2BAAQ,SAAS,aAAa,OAAO;AAEtD,QAAM,mBAAmB,UAAM,kDAAqB;AAAA,IAClD;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,CAAC;AAED,MAAI,kBAAkB;AACpB,uBAAI,QAAQ;AAEZ,QAAI,MAAM,YAAY,kBAAkB,GAAG;AACzC,YAAM,iBAAiB;AAAA,IACzB;AAAA,EACF;AAEA,MAAI,iBAAiB;AACnB,UAAM,WAAO,wBAAW;AAAA,MACtB,OAAO;AAAA,MACP,aAAa,eAAe;AAAA,IAC9B,CAAC;AAED,uBAAI,QAAQ;AACZ,QAAI;AACF,YAAM,KAAK,eAAe,SAAS,SAAS;AAAA,IAC9C,QAAQ;AACN,yBAAI,QAAQ;AACZ,yBAAI,KAAK,mBAAI,KAAK,qDAAgD,CAAC;AAEnE,yBAAI,QAAQ;AACZ,yBAAI,MAAM,mBAAI,KAAK,eAAe,
+
"sourcesContent": ["import path from 'path';\n\nimport { Select } from 'enquirer';\n\nimport { createInclusionFilter } from '../../utils/dir';\nimport { createExec, ensureCommands } from '../../utils/exec';\nimport { log } from '../../utils/logging';\nimport { showLogoAndVersionInfo } from '../../utils/logo';\nimport { detectPackageManager } from '../../utils/packageManager';\nimport { BASE_TEMPLATE_DIR } from '../../utils/template';\nimport { hasProp } from '../../utils/validation';\n\nimport { analyseConfiguration } from './analyseConfiguration';\nimport { analyseDependencies } from './analyseDependencies';\nimport { auditWorkingTree } from './analysis/git';\nimport { getDestinationManifest } from './analysis/package';\nimport { ensureTemplateCompletion } from './ensureTemplateCompletion';\nimport { getEntryPoint } from './getEntryPoint';\nimport { getProjectType } from './getProjectType';\n\nconst shouldApply = async (name: string) => {\n if (!process.stdin.isTTY) {\n return 'yes';\n }\n const prompt = new Select({\n choices: ['yes', 'no'] as const,\n message: 'Apply changes?',\n name,\n });\n\n const result = await prompt.run();\n\n return result === 'yes';\n};\n\nexport const configure = async () => {\n await showLogoAndVersionInfo();\n\n const [manifest, packageManager] = await Promise.all([\n getDestinationManifest(),\n detectPackageManager(),\n ]);\n\n await ensureCommands(packageManager.command);\n\n const destinationRoot = path.dirname(manifest.path);\n\n log.plain('Detected project root:', log.bold(destinationRoot));\n\n const [include] = await Promise.all([\n createInclusionFilter([\n path.join(destinationRoot, '.gitignore'),\n path.join(BASE_TEMPLATE_DIR, '_.gitignore'),\n ]),\n\n auditWorkingTree(destinationRoot),\n ]);\n\n const templateConfig = await ensureTemplateCompletion({\n destinationRoot,\n include,\n manifest,\n });\n\n const type = await getProjectType({\n manifest,\n templateConfig,\n });\n\n const entryPoint = await getEntryPoint({\n destinationRoot,\n manifest,\n templateConfig,\n type,\n });\n\n const fixDependencies = await analyseDependencies({\n destinationRoot,\n include,\n manifest,\n type,\n });\n\n if (fixDependencies) {\n log.newline();\n\n if (await shouldApply('fixDependencies')) {\n await fixDependencies();\n }\n }\n\n const firstRun = hasProp(manifest.packageJson, 'skuba');\n\n const fixConfiguration = await analyseConfiguration({\n destinationRoot,\n entryPoint,\n firstRun,\n packageManager,\n type,\n });\n\n if (fixConfiguration) {\n log.newline();\n\n if (await shouldApply('fixConfiguration')) {\n await fixConfiguration();\n }\n }\n\n if (fixDependencies) {\n const exec = createExec({\n stdio: 'pipe',\n streamStdio: packageManager.command,\n });\n\n log.newline();\n try {\n await exec(packageManager.command, 'install');\n } catch {\n log.newline();\n log.warn(log.bold('\u2717 Failed to install dependencies. Resume with:'));\n\n log.newline();\n log.plain(log.bold(packageManager.command, 'install'));\n log.plain(log.bold(packageManager.command, 'format'));\n\n log.newline();\n process.exitCode = 1;\n return;\n }\n }\n\n if (fixConfiguration ?? fixDependencies) {\n log.newline();\n log.ok(log.bold('\u2714 All done! Try running:'));\n\n log.newline();\n log.plain(log.bold(packageManager.command, 'format'));\n }\n\n log.newline();\n};\n"],
+
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAiB;AAEjB,sBAAuB;AAEvB,iBAAsC;AACtC,kBAA2C;AAC3C,qBAAoB;AACpB,kBAAuC;AACvC,4BAAqC;AACrC,sBAAkC;AAClC,wBAAwB;AAExB,kCAAqC;AACrC,iCAAoC;AACpC,iBAAiC;AACjC,qBAAuC;AACvC,sCAAyC;AACzC,2BAA8B;AAC9B,4BAA+B;AAE/B,MAAM,cAAc,OAAO,SAAiB;AAC1C,MAAI,CAAC,QAAQ,MAAM,OAAO;AACxB,WAAO;AAAA,EACT;AACA,QAAM,SAAS,IAAI,uBAAO;AAAA,IACxB,SAAS,CAAC,OAAO,IAAI;AAAA,IACrB,SAAS;AAAA,IACT;AAAA,EACF,CAAC;AAED,QAAM,SAAS,MAAM,OAAO,IAAI;AAEhC,SAAO,WAAW;AACpB;AAEO,MAAM,YAAY,YAAY;AACnC,YAAM,oCAAuB;AAE7B,QAAM,CAAC,UAAU,cAAc,IAAI,MAAM,QAAQ,IAAI;AAAA,QACnD,uCAAuB;AAAA,QACvB,4CAAqB;AAAA,EACvB,CAAC;AAED,YAAM,4BAAe,eAAe,OAAO;AAE3C,QAAM,kBAAkB,YAAAA,QAAK,QAAQ,SAAS,IAAI;AAElD,qBAAI,MAAM,0BAA0B,mBAAI,KAAK,eAAe,CAAC;AAE7D,QAAM,CAAC,OAAO,IAAI,MAAM,QAAQ,IAAI;AAAA,QAClC,kCAAsB;AAAA,MACpB,YAAAA,QAAK,KAAK,iBAAiB,YAAY;AAAA,MACvC,YAAAA,QAAK,KAAK,mCAAmB,aAAa;AAAA,IAC5C,CAAC;AAAA,QAED,6BAAiB,eAAe;AAAA,EAClC,CAAC;AAED,QAAM,iBAAiB,UAAM,0DAAyB;AAAA,IACpD;AAAA,IACA;AAAA,IACA;AAAA,EACF,CAAC;AAED,QAAM,OAAO,UAAM,sCAAe;AAAA,IAChC;AAAA,IACA;AAAA,EACF,CAAC;AAED,QAAM,aAAa,UAAM,oCAAc;AAAA,IACrC;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,CAAC;AAED,QAAM,kBAAkB,UAAM,gDAAoB;AAAA,IAChD;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,CAAC;AAED,MAAI,iBAAiB;AACnB,uBAAI,QAAQ;AAEZ,QAAI,MAAM,YAAY,iBAAiB,GAAG;AACxC,YAAM,gBAAgB;AAAA,IACxB;AAAA,EACF;AAEA,QAAM,eAAW,2BAAQ,SAAS,aAAa,OAAO;AAEtD,QAAM,mBAAmB,UAAM,kDAAqB;AAAA,IAClD;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,CAAC;AAED,MAAI,kBAAkB;AACpB,uBAAI,QAAQ;AAEZ,QAAI,MAAM,YAAY,kBAAkB,GAAG;AACzC,YAAM,iBAAiB;AAAA,IACzB;AAAA,EACF;AAEA,MAAI,iBAAiB;AACnB,UAAM,WAAO,wBAAW;AAAA,MACtB,OAAO;AAAA,MACP,aAAa,eAAe;AAAA,IAC9B,CAAC;AAED,uBAAI,QAAQ;AACZ,QAAI;AACF,YAAM,KAAK,eAAe,SAAS,SAAS;AAAA,IAC9C,QAAQ;AACN,yBAAI,QAAQ;AACZ,yBAAI,KAAK,mBAAI,KAAK,qDAAgD,CAAC;AAEnE,yBAAI,QAAQ;AACZ,yBAAI,MAAM,mBAAI,KAAK,eAAe,SAAS,SAAS,CAAC;AACrD,yBAAI,MAAM,mBAAI,KAAK,eAAe,SAAS,QAAQ,CAAC;AAEpD,yBAAI,QAAQ;AACZ,cAAQ,WAAW;AACnB;AAAA,IACF;AAAA,EACF;AAEA,MAAI,oBAAoB,iBAAiB;AACvC,uBAAI,QAAQ;AACZ,uBAAI,GAAG,mBAAI,KAAK,+BAA0B,CAAC;AAE3C,uBAAI,QAAQ;AACZ,uBAAI,MAAM,mBAAI,KAAK,eAAe,SAAS,QAAQ,CAAC;AAAA,EACtD;AAEA,qBAAI,QAAQ;AACd;",
"names": ["path"]
}
@@ -93,7 +93,10 @@ const packageModule = async ({
 outputData.scripts.prerelease,
 outputData.scripts.release ?? "skuba release"
 ].filter((script) => typeof script === "string").map(
-(script) => script.replace(
+(script) => script.replace(
+/^smt build$/,
+`${packageManager.print.runSilent} build`
+).replace(/^smt /, "skuba ").trim()
 ).filter(Boolean).join(" && ");
 if (outputData.scripts.build === "skuba build-package") {
 outputData.main = "./lib-commonjs/index.js";
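
The added lines above extend the legacy smt script migration that skuba configure applies to package.json: smt build is now rewritten to the package manager's silent run command, and any other smt prefix still becomes skuba. A standalone sketch of that rewrite, with an assumed runSilent value for illustration:

```ts
// Sketch of the script rewrite shown in the hunk above; the runSilent value
// passed below is an assumed example, not skuba's exact configuration.
const migrateScript = (script: string, runSilent: string): string =>
  script
    .replace(/^smt build$/, `${runSilent} build`)
    .replace(/^smt /, 'skuba ')
    .trim();

migrateScript('smt build', 'pnpm run --silent'); // -> 'pnpm run --silent build'
migrateScript('smt release', 'pnpm run --silent'); // -> 'skuba release'
```
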
@@ -1,7 +1,7 @@
{
"version": 3,
"sources": ["../../../../src/cli/configure/modules/package.ts"],
-
"sourcesContent": ["import { getSkubaVersion } from '../../../utils/version';\nimport { deleteFiles } from '../processing/deleteFiles';\nimport { withPackage } from '../processing/package';\nimport { merge } from '../processing/record';\nimport type { Module, Options } from '../types';\n\nconst DEFAULT_PACKAGE_FILES = [\n 'lib*/**/*.d.ts',\n 'lib*/**/*.js',\n 'lib*/**/*.js.map',\n 'lib*/**/*.json',\n];\n\nexport const packageModule = async ({\n entryPoint,\n packageManager,\n type,\n}: Options): Promise<Module> => {\n const version = await getSkubaVersion();\n\n const initialData = {\n private: type !== 'package',\n\n scripts: {\n build: type === 'package' ? 'skuba build-package' : 'skuba build',\n format: 'skuba format',\n lint: 'skuba lint',\n ...(type === 'package' ? {} : { start: 'skuba start' }),\n test: 'skuba test --coverage',\n 'test:watch': 'skuba test --watch',\n },\n skuba: {\n entryPoint,\n template: null,\n type,\n version,\n },\n };\n\n const recurringData = {\n skuba: {\n entryPoint,\n type,\n version,\n },\n };\n\n return {\n ...deleteFiles('.npmignore', 'package-lock.json'),\n\n 'package.json': (inputFile) =>\n withPackage((inputData) => {\n const outputData = merge(\n inputData,\n 'skuba' in inputData ? recurringData : initialData,\n );\n\n outputData.license ??= 'UNLICENSED';\n outputData.scripts ??= {};\n\n // Workspaces can only be enabled in private projects\n if (outputData.workspaces && !outputData.private) {\n outputData.private = true;\n }\n\n delete outputData.scripts.commit;\n delete outputData.scripts['format:check'];\n delete outputData.scripts['lint:build'];\n delete outputData.scripts['lint:compile'];\n delete outputData.scripts['lint:eslint'];\n delete outputData.scripts['lint:prettier'];\n delete outputData.scripts['lint:tslint'];\n delete outputData.scripts['test:build'];\n delete outputData.scripts['test:jest'];\n delete outputData.typings;\n\n if (type === 'package') {\n outputData.files = (\n outputData.files ?? DEFAULT_PACKAGE_FILES\n ).flatMap((filePattern) =>\n filePattern === 'lib' ? DEFAULT_PACKAGE_FILES : [filePattern],\n );\n\n outputData.version ??= '0.0.0-semantically-released';\n\n // User-defined pre- and post-scripts are confusing and dropped by e.g.\n // Yarn 2.\n outputData.scripts.release = [\n outputData.scripts.prepublish,\n outputData.scripts.prerelease,\n outputData.scripts.release ?? 'skuba release',\n ]\n .filter((script): script is string => typeof script === 'string')\n .map((script) =>\n script\n .replace(/^smt build
-
"mappings": ";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,qBAAgC;AAChC,yBAA4B;AAC5B,qBAA4B;AAC5B,oBAAsB;AAGtB,MAAM,wBAAwB;AAAA,EAC5B;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF;AAEO,MAAM,gBAAgB,OAAO;AAAA,EAClC;AAAA,EACA;AAAA,EACA;AACF,MAAgC;AAC9B,QAAM,UAAU,UAAM,gCAAgB;AAEtC,QAAM,cAAc;AAAA,IAClB,SAAS,SAAS;AAAA,IAElB,SAAS;AAAA,MACP,OAAO,SAAS,YAAY,wBAAwB;AAAA,MACpD,QAAQ;AAAA,MACR,MAAM;AAAA,MACN,GAAI,SAAS,YAAY,CAAC,IAAI,EAAE,OAAO,cAAc;AAAA,MACrD,MAAM;AAAA,MACN,cAAc;AAAA,IAChB;AAAA,IACA,OAAO;AAAA,MACL;AAAA,MACA,UAAU;AAAA,MACV;AAAA,MACA;AAAA,IACF;AAAA,EACF;AAEA,QAAM,gBAAgB;AAAA,IACpB,OAAO;AAAA,MACL;AAAA,MACA;AAAA,MACA;AAAA,IACF;AAAA,EACF;AAEA,SAAO;AAAA,IACL,OAAG,gCAAY,cAAc,mBAAmB;AAAA,IAEhD,gBAAgB,CAAC,kBACf,4BAAY,CAAC,cAAc;AACzB,YAAM,iBAAa;AAAA,QACjB;AAAA,QACA,WAAW,YAAY,gBAAgB;AAAA,MACzC;AAEA,iBAAW,YAAY;AACvB,iBAAW,YAAY,CAAC;AAGxB,UAAI,WAAW,cAAc,CAAC,WAAW,SAAS;AAChD,mBAAW,UAAU;AAAA,MACvB;AAEA,aAAO,WAAW,QAAQ;AAC1B,aAAO,WAAW,QAAQ,cAAc;AACxC,aAAO,WAAW,QAAQ,YAAY;AACtC,aAAO,WAAW,QAAQ,cAAc;AACxC,aAAO,WAAW,QAAQ,aAAa;AACvC,aAAO,WAAW,QAAQ,eAAe;AACzC,aAAO,WAAW,QAAQ,aAAa;AACvC,aAAO,WAAW,QAAQ,YAAY;AACtC,aAAO,WAAW,QAAQ,WAAW;AACrC,aAAO,WAAW;AAElB,UAAI,SAAS,WAAW;AACtB,mBAAW,SACT,WAAW,SAAS,uBACpB;AAAA,UAAQ,CAAC,gBACT,gBAAgB,QAAQ,wBAAwB,CAAC,WAAW;AAAA,QAC9D;AAEA,mBAAW,YAAY;AAIvB,mBAAW,QAAQ,UAAU;AAAA,UAC3B,WAAW,QAAQ;AAAA,UACnB,WAAW,QAAQ;AAAA,UACnB,WAAW,QAAQ,WAAW;AAAA,QAChC,EACG,OAAO,CAAC,WAA6B,OAAO,WAAW,QAAQ,EAC/D;AAAA,UAAI,CAAC,WACJ,OACG,
+
"sourcesContent": ["import { getSkubaVersion } from '../../../utils/version';\nimport { deleteFiles } from '../processing/deleteFiles';\nimport { withPackage } from '../processing/package';\nimport { merge } from '../processing/record';\nimport type { Module, Options } from '../types';\n\nconst DEFAULT_PACKAGE_FILES = [\n 'lib*/**/*.d.ts',\n 'lib*/**/*.js',\n 'lib*/**/*.js.map',\n 'lib*/**/*.json',\n];\n\nexport const packageModule = async ({\n entryPoint,\n packageManager,\n type,\n}: Options): Promise<Module> => {\n const version = await getSkubaVersion();\n\n const initialData = {\n private: type !== 'package',\n\n scripts: {\n build: type === 'package' ? 'skuba build-package' : 'skuba build',\n format: 'skuba format',\n lint: 'skuba lint',\n ...(type === 'package' ? {} : { start: 'skuba start' }),\n test: 'skuba test --coverage',\n 'test:watch': 'skuba test --watch',\n },\n skuba: {\n entryPoint,\n template: null,\n type,\n version,\n },\n };\n\n const recurringData = {\n skuba: {\n entryPoint,\n type,\n version,\n },\n };\n\n return {\n ...deleteFiles('.npmignore', 'package-lock.json'),\n\n 'package.json': (inputFile) =>\n withPackage((inputData) => {\n const outputData = merge(\n inputData,\n 'skuba' in inputData ? recurringData : initialData,\n );\n\n outputData.license ??= 'UNLICENSED';\n outputData.scripts ??= {};\n\n // Workspaces can only be enabled in private projects\n if (outputData.workspaces && !outputData.private) {\n outputData.private = true;\n }\n\n delete outputData.scripts.commit;\n delete outputData.scripts['format:check'];\n delete outputData.scripts['lint:build'];\n delete outputData.scripts['lint:compile'];\n delete outputData.scripts['lint:eslint'];\n delete outputData.scripts['lint:prettier'];\n delete outputData.scripts['lint:tslint'];\n delete outputData.scripts['test:build'];\n delete outputData.scripts['test:jest'];\n delete outputData.typings;\n\n if (type === 'package') {\n outputData.files = (\n outputData.files ?? DEFAULT_PACKAGE_FILES\n ).flatMap((filePattern) =>\n filePattern === 'lib' ? DEFAULT_PACKAGE_FILES : [filePattern],\n );\n\n outputData.version ??= '0.0.0-semantically-released';\n\n // User-defined pre- and post-scripts are confusing and dropped by e.g.\n // Yarn 2.\n outputData.scripts.release = [\n outputData.scripts.prepublish,\n outputData.scripts.prerelease,\n outputData.scripts.release ?? 'skuba release',\n ]\n .filter((script): script is string => typeof script === 'string')\n .map((script) =>\n script\n .replace(\n /^smt build$/,\n `${packageManager.print.runSilent} build`,\n )\n .replace(/^smt /, 'skuba ')\n .trim(),\n )\n .filter(Boolean)\n .join(' && ');\n\n // Align with the required syntax for package.json#/paths\n if (outputData.scripts.build === 'skuba build-package') {\n outputData.main = './lib-commonjs/index.js';\n outputData.module = './lib-es2015/index.js';\n outputData.types = './lib-types/index.d.ts';\n } else {\n outputData.main = './lib/index.js';\n outputData.module = './lib/index.js';\n outputData.types = './lib/index.d.ts';\n }\n\n delete outputData.scripts.prepublish;\n delete outputData.scripts.prerelease;\n }\n\n return outputData;\n })(inputFile),\n };\n};\n"],
+
"mappings": ";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,qBAAgC;AAChC,yBAA4B;AAC5B,qBAA4B;AAC5B,oBAAsB;AAGtB,MAAM,wBAAwB;AAAA,EAC5B;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF;AAEO,MAAM,gBAAgB,OAAO;AAAA,EAClC;AAAA,EACA;AAAA,EACA;AACF,MAAgC;AAC9B,QAAM,UAAU,UAAM,gCAAgB;AAEtC,QAAM,cAAc;AAAA,IAClB,SAAS,SAAS;AAAA,IAElB,SAAS;AAAA,MACP,OAAO,SAAS,YAAY,wBAAwB;AAAA,MACpD,QAAQ;AAAA,MACR,MAAM;AAAA,MACN,GAAI,SAAS,YAAY,CAAC,IAAI,EAAE,OAAO,cAAc;AAAA,MACrD,MAAM;AAAA,MACN,cAAc;AAAA,IAChB;AAAA,IACA,OAAO;AAAA,MACL;AAAA,MACA,UAAU;AAAA,MACV;AAAA,MACA;AAAA,IACF;AAAA,EACF;AAEA,QAAM,gBAAgB;AAAA,IACpB,OAAO;AAAA,MACL;AAAA,MACA;AAAA,MACA;AAAA,IACF;AAAA,EACF;AAEA,SAAO;AAAA,IACL,OAAG,gCAAY,cAAc,mBAAmB;AAAA,IAEhD,gBAAgB,CAAC,kBACf,4BAAY,CAAC,cAAc;AACzB,YAAM,iBAAa;AAAA,QACjB;AAAA,QACA,WAAW,YAAY,gBAAgB;AAAA,MACzC;AAEA,iBAAW,YAAY;AACvB,iBAAW,YAAY,CAAC;AAGxB,UAAI,WAAW,cAAc,CAAC,WAAW,SAAS;AAChD,mBAAW,UAAU;AAAA,MACvB;AAEA,aAAO,WAAW,QAAQ;AAC1B,aAAO,WAAW,QAAQ,cAAc;AACxC,aAAO,WAAW,QAAQ,YAAY;AACtC,aAAO,WAAW,QAAQ,cAAc;AACxC,aAAO,WAAW,QAAQ,aAAa;AACvC,aAAO,WAAW,QAAQ,eAAe;AACzC,aAAO,WAAW,QAAQ,aAAa;AACvC,aAAO,WAAW,QAAQ,YAAY;AACtC,aAAO,WAAW,QAAQ,WAAW;AACrC,aAAO,WAAW;AAElB,UAAI,SAAS,WAAW;AACtB,mBAAW,SACT,WAAW,SAAS,uBACpB;AAAA,UAAQ,CAAC,gBACT,gBAAgB,QAAQ,wBAAwB,CAAC,WAAW;AAAA,QAC9D;AAEA,mBAAW,YAAY;AAIvB,mBAAW,QAAQ,UAAU;AAAA,UAC3B,WAAW,QAAQ;AAAA,UACnB,WAAW,QAAQ;AAAA,UACnB,WAAW,QAAQ,WAAW;AAAA,QAChC,EACG,OAAO,CAAC,WAA6B,OAAO,WAAW,QAAQ,EAC/D;AAAA,UAAI,CAAC,WACJ,OACG;AAAA,YACC;AAAA,YACA,GAAG,eAAe,MAAM,SAAS;AAAA,UACnC,EACC,QAAQ,SAAS,QAAQ,EACzB,KAAK;AAAA,QACV,EACC,OAAO,OAAO,EACd,KAAK,MAAM;AAGd,YAAI,WAAW,QAAQ,UAAU,uBAAuB;AACtD,qBAAW,OAAO;AAClB,qBAAW,SAAS;AACpB,qBAAW,QAAQ;AAAA,QACrB,OAAO;AACL,qBAAW,OAAO;AAClB,qBAAW,SAAS;AACpB,qBAAW,QAAQ;AAAA,QACrB;AAEA,eAAO,WAAW,QAAQ;AAC1B,eAAO,WAAW,QAAQ;AAAA,MAC5B;AAEA,aAAO;AAAA,IACT,CAAC,EAAE,SAAS;AAAA,EAChB;AACF;",
"names": []
}
@@ -118,7 +118,7 @@ const cloneTemplate = async (templateName, destinationDir) => {
 import_logging.log.warn(
 "You may need to run",
 import_logging.log.bold(
-(0, import_packageManager.configForPackageManager)(templateConfig.packageManager).exec,
+(0, import_packageManager.configForPackageManager)(templateConfig.packageManager).print.exec,
 "skuba",
 "configure"
 ),
@@ -225,7 +225,7 @@ const configureFromPrompt = async () => {
 import_logging.log.newline();
 import_logging.log.warn(
 `Resume this later with ${import_chalk.default.bold(
-(0, import_packageManager.configForPackageManager)(packageManager).exec,
+(0, import_packageManager.configForPackageManager)(packageManager).print.exec,
 "skuba configure"
 )}.`
 );
@@ -1,7 +1,7 @@
{
"version": 3,
"sources": ["../../../src/cli/init/getConfig.ts"],
-
"sourcesContent": ["import path from 'path';\n\nimport chalk from 'chalk';\nimport { Form, type FormChoice } from 'enquirer';\nimport fs from 'fs-extra';\n\nimport { copyFiles } from '../../utils/copy';\nimport { isErrorWithCode } from '../../utils/error';\nimport { log } from '../../utils/logging';\nimport {\n DEFAULT_PACKAGE_MANAGER,\n configForPackageManager,\n} from '../../utils/packageManager';\nimport { getRandomPort } from '../../utils/port';\nimport {\n TEMPLATE_CONFIG_FILENAME,\n TEMPLATE_DIR,\n type TemplateConfig,\n templateConfigSchema,\n} from '../../utils/template';\n\nimport { downloadGitHubTemplate } from './git';\nimport {\n BASE_PROMPT_PROPS,\n type BaseFields,\n type Choice,\n GIT_PATH_PROMPT,\n SHOULD_CONTINUE_PROMPT,\n TEMPLATE_PROMPT,\n} from './prompts';\nimport { type InitConfig, initConfigInputSchema } from './types';\n\nexport const runForm = <T = Record<string, string>>(props: {\n choices: readonly Choice[];\n message: string;\n name: string;\n}) => {\n const { message, name } = props;\n\n const choices = props.choices.map((choice) => ({\n ...choice,\n validate: (value: string | undefined) => {\n if (\n !value ||\n value === '' ||\n (value === choice.initial && !choice.allowInitial)\n ) {\n return 'Form is not complete';\n }\n\n return choice.validate?.(value) ?? true;\n },\n }));\n\n const form = new Form<T>({\n choices,\n message,\n name,\n validate: async (values) => {\n const results = await Promise.all(\n choices.map((choice) => choice.validate(values[choice.name])),\n );\n\n return (\n results.find((result) => typeof result === 'string') ??\n results.every((result) => result === true)\n );\n },\n });\n\n return form.run();\n};\n\nconst confirmShouldContinue = async (choices: readonly FormChoice[]) => {\n const fieldsList = choices.map((choice) => choice.message);\n\n log.newline();\n log.plain('This template uses the following information:');\n log.newline();\n fieldsList.forEach((message) => log.subtle(`- ${message}`));\n\n log.newline();\n const result = await SHOULD_CONTINUE_PROMPT.run();\n\n return result === 'yes';\n};\n\nconst createDirectory = async (dir: string) => {\n try {\n await fs.promises.mkdir(dir);\n } catch (err) {\n if (isErrorWithCode(err, 'EEXIST')) {\n log.err(`The directory '${dir}' already exists.`);\n process.exit(1);\n }\n\n throw err;\n }\n};\n\nconst cloneTemplate = async (\n templateName: string,\n destinationDir: string,\n): Promise<TemplateConfig> => {\n const isCustomTemplate = templateName.startsWith('github:');\n\n if (isCustomTemplate) {\n const gitHubPath = templateName.slice('github:'.length);\n\n await downloadGitHubTemplate(gitHubPath, destinationDir);\n } else {\n const templateDir = path.join(TEMPLATE_DIR, templateName);\n\n await copyFiles({\n // assume built-in templates have no extraneous files\n include: () => true,\n sourceRoot: templateDir,\n destinationRoot: destinationDir,\n processors: [],\n // built-in templates have files like _package.json\n stripUnderscorePrefix: true,\n });\n }\n\n const templateConfig = getTemplateConfig(\n path.join(process.cwd(), destinationDir),\n );\n\n if (isCustomTemplate) {\n log.newline();\n log.warn(\n 'You may need to run',\n log.bold(\n configForPackageManager(templateConfig.packageManager).exec,\n 'skuba',\n 'configure',\n ),\n 'once this is done.',\n );\n }\n\n return templateConfig;\n};\n\nconst getTemplateName = async () => {\n const templateSelection = await TEMPLATE_PROMPT.run();\n\n if (templateSelection === 'github \u2192') {\n const gitHubPath = await 
GIT_PATH_PROMPT.run();\n return `github:${gitHubPath}`;\n }\n\n return templateSelection;\n};\n\nconst generatePlaceholders = (choices: FormChoice[]) =>\n Object.fromEntries(\n choices.map(({ name }) => [name, `<%- ${name} %>`] as const),\n );\n\nexport const getTemplateConfig = (dir: string): TemplateConfig => {\n const templateConfigPath = path.join(dir, TEMPLATE_CONFIG_FILENAME);\n\n try {\n // eslint-disable-next-line @typescript-eslint/no-require-imports\n const templateConfig = require(templateConfigPath) as unknown;\n\n return templateConfigSchema.parse(templateConfig);\n } catch (err) {\n if (isErrorWithCode(err, 'MODULE_NOT_FOUND')) {\n return {\n entryPoint: undefined,\n fields: [],\n packageManager: DEFAULT_PACKAGE_MANAGER,\n type: undefined,\n };\n }\n\n throw err;\n }\n};\n\nconst baseToTemplateData = async ({\n ownerName,\n platformName,\n repoName,\n defaultBranch,\n}: BaseFields) => {\n const [orgName, teamName] = ownerName.split('/');\n\n const port = String(await getRandomPort());\n\n if (!orgName) {\n throw new Error(`Invalid format for owner name: ${ownerName}`);\n }\n\n return {\n orgName,\n ownerName,\n repoName,\n defaultBranch,\n // Use standalone username in `teamName` contexts\n teamName: teamName ?? orgName,\n\n port,\n\n platformName,\n lambdaCdkArchitecture: platformName === 'amd64' ? 'X86_64' : 'ARM_64',\n lambdaServerlessArchitecture:\n platformName === 'amd64' ? 'x86_64' : platformName,\n };\n};\n\nexport const configureFromPrompt = async (): Promise<InitConfig> => {\n const { ownerName, platformName, repoName, defaultBranch } =\n await runForm<BaseFields>(BASE_PROMPT_PROPS);\n log.plain(chalk.cyan(repoName), 'by', chalk.cyan(ownerName));\n\n const templateData = await baseToTemplateData({\n ownerName,\n platformName,\n repoName,\n defaultBranch,\n });\n\n const destinationDir = repoName;\n\n await createDirectory(destinationDir);\n\n log.newline();\n const templateName = await getTemplateName();\n\n const { entryPoint, fields, noSkip, packageManager, type } =\n await cloneTemplate(templateName, destinationDir);\n\n if (fields.length === 0) {\n return {\n destinationDir,\n entryPoint,\n packageManager,\n templateComplete: true,\n templateData,\n templateName,\n type,\n };\n }\n\n const shouldContinue = noSkip ? 
true : await confirmShouldContinue(fields);\n\n if (shouldContinue) {\n log.newline();\n const customAnswers = await runForm({\n choices: fields,\n message: chalk.bold(`Complete ${chalk.cyan(templateName)}:`),\n name: 'customAnswers',\n });\n\n return {\n destinationDir,\n entryPoint,\n packageManager,\n templateComplete: true,\n templateData: { ...templateData, ...customAnswers },\n templateName,\n type,\n };\n }\n\n log.newline();\n log.warn(\n `Resume this later with ${chalk.bold(\n configForPackageManager(packageManager).exec,\n 'skuba configure',\n )}.`,\n );\n\n const customAnswers = generatePlaceholders(fields);\n\n return {\n destinationDir,\n entryPoint,\n packageManager,\n templateComplete: false,\n templateData: { ...templateData, ...customAnswers },\n templateName,\n type,\n };\n};\n\nexport const readJSONFromStdIn = async () => {\n let text = '';\n\n await new Promise((resolve) =>\n process.stdin\n .on('data', (chunk) => (text += chunk.toString()))\n .once('end', resolve),\n );\n\n text = text.trim();\n\n if (text === '') {\n log.err('No data from stdin.');\n process.exit(1);\n }\n\n let value: unknown;\n\n try {\n value = JSON.parse(text) as unknown;\n } catch {\n log.err('Invalid JSON from stdin.');\n process.exit(1);\n }\n\n return value;\n};\n\nconst configureFromPipe = async (): Promise<InitConfig> => {\n const value = await readJSONFromStdIn();\n\n const result = initConfigInputSchema.safeParse(value);\n\n if (!result.success) {\n log.err('Invalid data from stdin:');\n log.err(result.error);\n process.exit(1);\n }\n\n const { destinationDir, templateComplete, templateName } = result.data;\n\n const templateData = {\n ...(await baseToTemplateData(result.data.templateData)),\n ...result.data.templateData,\n };\n\n await createDirectory(destinationDir);\n\n const { entryPoint, fields, noSkip, packageManager, type } =\n await cloneTemplate(templateName, destinationDir);\n\n if (!templateComplete) {\n if (noSkip) {\n log.err('Templating for', log.bold(templateName), 'cannot be skipped.');\n process.exit(1);\n }\n\n return {\n ...result.data,\n entryPoint,\n packageManager,\n templateData: {\n ...templateData,\n ...generatePlaceholders(fields),\n },\n type,\n };\n }\n\n const required = fields.map(({ name }) => name);\n\n const provided = new Set(Object.keys(templateData));\n\n const missing = required.filter((name) => !provided.has(name));\n\n if (missing.length > 0) {\n log.err('This template uses the following information:');\n log.newline();\n missing.forEach((name) => log.err(`- ${name}`));\n process.exit(1);\n }\n\n return {\n ...result.data,\n entryPoint,\n packageManager,\n templateData,\n type,\n };\n};\n\nexport const getConfig = () =>\n process.stdin.isTTY ? configureFromPrompt() : configureFromPipe();\n"],
-
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAiB;AAEjB,mBAAkB;AAClB,sBAAsC;AACtC,sBAAe;AAEf,kBAA0B;AAC1B,mBAAgC;AAChC,qBAAoB;AACpB,4BAGO;AACP,kBAA8B;AAC9B,sBAKO;AAEP,iBAAuC;AACvC,qBAOO;AACP,mBAAuD;AAEhD,MAAM,UAAU,CAA6B,UAI9C;AACJ,QAAM,EAAE,SAAS,KAAK,IAAI;AAE1B,QAAM,UAAU,MAAM,QAAQ,IAAI,CAAC,YAAY;AAAA,IAC7C,GAAG;AAAA,IACH,UAAU,CAAC,UAA8B;AACvC,UACE,CAAC,SACD,UAAU,MACT,UAAU,OAAO,WAAW,CAAC,OAAO,cACrC;AACA,eAAO;AAAA,MACT;AAEA,aAAO,OAAO,WAAW,KAAK,KAAK;AAAA,IACrC;AAAA,EACF,EAAE;AAEF,QAAM,OAAO,IAAI,qBAAQ;AAAA,IACvB;AAAA,IACA;AAAA,IACA;AAAA,IACA,UAAU,OAAO,WAAW;AAC1B,YAAM,UAAU,MAAM,QAAQ;AAAA,QAC5B,QAAQ,IAAI,CAAC,WAAW,OAAO,SAAS,OAAO,OAAO,IAAI,CAAC,CAAC;AAAA,MAC9D;AAEA,aACE,QAAQ,KAAK,CAAC,WAAW,OAAO,WAAW,QAAQ,KACnD,QAAQ,MAAM,CAAC,WAAW,WAAW,IAAI;AAAA,IAE7C;AAAA,EACF,CAAC;AAED,SAAO,KAAK,IAAI;AAClB;AAEA,MAAM,wBAAwB,OAAO,YAAmC;AACtE,QAAM,aAAa,QAAQ,IAAI,CAAC,WAAW,OAAO,OAAO;AAEzD,qBAAI,QAAQ;AACZ,qBAAI,MAAM,+CAA+C;AACzD,qBAAI,QAAQ;AACZ,aAAW,QAAQ,CAAC,YAAY,mBAAI,OAAO,KAAK,OAAO,EAAE,CAAC;AAE1D,qBAAI,QAAQ;AACZ,QAAM,SAAS,MAAM,sCAAuB,IAAI;AAEhD,SAAO,WAAW;AACpB;AAEA,MAAM,kBAAkB,OAAO,QAAgB;AAC7C,MAAI;AACF,UAAM,gBAAAA,QAAG,SAAS,MAAM,GAAG;AAAA,EAC7B,SAAS,KAAK;AACZ,YAAI,8BAAgB,KAAK,QAAQ,GAAG;AAClC,yBAAI,IAAI,kBAAkB,GAAG,mBAAmB;AAChD,cAAQ,KAAK,CAAC;AAAA,IAChB;AAEA,UAAM;AAAA,EACR;AACF;AAEA,MAAM,gBAAgB,OACpB,cACA,mBAC4B;AAC5B,QAAM,mBAAmB,aAAa,WAAW,SAAS;AAE1D,MAAI,kBAAkB;AACpB,UAAM,aAAa,aAAa,MAAM,UAAU,MAAM;AAEtD,cAAM,mCAAuB,YAAY,cAAc;AAAA,EACzD,OAAO;AACL,UAAM,cAAc,YAAAC,QAAK,KAAK,8BAAc,YAAY;AAExD,cAAM,uBAAU;AAAA;AAAA,MAEd,SAAS,MAAM;AAAA,MACf,YAAY;AAAA,MACZ,iBAAiB;AAAA,MACjB,YAAY,CAAC;AAAA;AAAA,MAEb,uBAAuB;AAAA,IACzB,CAAC;AAAA,EACH;AAEA,QAAM,iBAAiB;AAAA,IACrB,YAAAA,QAAK,KAAK,QAAQ,IAAI,GAAG,cAAc;AAAA,EACzC;AAEA,MAAI,kBAAkB;AACpB,uBAAI,QAAQ;AACZ,uBAAI;AAAA,MACF;AAAA,MACA,mBAAI;AAAA,YACF,+CAAwB,eAAe,cAAc,EAAE;AAAA,
+
"sourcesContent": ["import path from 'path';\n\nimport chalk from 'chalk';\nimport { Form, type FormChoice } from 'enquirer';\nimport fs from 'fs-extra';\n\nimport { copyFiles } from '../../utils/copy';\nimport { isErrorWithCode } from '../../utils/error';\nimport { log } from '../../utils/logging';\nimport {\n DEFAULT_PACKAGE_MANAGER,\n configForPackageManager,\n} from '../../utils/packageManager';\nimport { getRandomPort } from '../../utils/port';\nimport {\n TEMPLATE_CONFIG_FILENAME,\n TEMPLATE_DIR,\n type TemplateConfig,\n templateConfigSchema,\n} from '../../utils/template';\n\nimport { downloadGitHubTemplate } from './git';\nimport {\n BASE_PROMPT_PROPS,\n type BaseFields,\n type Choice,\n GIT_PATH_PROMPT,\n SHOULD_CONTINUE_PROMPT,\n TEMPLATE_PROMPT,\n} from './prompts';\nimport { type InitConfig, initConfigInputSchema } from './types';\n\nexport const runForm = <T = Record<string, string>>(props: {\n choices: readonly Choice[];\n message: string;\n name: string;\n}) => {\n const { message, name } = props;\n\n const choices = props.choices.map((choice) => ({\n ...choice,\n validate: (value: string | undefined) => {\n if (\n !value ||\n value === '' ||\n (value === choice.initial && !choice.allowInitial)\n ) {\n return 'Form is not complete';\n }\n\n return choice.validate?.(value) ?? true;\n },\n }));\n\n const form = new Form<T>({\n choices,\n message,\n name,\n validate: async (values) => {\n const results = await Promise.all(\n choices.map((choice) => choice.validate(values[choice.name])),\n );\n\n return (\n results.find((result) => typeof result === 'string') ??\n results.every((result) => result === true)\n );\n },\n });\n\n return form.run();\n};\n\nconst confirmShouldContinue = async (choices: readonly FormChoice[]) => {\n const fieldsList = choices.map((choice) => choice.message);\n\n log.newline();\n log.plain('This template uses the following information:');\n log.newline();\n fieldsList.forEach((message) => log.subtle(`- ${message}`));\n\n log.newline();\n const result = await SHOULD_CONTINUE_PROMPT.run();\n\n return result === 'yes';\n};\n\nconst createDirectory = async (dir: string) => {\n try {\n await fs.promises.mkdir(dir);\n } catch (err) {\n if (isErrorWithCode(err, 'EEXIST')) {\n log.err(`The directory '${dir}' already exists.`);\n process.exit(1);\n }\n\n throw err;\n }\n};\n\nconst cloneTemplate = async (\n templateName: string,\n destinationDir: string,\n): Promise<TemplateConfig> => {\n const isCustomTemplate = templateName.startsWith('github:');\n\n if (isCustomTemplate) {\n const gitHubPath = templateName.slice('github:'.length);\n\n await downloadGitHubTemplate(gitHubPath, destinationDir);\n } else {\n const templateDir = path.join(TEMPLATE_DIR, templateName);\n\n await copyFiles({\n // assume built-in templates have no extraneous files\n include: () => true,\n sourceRoot: templateDir,\n destinationRoot: destinationDir,\n processors: [],\n // built-in templates have files like _package.json\n stripUnderscorePrefix: true,\n });\n }\n\n const templateConfig = getTemplateConfig(\n path.join(process.cwd(), destinationDir),\n );\n\n if (isCustomTemplate) {\n log.newline();\n log.warn(\n 'You may need to run',\n log.bold(\n configForPackageManager(templateConfig.packageManager).print.exec,\n 'skuba',\n 'configure',\n ),\n 'once this is done.',\n );\n }\n\n return templateConfig;\n};\n\nconst getTemplateName = async () => {\n const templateSelection = await TEMPLATE_PROMPT.run();\n\n if (templateSelection === 'github \u2192') {\n const gitHubPath = await 
GIT_PATH_PROMPT.run();\n return `github:${gitHubPath}`;\n }\n\n return templateSelection;\n};\n\nconst generatePlaceholders = (choices: FormChoice[]) =>\n Object.fromEntries(\n choices.map(({ name }) => [name, `<%- ${name} %>`] as const),\n );\n\nexport const getTemplateConfig = (dir: string): TemplateConfig => {\n const templateConfigPath = path.join(dir, TEMPLATE_CONFIG_FILENAME);\n\n try {\n // eslint-disable-next-line @typescript-eslint/no-require-imports\n const templateConfig = require(templateConfigPath) as unknown;\n\n return templateConfigSchema.parse(templateConfig);\n } catch (err) {\n if (isErrorWithCode(err, 'MODULE_NOT_FOUND')) {\n return {\n entryPoint: undefined,\n fields: [],\n packageManager: DEFAULT_PACKAGE_MANAGER,\n type: undefined,\n };\n }\n\n throw err;\n }\n};\n\nconst baseToTemplateData = async ({\n ownerName,\n platformName,\n repoName,\n defaultBranch,\n}: BaseFields) => {\n const [orgName, teamName] = ownerName.split('/');\n\n const port = String(await getRandomPort());\n\n if (!orgName) {\n throw new Error(`Invalid format for owner name: ${ownerName}`);\n }\n\n return {\n orgName,\n ownerName,\n repoName,\n defaultBranch,\n // Use standalone username in `teamName` contexts\n teamName: teamName ?? orgName,\n\n port,\n\n platformName,\n lambdaCdkArchitecture: platformName === 'amd64' ? 'X86_64' : 'ARM_64',\n lambdaServerlessArchitecture:\n platformName === 'amd64' ? 'x86_64' : platformName,\n };\n};\n\nexport const configureFromPrompt = async (): Promise<InitConfig> => {\n const { ownerName, platformName, repoName, defaultBranch } =\n await runForm<BaseFields>(BASE_PROMPT_PROPS);\n log.plain(chalk.cyan(repoName), 'by', chalk.cyan(ownerName));\n\n const templateData = await baseToTemplateData({\n ownerName,\n platformName,\n repoName,\n defaultBranch,\n });\n\n const destinationDir = repoName;\n\n await createDirectory(destinationDir);\n\n log.newline();\n const templateName = await getTemplateName();\n\n const { entryPoint, fields, noSkip, packageManager, type } =\n await cloneTemplate(templateName, destinationDir);\n\n if (fields.length === 0) {\n return {\n destinationDir,\n entryPoint,\n packageManager,\n templateComplete: true,\n templateData,\n templateName,\n type,\n };\n }\n\n const shouldContinue = noSkip ? 
true : await confirmShouldContinue(fields);\n\n if (shouldContinue) {\n log.newline();\n const customAnswers = await runForm({\n choices: fields,\n message: chalk.bold(`Complete ${chalk.cyan(templateName)}:`),\n name: 'customAnswers',\n });\n\n return {\n destinationDir,\n entryPoint,\n packageManager,\n templateComplete: true,\n templateData: { ...templateData, ...customAnswers },\n templateName,\n type,\n };\n }\n\n log.newline();\n log.warn(\n `Resume this later with ${chalk.bold(\n configForPackageManager(packageManager).print.exec,\n 'skuba configure',\n )}.`,\n );\n\n const customAnswers = generatePlaceholders(fields);\n\n return {\n destinationDir,\n entryPoint,\n packageManager,\n templateComplete: false,\n templateData: { ...templateData, ...customAnswers },\n templateName,\n type,\n };\n};\n\nexport const readJSONFromStdIn = async () => {\n let text = '';\n\n await new Promise((resolve) =>\n process.stdin\n .on('data', (chunk) => (text += chunk.toString()))\n .once('end', resolve),\n );\n\n text = text.trim();\n\n if (text === '') {\n log.err('No data from stdin.');\n process.exit(1);\n }\n\n let value: unknown;\n\n try {\n value = JSON.parse(text) as unknown;\n } catch {\n log.err('Invalid JSON from stdin.');\n process.exit(1);\n }\n\n return value;\n};\n\nconst configureFromPipe = async (): Promise<InitConfig> => {\n const value = await readJSONFromStdIn();\n\n const result = initConfigInputSchema.safeParse(value);\n\n if (!result.success) {\n log.err('Invalid data from stdin:');\n log.err(result.error);\n process.exit(1);\n }\n\n const { destinationDir, templateComplete, templateName } = result.data;\n\n const templateData = {\n ...(await baseToTemplateData(result.data.templateData)),\n ...result.data.templateData,\n };\n\n await createDirectory(destinationDir);\n\n const { entryPoint, fields, noSkip, packageManager, type } =\n await cloneTemplate(templateName, destinationDir);\n\n if (!templateComplete) {\n if (noSkip) {\n log.err('Templating for', log.bold(templateName), 'cannot be skipped.');\n process.exit(1);\n }\n\n return {\n ...result.data,\n entryPoint,\n packageManager,\n templateData: {\n ...templateData,\n ...generatePlaceholders(fields),\n },\n type,\n };\n }\n\n const required = fields.map(({ name }) => name);\n\n const provided = new Set(Object.keys(templateData));\n\n const missing = required.filter((name) => !provided.has(name));\n\n if (missing.length > 0) {\n log.err('This template uses the following information:');\n log.newline();\n missing.forEach((name) => log.err(`- ${name}`));\n process.exit(1);\n }\n\n return {\n ...result.data,\n entryPoint,\n packageManager,\n templateData,\n type,\n };\n};\n\nexport const getConfig = () =>\n process.stdin.isTTY ? configureFromPrompt() : configureFromPipe();\n"],
+
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAiB;AAEjB,mBAAkB;AAClB,sBAAsC;AACtC,sBAAe;AAEf,kBAA0B;AAC1B,mBAAgC;AAChC,qBAAoB;AACpB,4BAGO;AACP,kBAA8B;AAC9B,sBAKO;AAEP,iBAAuC;AACvC,qBAOO;AACP,mBAAuD;AAEhD,MAAM,UAAU,CAA6B,UAI9C;AACJ,QAAM,EAAE,SAAS,KAAK,IAAI;AAE1B,QAAM,UAAU,MAAM,QAAQ,IAAI,CAAC,YAAY;AAAA,IAC7C,GAAG;AAAA,IACH,UAAU,CAAC,UAA8B;AACvC,UACE,CAAC,SACD,UAAU,MACT,UAAU,OAAO,WAAW,CAAC,OAAO,cACrC;AACA,eAAO;AAAA,MACT;AAEA,aAAO,OAAO,WAAW,KAAK,KAAK;AAAA,IACrC;AAAA,EACF,EAAE;AAEF,QAAM,OAAO,IAAI,qBAAQ;AAAA,IACvB;AAAA,IACA;AAAA,IACA;AAAA,IACA,UAAU,OAAO,WAAW;AAC1B,YAAM,UAAU,MAAM,QAAQ;AAAA,QAC5B,QAAQ,IAAI,CAAC,WAAW,OAAO,SAAS,OAAO,OAAO,IAAI,CAAC,CAAC;AAAA,MAC9D;AAEA,aACE,QAAQ,KAAK,CAAC,WAAW,OAAO,WAAW,QAAQ,KACnD,QAAQ,MAAM,CAAC,WAAW,WAAW,IAAI;AAAA,IAE7C;AAAA,EACF,CAAC;AAED,SAAO,KAAK,IAAI;AAClB;AAEA,MAAM,wBAAwB,OAAO,YAAmC;AACtE,QAAM,aAAa,QAAQ,IAAI,CAAC,WAAW,OAAO,OAAO;AAEzD,qBAAI,QAAQ;AACZ,qBAAI,MAAM,+CAA+C;AACzD,qBAAI,QAAQ;AACZ,aAAW,QAAQ,CAAC,YAAY,mBAAI,OAAO,KAAK,OAAO,EAAE,CAAC;AAE1D,qBAAI,QAAQ;AACZ,QAAM,SAAS,MAAM,sCAAuB,IAAI;AAEhD,SAAO,WAAW;AACpB;AAEA,MAAM,kBAAkB,OAAO,QAAgB;AAC7C,MAAI;AACF,UAAM,gBAAAA,QAAG,SAAS,MAAM,GAAG;AAAA,EAC7B,SAAS,KAAK;AACZ,YAAI,8BAAgB,KAAK,QAAQ,GAAG;AAClC,yBAAI,IAAI,kBAAkB,GAAG,mBAAmB;AAChD,cAAQ,KAAK,CAAC;AAAA,IAChB;AAEA,UAAM;AAAA,EACR;AACF;AAEA,MAAM,gBAAgB,OACpB,cACA,mBAC4B;AAC5B,QAAM,mBAAmB,aAAa,WAAW,SAAS;AAE1D,MAAI,kBAAkB;AACpB,UAAM,aAAa,aAAa,MAAM,UAAU,MAAM;AAEtD,cAAM,mCAAuB,YAAY,cAAc;AAAA,EACzD,OAAO;AACL,UAAM,cAAc,YAAAC,QAAK,KAAK,8BAAc,YAAY;AAExD,cAAM,uBAAU;AAAA;AAAA,MAEd,SAAS,MAAM;AAAA,MACf,YAAY;AAAA,MACZ,iBAAiB;AAAA,MACjB,YAAY,CAAC;AAAA;AAAA,MAEb,uBAAuB;AAAA,IACzB,CAAC;AAAA,EACH;AAEA,QAAM,iBAAiB;AAAA,IACrB,YAAAA,QAAK,KAAK,QAAQ,IAAI,GAAG,cAAc;AAAA,EACzC;AAEA,MAAI,kBAAkB;AACpB,uBAAI,QAAQ;AACZ,uBAAI;AAAA,MACF;AAAA,MACA,mBAAI;AAAA,YACF,+CAAwB,eAAe,cAAc,EAAE,MAAM;AAAA,QAC7D;AAAA,QACA;AAAA,MACF;AAAA,MACA;AAAA,IACF;AAAA,EACF;AAEA,SAAO;AACT;AAEA,MAAM,kBAAkB,YAAY;AAClC,QAAM,oBAAoB,MAAM,+BAAgB,IAAI;AAEpD,MAAI,sBAAsB,iBAAY;AACpC,UAAM,aAAa,MAAM,+BAAgB,IAAI;AAC7C,WAAO,UAAU,UAAU;AAAA,EAC7B;AAEA,SAAO;AACT;AAEA,MAAM,uBAAuB,CAAC,YAC5B,OAAO;AAAA,EACL,QAAQ,IAAI,CAAC,EAAE,KAAK,MAAM,CAAC,MAAM,OAAO,IAAI,KAAK,CAAU;AAC7D;AAEK,MAAM,oBAAoB,CAAC,QAAgC;AAChE,QAAM,qBAAqB,YAAAA,QAAK,KAAK,KAAK,wCAAwB;AAElE,MAAI;AAEF,UAAM,iBAAiB,QAAQ,kBAAkB;AAEjD,WAAO,qCAAqB,MAAM,cAAc;AAAA,EAClD,SAAS,KAAK;AACZ,YAAI,8BAAgB,KAAK,kBAAkB,GAAG;AAC5C,aAAO;AAAA,QACL,YAAY;AAAA,QACZ,QAAQ,CAAC;AAAA,QACT,gBAAgB;AAAA,QAChB,MAAM;AAAA,MACR;AAAA,IACF;AAEA,UAAM;AAAA,EACR;AACF;AAEA,MAAM,qBAAqB,OAAO;AAAA,EAChC;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF,MAAkB;AAChB,QAAM,CAAC,SAAS,QAAQ,IAAI,UAAU,MAAM,GAAG;AAE/C,QAAM,OAAO,OAAO,UAAM,2BAAc,CAAC;AAEzC,MAAI,CAAC,SAAS;AACZ,UAAM,IAAI,MAAM,kCAAkC,SAAS,EAAE;AAAA,EAC/D;AAEA,SAAO;AAAA,IACL;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA;AAAA,IAEA,UAAU,YAAY;AAAA,IAEtB;AAAA,IAEA;AAAA,IACA,uBAAuB,iBAAiB,UAAU,WAAW;AAAA,IAC7D,8BACE,iBAAiB,UAAU,WAAW;AAAA,EAC1C;AACF;AAEO,MAAM,sBAAsB,YAAiC;AAClE,QAAM,EAAE,WAAW,cAAc,UAAU,cAAc,IACvD,MAAM,QAAoB,gCAAiB;AAC7C,qBAAI,MAAM,aAAAC,QAAM,KAAK,QAAQ,GAAG,MAAM,aAAAA,QAAM,KAAK,SAAS,CAAC;AAE3D,QAAM,eAAe,MAAM,mBAAmB;AAAA,IAC5C;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,CAAC;AAED,QAAM,iBAAiB;AAEvB,QAAM,gBAAgB,cAAc;AAEpC,qBAAI,QAAQ;AACZ,QAAM,eAAe,MAAM,gBAAgB;AAE3C,QAAM,EAAE,YAAY,QAAQ,QAAQ,gBAAgB,KAAK,IACvD,MAAM,cAAc,cAAc,cAAc;AAElD,MAAI,OAAO,WAAW,GAAG;AACvB,WAAO;AAAA,MACL;AAAA,MACA;AAAA,MACA;AAAA,MACA,kBAAkB;AAAA,MAClB;AAAA,MACA;AAAA,MACA;AAAA,IACF;AAAA,EACF;AAEA,QAAM,iBAAiB,SAAS,OAAO,MAAM,sBAAsB,MAAM;AAEzE,MAAI,gBAAgB;AAClB,uBAAI,QAAQ;AACZ,UAAMC,iBAAgB
,MAAM,QAAQ;AAAA,MAClC,SAAS;AAAA,MACT,SAAS,aAAAD,QAAM,KAAK,YAAY,aAAAA,QAAM,KAAK,YAAY,CAAC,GAAG;AAAA,MAC3D,MAAM;AAAA,IACR,CAAC;AAED,WAAO;AAAA,MACL;AAAA,MACA;AAAA,MACA;AAAA,MACA,kBAAkB;AAAA,MAClB,cAAc,EAAE,GAAG,cAAc,GAAGC,eAAc;AAAA,MAClD;AAAA,MACA;AAAA,IACF;AAAA,EACF;AAEA,qBAAI,QAAQ;AACZ,qBAAI;AAAA,IACF,0BAA0B,aAAAD,QAAM;AAAA,UAC9B,+CAAwB,cAAc,EAAE,MAAM;AAAA,MAC9C;AAAA,IACF,CAAC;AAAA,EACH;AAEA,QAAM,gBAAgB,qBAAqB,MAAM;AAEjD,SAAO;AAAA,IACL;AAAA,IACA;AAAA,IACA;AAAA,IACA,kBAAkB;AAAA,IAClB,cAAc,EAAE,GAAG,cAAc,GAAG,cAAc;AAAA,IAClD;AAAA,IACA;AAAA,EACF;AACF;AAEO,MAAM,oBAAoB,YAAY;AAC3C,MAAI,OAAO;AAEX,QAAM,IAAI;AAAA,IAAQ,CAAC,YACjB,QAAQ,MACL,GAAG,QAAQ,CAAC,UAAW,QAAQ,MAAM,SAAS,CAAE,EAChD,KAAK,OAAO,OAAO;AAAA,EACxB;AAEA,SAAO,KAAK,KAAK;AAEjB,MAAI,SAAS,IAAI;AACf,uBAAI,IAAI,qBAAqB;AAC7B,YAAQ,KAAK,CAAC;AAAA,EAChB;AAEA,MAAI;AAEJ,MAAI;AACF,YAAQ,KAAK,MAAM,IAAI;AAAA,EACzB,QAAQ;AACN,uBAAI,IAAI,0BAA0B;AAClC,YAAQ,KAAK,CAAC;AAAA,EAChB;AAEA,SAAO;AACT;AAEA,MAAM,oBAAoB,YAAiC;AACzD,QAAM,QAAQ,MAAM,kBAAkB;AAEtC,QAAM,SAAS,mCAAsB,UAAU,KAAK;AAEpD,MAAI,CAAC,OAAO,SAAS;AACnB,uBAAI,IAAI,0BAA0B;AAClC,uBAAI,IAAI,OAAO,KAAK;AACpB,YAAQ,KAAK,CAAC;AAAA,EAChB;AAEA,QAAM,EAAE,gBAAgB,kBAAkB,aAAa,IAAI,OAAO;AAElE,QAAM,eAAe;AAAA,IACnB,GAAI,MAAM,mBAAmB,OAAO,KAAK,YAAY;AAAA,IACrD,GAAG,OAAO,KAAK;AAAA,EACjB;AAEA,QAAM,gBAAgB,cAAc;AAEpC,QAAM,EAAE,YAAY,QAAQ,QAAQ,gBAAgB,KAAK,IACvD,MAAM,cAAc,cAAc,cAAc;AAElD,MAAI,CAAC,kBAAkB;AACrB,QAAI,QAAQ;AACV,yBAAI,IAAI,kBAAkB,mBAAI,KAAK,YAAY,GAAG,oBAAoB;AACtE,cAAQ,KAAK,CAAC;AAAA,IAChB;AAEA,WAAO;AAAA,MACL,GAAG,OAAO;AAAA,MACV;AAAA,MACA;AAAA,MACA,cAAc;AAAA,QACZ,GAAG;AAAA,QACH,GAAG,qBAAqB,MAAM;AAAA,MAChC;AAAA,MACA;AAAA,IACF;AAAA,EACF;AAEA,QAAM,WAAW,OAAO,IAAI,CAAC,EAAE,KAAK,MAAM,IAAI;AAE9C,QAAM,WAAW,IAAI,IAAI,OAAO,KAAK,YAAY,CAAC;AAElD,QAAM,UAAU,SAAS,OAAO,CAAC,SAAS,CAAC,SAAS,IAAI,IAAI,CAAC;AAE7D,MAAI,QAAQ,SAAS,GAAG;AACtB,uBAAI,IAAI,+CAA+C;AACvD,uBAAI,QAAQ;AACZ,YAAQ,QAAQ,CAAC,SAAS,mBAAI,IAAI,KAAK,IAAI,EAAE,CAAC;AAC9C,YAAQ,KAAK,CAAC;AAAA,EAChB;AAEA,SAAO;AAAA,IACL,GAAG,OAAO;AAAA,IACV;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF;AACF;AAEO,MAAM,YAAY,MACvB,QAAQ,MAAM,QAAQ,oBAAoB,IAAI,kBAAkB;",
"names": ["fs", "path", "chalk", "customAnswers"]
}
package/lib/cli/lint/index.js CHANGED
@@ -62,7 +62,7 @@ const lint = async (args = process.argv.slice(2), tscWriteable = void 0, workerT
 import_logging.log.newline();
 import_logging.log.warn(
 `Try running ${import_logging.log.bold(
-packageManager.exec,
+packageManager.print.exec,
 "skuba",
 "format"
 )} to fix them.`
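
As in the earlier hunks, only the member access changes here: the lint failure hint still passes command fragments to log.bold, but the executable prefix now comes from print.exec. A rough sketch of the resulting message, assuming log.bold joins its arguments with spaces:

```ts
// Rough sketch of the hint printed above. bold() stands in for skuba's
// log.bold; the space-joining behaviour and the 'pnpm exec' value are assumptions.
const bold = (...parts: string[]) => parts.join(' ');

const formatHint = (printExec: string) =>
  `Try running ${bold(printExec, 'skuba', 'format')} to fix them.`;

formatHint('pnpm exec'); // -> 'Try running pnpm exec skuba format to fix them.'
```
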
@@ -1,7 +1,7 @@
{
"version": 3,
"sources": ["../../../src/cli/lint/index.ts"],
-
"sourcesContent": ["import type { Writable } from 'stream';\nimport { inspect } from 'util';\n\nimport { hasDebugFlag, hasSerialFlag } from '../../utils/args';\nimport { log } from '../../utils/logging';\nimport { detectPackageManager } from '../../utils/packageManager';\nimport { throwOnTimeout } from '../../utils/wait';\n\nimport { createAnnotations } from './annotate';\nimport { autofix } from './autofix';\nimport { externalLint } from './external';\nimport { internalLint } from './internal';\nimport type { Input } from './types';\n\nexport const lint = async (\n args = process.argv.slice(2),\n tscWriteable: Writable | undefined = undefined,\n workerThreads = true,\n) => {\n const opts: Input = {\n debug: hasDebugFlag(args),\n serial: hasSerialFlag(args),\n tscOutputStream: tscWriteable,\n workerThreads,\n };\n\n const { eslint, prettier, tscOk, tscOutputStream } = await externalLint(opts);\n const internal = await internalLint('lint', opts);\n\n try {\n await throwOnTimeout(\n createAnnotations(internal, eslint, prettier, tscOk, tscOutputStream),\n { s: 30 },\n );\n } catch (err) {\n log.warn('Failed to annotate lint results.');\n log.subtle(inspect(err));\n }\n\n if (!internal.ok || !eslint.ok || !prettier.ok || !tscOk) {\n process.exitCode = 1;\n\n const tools = [\n ...(internal.ok ? [] : ['skuba']),\n ...(eslint.ok ? [] : ['ESLint']),\n ...(prettier.ok ? [] : ['Prettier']),\n ...(tscOk ? [] : ['tsc']),\n ];\n\n log.err(`${tools.join(', ')} found issues that require triage.`);\n\n if (internal.fixable || eslint.fixable || !prettier.ok) {\n const packageManager = await detectPackageManager();\n log.newline();\n log.warn(\n `Try running ${log.bold(\n packageManager.exec,\n 'skuba',\n 'format',\n )} to fix them.`,\n );\n }\n }\n\n await autofix({\n debug: opts.debug,\n eslint: eslint.fixable,\n prettier: !prettier.ok,\n internal: internal.fixable,\n });\n};\n"],
-
"mappings": ";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AACA,kBAAwB;AAExB,kBAA4C;AAC5C,qBAAoB;AACpB,4BAAqC;AACrC,kBAA+B;AAE/B,sBAAkC;AAClC,qBAAwB;AACxB,sBAA6B;AAC7B,sBAA6B;AAGtB,MAAM,OAAO,OAClB,OAAO,QAAQ,KAAK,MAAM,CAAC,GAC3B,eAAqC,QACrC,gBAAgB,SACb;AACH,QAAM,OAAc;AAAA,IAClB,WAAO,0BAAa,IAAI;AAAA,IACxB,YAAQ,2BAAc,IAAI;AAAA,IAC1B,iBAAiB;AAAA,IACjB;AAAA,EACF;AAEA,QAAM,EAAE,QAAQ,UAAU,OAAO,gBAAgB,IAAI,UAAM,8BAAa,IAAI;AAC5E,QAAM,WAAW,UAAM,8BAAa,QAAQ,IAAI;AAEhD,MAAI;AACF,cAAM;AAAA,UACJ,mCAAkB,UAAU,QAAQ,UAAU,OAAO,eAAe;AAAA,MACpE,EAAE,GAAG,GAAG;AAAA,IACV;AAAA,EACF,SAAS,KAAK;AACZ,uBAAI,KAAK,kCAAkC;AAC3C,uBAAI,WAAO,qBAAQ,GAAG,CAAC;AAAA,EACzB;AAEA,MAAI,CAAC,SAAS,MAAM,CAAC,OAAO,MAAM,CAAC,SAAS,MAAM,CAAC,OAAO;AACxD,YAAQ,WAAW;AAEnB,UAAM,QAAQ;AAAA,MACZ,GAAI,SAAS,KAAK,CAAC,IAAI,CAAC,OAAO;AAAA,MAC/B,GAAI,OAAO,KAAK,CAAC,IAAI,CAAC,QAAQ;AAAA,MAC9B,GAAI,SAAS,KAAK,CAAC,IAAI,CAAC,UAAU;AAAA,MAClC,GAAI,QAAQ,CAAC,IAAI,CAAC,KAAK;AAAA,IACzB;AAEA,uBAAI,IAAI,GAAG,MAAM,KAAK,IAAI,CAAC,oCAAoC;AAE/D,QAAI,SAAS,WAAW,OAAO,WAAW,CAAC,SAAS,IAAI;AACtD,YAAM,iBAAiB,UAAM,4CAAqB;AAClD,yBAAI,QAAQ;AACZ,yBAAI;AAAA,QACF,eAAe,mBAAI;AAAA,UACjB,eAAe;AAAA,
+
"sourcesContent": ["import type { Writable } from 'stream';\nimport { inspect } from 'util';\n\nimport { hasDebugFlag, hasSerialFlag } from '../../utils/args';\nimport { log } from '../../utils/logging';\nimport { detectPackageManager } from '../../utils/packageManager';\nimport { throwOnTimeout } from '../../utils/wait';\n\nimport { createAnnotations } from './annotate';\nimport { autofix } from './autofix';\nimport { externalLint } from './external';\nimport { internalLint } from './internal';\nimport type { Input } from './types';\n\nexport const lint = async (\n args = process.argv.slice(2),\n tscWriteable: Writable | undefined = undefined,\n workerThreads = true,\n) => {\n const opts: Input = {\n debug: hasDebugFlag(args),\n serial: hasSerialFlag(args),\n tscOutputStream: tscWriteable,\n workerThreads,\n };\n\n const { eslint, prettier, tscOk, tscOutputStream } = await externalLint(opts);\n const internal = await internalLint('lint', opts);\n\n try {\n await throwOnTimeout(\n createAnnotations(internal, eslint, prettier, tscOk, tscOutputStream),\n { s: 30 },\n );\n } catch (err) {\n log.warn('Failed to annotate lint results.');\n log.subtle(inspect(err));\n }\n\n if (!internal.ok || !eslint.ok || !prettier.ok || !tscOk) {\n process.exitCode = 1;\n\n const tools = [\n ...(internal.ok ? [] : ['skuba']),\n ...(eslint.ok ? [] : ['ESLint']),\n ...(prettier.ok ? [] : ['Prettier']),\n ...(tscOk ? [] : ['tsc']),\n ];\n\n log.err(`${tools.join(', ')} found issues that require triage.`);\n\n if (internal.fixable || eslint.fixable || !prettier.ok) {\n const packageManager = await detectPackageManager();\n log.newline();\n log.warn(\n `Try running ${log.bold(\n packageManager.print.exec,\n 'skuba',\n 'format',\n )} to fix them.`,\n );\n }\n }\n\n await autofix({\n debug: opts.debug,\n eslint: eslint.fixable,\n prettier: !prettier.ok,\n internal: internal.fixable,\n });\n};\n"],
+
"mappings": ";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AACA,kBAAwB;AAExB,kBAA4C;AAC5C,qBAAoB;AACpB,4BAAqC;AACrC,kBAA+B;AAE/B,sBAAkC;AAClC,qBAAwB;AACxB,sBAA6B;AAC7B,sBAA6B;AAGtB,MAAM,OAAO,OAClB,OAAO,QAAQ,KAAK,MAAM,CAAC,GAC3B,eAAqC,QACrC,gBAAgB,SACb;AACH,QAAM,OAAc;AAAA,IAClB,WAAO,0BAAa,IAAI;AAAA,IACxB,YAAQ,2BAAc,IAAI;AAAA,IAC1B,iBAAiB;AAAA,IACjB;AAAA,EACF;AAEA,QAAM,EAAE,QAAQ,UAAU,OAAO,gBAAgB,IAAI,UAAM,8BAAa,IAAI;AAC5E,QAAM,WAAW,UAAM,8BAAa,QAAQ,IAAI;AAEhD,MAAI;AACF,cAAM;AAAA,UACJ,mCAAkB,UAAU,QAAQ,UAAU,OAAO,eAAe;AAAA,MACpE,EAAE,GAAG,GAAG;AAAA,IACV;AAAA,EACF,SAAS,KAAK;AACZ,uBAAI,KAAK,kCAAkC;AAC3C,uBAAI,WAAO,qBAAQ,GAAG,CAAC;AAAA,EACzB;AAEA,MAAI,CAAC,SAAS,MAAM,CAAC,OAAO,MAAM,CAAC,SAAS,MAAM,CAAC,OAAO;AACxD,YAAQ,WAAW;AAEnB,UAAM,QAAQ;AAAA,MACZ,GAAI,SAAS,KAAK,CAAC,IAAI,CAAC,OAAO;AAAA,MAC/B,GAAI,OAAO,KAAK,CAAC,IAAI,CAAC,QAAQ;AAAA,MAC9B,GAAI,SAAS,KAAK,CAAC,IAAI,CAAC,UAAU;AAAA,MAClC,GAAI,QAAQ,CAAC,IAAI,CAAC,KAAK;AAAA,IACzB;AAEA,uBAAI,IAAI,GAAG,MAAM,KAAK,IAAI,CAAC,oCAAoC;AAE/D,QAAI,SAAS,WAAW,OAAO,WAAW,CAAC,SAAS,IAAI;AACtD,YAAM,iBAAiB,UAAM,4CAAqB;AAClD,yBAAI,QAAQ;AACZ,yBAAI;AAAA,QACF,eAAe,mBAAI;AAAA,UACjB,eAAe,MAAM;AAAA,UACrB;AAAA,UACA;AAAA,QACF,CAAC;AAAA,MACH;AAAA,IACF;AAAA,EACF;AAEA,YAAM,wBAAQ;AAAA,IACZ,OAAO,KAAK;AAAA,IACZ,QAAQ,OAAO;AAAA,IACf,UAAU,CAAC,SAAS;AAAA,IACpB,UAAU,SAAS;AAAA,EACrB,CAAC;AACH;",
"names": []
}
@@ -50,7 +50,7 @@ const noSkubaTemplateJs = async (_mode, logger) => {
 if (await (0, import_fs_extra.pathExists)(templateConfigPath)) {
 logger.err(
 `Template is incomplete; run ${logger.bold(
-packageManager.exec,
+packageManager.print.exec,
 "skuba",
 "configure"
 )}. ${logger.dim("no-skuba-template-js")}`
@@ -61,7 +61,7 @@ const noSkubaTemplateJs = async (_mode, logger) => {
 annotations: [
 {
 path: "skuba.template.js",
-message: `Template is incomplete; run ${packageManager.exec} skuba configure.`
+message: `Template is incomplete; run ${packageManager.print.exec} skuba configure.`
 }
 ]
 };
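
Beyond the printed message, the same print.exec value now flows into the annotation that this internal lint emits for an unfinished template. A sketch of the failing result shape, mirroring the hunk above and the source embedded in the map hunk below; exact types are skuba-internal:

```ts
// Sketch of the result returned by noSkubaTemplateJs when skuba.template.js
// still exists, based on the hunk above; the parameter name is illustrative.
const incompleteTemplateResult = (printExec: string) => ({
  ok: false,
  fixable: false,
  annotations: [
    {
      path: 'skuba.template.js',
      message: `Template is incomplete; run ${printExec} skuba configure.`,
    },
  ],
});
```
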
@@ -1,7 +1,7 @@
{
"version": 3,
"sources": ["../../../../src/cli/lint/internalLints/noSkubaTemplateJs.ts"],
-
"sourcesContent": ["import path from 'path';\n\nimport { pathExists } from 'fs-extra';\n\nimport type { Logger } from '../../../utils/logging';\nimport { getConsumerManifest } from '../../../utils/manifest';\nimport { detectPackageManager } from '../../../utils/packageManager';\nimport type { InternalLintResult } from '../internal';\n\nexport const noSkubaTemplateJs = async (\n _mode: 'format' | 'lint',\n logger: Logger,\n): Promise<InternalLintResult> => {\n const [manifest, packageManager] = await Promise.all([\n getConsumerManifest(),\n detectPackageManager(),\n ]);\n\n if (!manifest) {\n // This will throw elsewhere\n return { ok: true, fixable: false };\n }\n\n const templateConfigPath = path.join(\n path.dirname(manifest.path),\n 'skuba.template.js',\n );\n\n if (await pathExists(templateConfigPath)) {\n logger.err(\n `Template is incomplete; run ${logger.bold(\n packageManager.exec,\n 'skuba',\n 'configure',\n )}. ${logger.dim('no-skuba-template-js')}`,\n );\n\n return {\n ok: false,\n fixable: false,\n annotations: [\n {\n path: 'skuba.template.js',\n message: `Template is incomplete; run ${packageManager.exec} skuba configure.`,\n },\n ],\n };\n }\n\n return { ok: true, fixable: false };\n};\n"],
-
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAiB;AAEjB,sBAA2B;AAG3B,sBAAoC;AACpC,4BAAqC;AAG9B,MAAM,oBAAoB,OAC/B,OACA,WACgC;AAChC,QAAM,CAAC,UAAU,cAAc,IAAI,MAAM,QAAQ,IAAI;AAAA,QACnD,qCAAoB;AAAA,QACpB,4CAAqB;AAAA,EACvB,CAAC;AAED,MAAI,CAAC,UAAU;AAEb,WAAO,EAAE,IAAI,MAAM,SAAS,MAAM;AAAA,EACpC;AAEA,QAAM,qBAAqB,YAAAA,QAAK;AAAA,IAC9B,YAAAA,QAAK,QAAQ,SAAS,IAAI;AAAA,IAC1B;AAAA,EACF;AAEA,MAAI,UAAM,4BAAW,kBAAkB,GAAG;AACxC,WAAO;AAAA,MACL,+BAA+B,OAAO;AAAA,QACpC,eAAe;AAAA,
+
"sourcesContent": ["import path from 'path';\n\nimport { pathExists } from 'fs-extra';\n\nimport type { Logger } from '../../../utils/logging';\nimport { getConsumerManifest } from '../../../utils/manifest';\nimport { detectPackageManager } from '../../../utils/packageManager';\nimport type { InternalLintResult } from '../internal';\n\nexport const noSkubaTemplateJs = async (\n _mode: 'format' | 'lint',\n logger: Logger,\n): Promise<InternalLintResult> => {\n const [manifest, packageManager] = await Promise.all([\n getConsumerManifest(),\n detectPackageManager(),\n ]);\n\n if (!manifest) {\n // This will throw elsewhere\n return { ok: true, fixable: false };\n }\n\n const templateConfigPath = path.join(\n path.dirname(manifest.path),\n 'skuba.template.js',\n );\n\n if (await pathExists(templateConfigPath)) {\n logger.err(\n `Template is incomplete; run ${logger.bold(\n packageManager.print.exec,\n 'skuba',\n 'configure',\n )}. ${logger.dim('no-skuba-template-js')}`,\n );\n\n return {\n ok: false,\n fixable: false,\n annotations: [\n {\n path: 'skuba.template.js',\n message: `Template is incomplete; run ${packageManager.print.exec} skuba configure.`,\n },\n ],\n };\n }\n\n return { ok: true, fixable: false };\n};\n"],
+
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAiB;AAEjB,sBAA2B;AAG3B,sBAAoC;AACpC,4BAAqC;AAG9B,MAAM,oBAAoB,OAC/B,OACA,WACgC;AAChC,QAAM,CAAC,UAAU,cAAc,IAAI,MAAM,QAAQ,IAAI;AAAA,QACnD,qCAAoB;AAAA,QACpB,4CAAqB;AAAA,EACvB,CAAC;AAED,MAAI,CAAC,UAAU;AAEb,WAAO,EAAE,IAAI,MAAM,SAAS,MAAM;AAAA,EACpC;AAEA,QAAM,qBAAqB,YAAAA,QAAK;AAAA,IAC9B,YAAAA,QAAK,QAAQ,SAAS,IAAI;AAAA,IAC1B;AAAA,EACF;AAEA,MAAI,UAAM,4BAAW,kBAAkB,GAAG;AACxC,WAAO;AAAA,MACL,+BAA+B,OAAO;AAAA,QACpC,eAAe,MAAM;AAAA,QACrB;AAAA,QACA;AAAA,MACF,CAAC,KAAK,OAAO,IAAI,sBAAsB,CAAC;AAAA,IAC1C;AAEA,WAAO;AAAA,MACL,IAAI;AAAA,MACJ,SAAS;AAAA,MACT,aAAa;AAAA,QACX;AAAA,UACE,MAAM;AAAA,UACN,SAAS,+BAA+B,eAAe,MAAM,IAAI;AAAA,QACnE;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAEA,SAAO,EAAE,IAAI,MAAM,SAAS,MAAM;AACpC;",
  "names": ["path"]
  }
@@ -119,7 +119,7 @@ const refreshConfigFiles = async (mode, logger) => {
  msg: `The ${logger.bold(
  filename
  )} file is out of date. Run \`${logger.bold(
- packageManager2.exec,
+ packageManager2.print.exec,
  "skuba",
  "format"
  )}\` to update it.`,
@@ -1,7 +1,7 @@
  {
  "version": 3,
  "sources": ["../../../../src/cli/lint/internalLints/refreshConfigFiles.ts"],
-
"sourcesContent": ["import path from 'path';\nimport { inspect, stripVTControlCharacters as stripAnsi } from 'util';\n\nimport { writeFile } from 'fs-extra';\n\nimport { Git } from '../../..';\nimport type { Logger } from '../../../utils/logging';\nimport { NPMRC_LINES, hasNpmrcSecret } from '../../../utils/npmrc';\nimport {\n type PackageManagerConfig,\n detectPackageManager,\n} from '../../../utils/packageManager';\nimport { readBaseTemplateFile } from '../../../utils/template';\nimport { getDestinationManifest } from '../../configure/analysis/package';\nimport { createDestinationFileReader } from '../../configure/analysis/project';\nimport { mergeWithConfigFile } from '../../configure/processing/configFile';\nimport type { InternalLintResult } from '../internal';\n\nconst ensureNoAuthToken = (fileContents: string) =>\n fileContents\n .split('\\n')\n .filter((line) => !hasNpmrcSecret(line))\n .join('\\n');\n\ntype RefreshableConfigFile = {\n name: string;\n type: 'ignore' | 'npmrc';\n additionalMapping?: (\n s: string,\n packageManager: PackageManagerConfig,\n ) => string;\n if?: (packageManager: PackageManagerConfig) => boolean;\n};\n\nconst removeRedundantNpmrc = (contents: string) => {\n const npmrcLines = contents\n .split('\\n')\n .filter((line) => NPMRC_LINES.includes(line.trim()));\n\n // If we're only left with !.npmrc line we can remove it\n // TODO: Consider if we should generalise this\n if (npmrcLines.length > 0 && npmrcLines.every((line) => line.includes('!'))) {\n return contents\n .split('\\n')\n .filter((line) => !NPMRC_LINES.includes(line.trim()))\n .join('\\n');\n }\n return contents;\n};\n\nexport const REFRESHABLE_CONFIG_FILES: RefreshableConfigFile[] = [\n {\n name: '.gitignore',\n type: 'ignore',\n additionalMapping: removeRedundantNpmrc,\n },\n { name: '.prettierignore', type: 'ignore' },\n {\n name: '.npmrc',\n type: 'npmrc',\n additionalMapping: ensureNoAuthToken,\n if: (packageManager: PackageManagerConfig) =>\n packageManager.command === 'pnpm',\n },\n {\n name: '.dockerignore',\n type: 'ignore',\n additionalMapping: removeRedundantNpmrc,\n },\n];\n\nexport const refreshConfigFiles = async (\n mode: 'format' | 'lint',\n logger: Logger,\n) => {\n const [manifest, gitRoot] = await Promise.all([\n getDestinationManifest(),\n Git.findRoot({ dir: process.cwd() }),\n ]);\n\n const destinationRoot = path.dirname(manifest.path);\n\n const readDestinationFile = createDestinationFileReader(destinationRoot);\n\n const refreshConfigFile = async (\n {\n name: filename,\n type: fileType,\n additionalMapping = (s) => s,\n if: condition = () => true,\n }: RefreshableConfigFile,\n packageManager: PackageManagerConfig,\n ) => {\n if (!condition(packageManager)) {\n return { needsChange: false };\n }\n\n const [inputFile, templateFile, isGitIgnored] = await Promise.all([\n readDestinationFile(filename),\n readBaseTemplateFile(`_${filename}`),\n gitRoot\n ? Git.isFileGitIgnored({\n gitRoot,\n absolutePath: path.join(destinationRoot, filename),\n })\n : false,\n ]);\n\n // If the file is gitignored and doesn't exist, don't make it\n if (inputFile === undefined && isGitIgnored) {\n return { needsChange: false };\n }\n\n const data = additionalMapping(\n inputFile\n ? 
mergeWithConfigFile(templateFile, fileType)(inputFile)\n : templateFile,\n packageManager,\n );\n\n const filepath = path.join(destinationRoot, filename);\n\n if (mode === 'format') {\n if (data === inputFile) {\n return { needsChange: false };\n }\n\n await writeFile(filepath, data);\n return {\n needsChange: false,\n msg: `Refreshed ${logger.bold(filename)}.`,\n filename,\n };\n }\n\n if (data !== inputFile) {\n return {\n needsChange: true,\n msg: `The ${logger.bold(\n filename,\n )} file is out of date. Run \\`${logger.bold(\n packageManager.exec,\n 'skuba',\n 'format',\n )}\\` to update it.`,\n filename,\n };\n }\n\n return { needsChange: false };\n };\n\n const packageManager = await detectPackageManager(destinationRoot);\n\n const results = await Promise.all(\n REFRESHABLE_CONFIG_FILES.map((conf) =>\n refreshConfigFile(conf, packageManager),\n ),\n );\n\n // Log after for reproducible test output ordering\n results.forEach((result) => {\n if (result.msg) {\n logger.warn(result.msg, logger.dim('refresh-config-files'));\n }\n });\n\n const anyNeedChanging = results.some(({ needsChange }) => needsChange);\n\n return {\n ok: !anyNeedChanging,\n fixable: anyNeedChanging,\n annotations: results.flatMap(({ needsChange, filename, msg }) =>\n needsChange && msg\n ? [\n {\n path: filename,\n message: stripAnsi(msg),\n },\n ]\n : [],\n ),\n };\n};\n\nexport const tryRefreshConfigFiles = async (\n mode: 'format' | 'lint',\n logger: Logger,\n): Promise<InternalLintResult> => {\n try {\n return await refreshConfigFiles(mode, logger);\n } catch (err) {\n logger.warn('Failed to refresh config files.');\n logger.subtle(inspect(err));\n\n return {\n ok: false,\n fixable: false,\n annotations: [],\n };\n }\n};\n"],
-
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAiB;AACjB,kBAA+D;AAE/D,sBAA0B;AAE1B,eAAoB;AAEpB,mBAA4C;AAC5C,4BAGO;AACP,sBAAqC;AACrC,qBAAuC;AACvC,qBAA4C;AAC5C,wBAAoC;AAGpC,MAAM,oBAAoB,CAAC,iBACzB,aACG,MAAM,IAAI,EACV,OAAO,CAAC,SAAS,KAAC,6BAAe,IAAI,CAAC,EACtC,KAAK,IAAI;AAYd,MAAM,uBAAuB,CAAC,aAAqB;AACjD,QAAM,aAAa,SAChB,MAAM,IAAI,EACV,OAAO,CAAC,SAAS,yBAAY,SAAS,KAAK,KAAK,CAAC,CAAC;AAIrD,MAAI,WAAW,SAAS,KAAK,WAAW,MAAM,CAAC,SAAS,KAAK,SAAS,GAAG,CAAC,GAAG;AAC3E,WAAO,SACJ,MAAM,IAAI,EACV,OAAO,CAAC,SAAS,CAAC,yBAAY,SAAS,KAAK,KAAK,CAAC,CAAC,EACnD,KAAK,IAAI;AAAA,EACd;AACA,SAAO;AACT;AAEO,MAAM,2BAAoD;AAAA,EAC/D;AAAA,IACE,MAAM;AAAA,IACN,MAAM;AAAA,IACN,mBAAmB;AAAA,EACrB;AAAA,EACA,EAAE,MAAM,mBAAmB,MAAM,SAAS;AAAA,EAC1C;AAAA,IACE,MAAM;AAAA,IACN,MAAM;AAAA,IACN,mBAAmB;AAAA,IACnB,IAAI,CAAC,mBACH,eAAe,YAAY;AAAA,EAC/B;AAAA,EACA;AAAA,IACE,MAAM;AAAA,IACN,MAAM;AAAA,IACN,mBAAmB;AAAA,EACrB;AACF;AAEO,MAAM,qBAAqB,OAChC,MACA,WACG;AACH,QAAM,CAAC,UAAU,OAAO,IAAI,MAAM,QAAQ,IAAI;AAAA,QAC5C,uCAAuB;AAAA,IACvB,aAAI,SAAS,EAAE,KAAK,QAAQ,IAAI,EAAE,CAAC;AAAA,EACrC,CAAC;AAED,QAAM,kBAAkB,YAAAA,QAAK,QAAQ,SAAS,IAAI;AAElD,QAAM,0BAAsB,4CAA4B,eAAe;AAEvE,QAAM,oBAAoB,OACxB;AAAA,IACE,MAAM;AAAA,IACN,MAAM;AAAA,IACN,oBAAoB,CAAC,MAAM;AAAA,IAC3B,IAAI,YAAY,MAAM;AAAA,EACxB,GACAC,oBACG;AACH,QAAI,CAAC,UAAUA,eAAc,GAAG;AAC9B,aAAO,EAAE,aAAa,MAAM;AAAA,IAC9B;AAEA,UAAM,CAAC,WAAW,cAAc,YAAY,IAAI,MAAM,QAAQ,IAAI;AAAA,MAChE,oBAAoB,QAAQ;AAAA,UAC5B,sCAAqB,IAAI,QAAQ,EAAE;AAAA,MACnC,UACI,aAAI,iBAAiB;AAAA,QACnB;AAAA,QACA,cAAc,YAAAD,QAAK,KAAK,iBAAiB,QAAQ;AAAA,MACnD,CAAC,IACD;AAAA,IACN,CAAC;AAGD,QAAI,cAAc,UAAa,cAAc;AAC3C,aAAO,EAAE,aAAa,MAAM;AAAA,IAC9B;AAEA,UAAM,OAAO;AAAA,MACX,gBACI,uCAAoB,cAAc,QAAQ,EAAE,SAAS,IACrD;AAAA,MACJC;AAAA,IACF;AAEA,UAAM,WAAW,YAAAD,QAAK,KAAK,iBAAiB,QAAQ;AAEpD,QAAI,SAAS,UAAU;AACrB,UAAI,SAAS,WAAW;AACtB,eAAO,EAAE,aAAa,MAAM;AAAA,MAC9B;AAEA,gBAAM,2BAAU,UAAU,IAAI;AAC9B,aAAO;AAAA,QACL,aAAa;AAAA,QACb,KAAK,aAAa,OAAO,KAAK,QAAQ,CAAC;AAAA,QACvC;AAAA,MACF;AAAA,IACF;AAEA,QAAI,SAAS,WAAW;AACtB,aAAO;AAAA,QACL,aAAa;AAAA,QACb,KAAK,OAAO,OAAO;AAAA,UACjB;AAAA,QACF,CAAC,+BAA+B,OAAO;AAAA,UACrCC,gBAAe;AAAA,
+
"sourcesContent": ["import path from 'path';\nimport { inspect, stripVTControlCharacters as stripAnsi } from 'util';\n\nimport { writeFile } from 'fs-extra';\n\nimport { Git } from '../../..';\nimport type { Logger } from '../../../utils/logging';\nimport { NPMRC_LINES, hasNpmrcSecret } from '../../../utils/npmrc';\nimport {\n type PackageManagerConfig,\n detectPackageManager,\n} from '../../../utils/packageManager';\nimport { readBaseTemplateFile } from '../../../utils/template';\nimport { getDestinationManifest } from '../../configure/analysis/package';\nimport { createDestinationFileReader } from '../../configure/analysis/project';\nimport { mergeWithConfigFile } from '../../configure/processing/configFile';\nimport type { InternalLintResult } from '../internal';\n\nconst ensureNoAuthToken = (fileContents: string) =>\n fileContents\n .split('\\n')\n .filter((line) => !hasNpmrcSecret(line))\n .join('\\n');\n\ntype RefreshableConfigFile = {\n name: string;\n type: 'ignore' | 'npmrc';\n additionalMapping?: (\n s: string,\n packageManager: PackageManagerConfig,\n ) => string;\n if?: (packageManager: PackageManagerConfig) => boolean;\n};\n\nconst removeRedundantNpmrc = (contents: string) => {\n const npmrcLines = contents\n .split('\\n')\n .filter((line) => NPMRC_LINES.includes(line.trim()));\n\n // If we're only left with !.npmrc line we can remove it\n // TODO: Consider if we should generalise this\n if (npmrcLines.length > 0 && npmrcLines.every((line) => line.includes('!'))) {\n return contents\n .split('\\n')\n .filter((line) => !NPMRC_LINES.includes(line.trim()))\n .join('\\n');\n }\n return contents;\n};\n\nexport const REFRESHABLE_CONFIG_FILES: RefreshableConfigFile[] = [\n {\n name: '.gitignore',\n type: 'ignore',\n additionalMapping: removeRedundantNpmrc,\n },\n { name: '.prettierignore', type: 'ignore' },\n {\n name: '.npmrc',\n type: 'npmrc',\n additionalMapping: ensureNoAuthToken,\n if: (packageManager: PackageManagerConfig) =>\n packageManager.command === 'pnpm',\n },\n {\n name: '.dockerignore',\n type: 'ignore',\n additionalMapping: removeRedundantNpmrc,\n },\n];\n\nexport const refreshConfigFiles = async (\n mode: 'format' | 'lint',\n logger: Logger,\n) => {\n const [manifest, gitRoot] = await Promise.all([\n getDestinationManifest(),\n Git.findRoot({ dir: process.cwd() }),\n ]);\n\n const destinationRoot = path.dirname(manifest.path);\n\n const readDestinationFile = createDestinationFileReader(destinationRoot);\n\n const refreshConfigFile = async (\n {\n name: filename,\n type: fileType,\n additionalMapping = (s) => s,\n if: condition = () => true,\n }: RefreshableConfigFile,\n packageManager: PackageManagerConfig,\n ) => {\n if (!condition(packageManager)) {\n return { needsChange: false };\n }\n\n const [inputFile, templateFile, isGitIgnored] = await Promise.all([\n readDestinationFile(filename),\n readBaseTemplateFile(`_${filename}`),\n gitRoot\n ? Git.isFileGitIgnored({\n gitRoot,\n absolutePath: path.join(destinationRoot, filename),\n })\n : false,\n ]);\n\n // If the file is gitignored and doesn't exist, don't make it\n if (inputFile === undefined && isGitIgnored) {\n return { needsChange: false };\n }\n\n const data = additionalMapping(\n inputFile\n ? 
mergeWithConfigFile(templateFile, fileType)(inputFile)\n : templateFile,\n packageManager,\n );\n\n const filepath = path.join(destinationRoot, filename);\n\n if (mode === 'format') {\n if (data === inputFile) {\n return { needsChange: false };\n }\n\n await writeFile(filepath, data);\n return {\n needsChange: false,\n msg: `Refreshed ${logger.bold(filename)}.`,\n filename,\n };\n }\n\n if (data !== inputFile) {\n return {\n needsChange: true,\n msg: `The ${logger.bold(\n filename,\n )} file is out of date. Run \\`${logger.bold(\n packageManager.print.exec,\n 'skuba',\n 'format',\n )}\\` to update it.`,\n filename,\n };\n }\n\n return { needsChange: false };\n };\n\n const packageManager = await detectPackageManager(destinationRoot);\n\n const results = await Promise.all(\n REFRESHABLE_CONFIG_FILES.map((conf) =>\n refreshConfigFile(conf, packageManager),\n ),\n );\n\n // Log after for reproducible test output ordering\n results.forEach((result) => {\n if (result.msg) {\n logger.warn(result.msg, logger.dim('refresh-config-files'));\n }\n });\n\n const anyNeedChanging = results.some(({ needsChange }) => needsChange);\n\n return {\n ok: !anyNeedChanging,\n fixable: anyNeedChanging,\n annotations: results.flatMap(({ needsChange, filename, msg }) =>\n needsChange && msg\n ? [\n {\n path: filename,\n message: stripAnsi(msg),\n },\n ]\n : [],\n ),\n };\n};\n\nexport const tryRefreshConfigFiles = async (\n mode: 'format' | 'lint',\n logger: Logger,\n): Promise<InternalLintResult> => {\n try {\n return await refreshConfigFiles(mode, logger);\n } catch (err) {\n logger.warn('Failed to refresh config files.');\n logger.subtle(inspect(err));\n\n return {\n ok: false,\n fixable: false,\n annotations: [],\n };\n }\n};\n"],
+
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAiB;AACjB,kBAA+D;AAE/D,sBAA0B;AAE1B,eAAoB;AAEpB,mBAA4C;AAC5C,4BAGO;AACP,sBAAqC;AACrC,qBAAuC;AACvC,qBAA4C;AAC5C,wBAAoC;AAGpC,MAAM,oBAAoB,CAAC,iBACzB,aACG,MAAM,IAAI,EACV,OAAO,CAAC,SAAS,KAAC,6BAAe,IAAI,CAAC,EACtC,KAAK,IAAI;AAYd,MAAM,uBAAuB,CAAC,aAAqB;AACjD,QAAM,aAAa,SAChB,MAAM,IAAI,EACV,OAAO,CAAC,SAAS,yBAAY,SAAS,KAAK,KAAK,CAAC,CAAC;AAIrD,MAAI,WAAW,SAAS,KAAK,WAAW,MAAM,CAAC,SAAS,KAAK,SAAS,GAAG,CAAC,GAAG;AAC3E,WAAO,SACJ,MAAM,IAAI,EACV,OAAO,CAAC,SAAS,CAAC,yBAAY,SAAS,KAAK,KAAK,CAAC,CAAC,EACnD,KAAK,IAAI;AAAA,EACd;AACA,SAAO;AACT;AAEO,MAAM,2BAAoD;AAAA,EAC/D;AAAA,IACE,MAAM;AAAA,IACN,MAAM;AAAA,IACN,mBAAmB;AAAA,EACrB;AAAA,EACA,EAAE,MAAM,mBAAmB,MAAM,SAAS;AAAA,EAC1C;AAAA,IACE,MAAM;AAAA,IACN,MAAM;AAAA,IACN,mBAAmB;AAAA,IACnB,IAAI,CAAC,mBACH,eAAe,YAAY;AAAA,EAC/B;AAAA,EACA;AAAA,IACE,MAAM;AAAA,IACN,MAAM;AAAA,IACN,mBAAmB;AAAA,EACrB;AACF;AAEO,MAAM,qBAAqB,OAChC,MACA,WACG;AACH,QAAM,CAAC,UAAU,OAAO,IAAI,MAAM,QAAQ,IAAI;AAAA,QAC5C,uCAAuB;AAAA,IACvB,aAAI,SAAS,EAAE,KAAK,QAAQ,IAAI,EAAE,CAAC;AAAA,EACrC,CAAC;AAED,QAAM,kBAAkB,YAAAA,QAAK,QAAQ,SAAS,IAAI;AAElD,QAAM,0BAAsB,4CAA4B,eAAe;AAEvE,QAAM,oBAAoB,OACxB;AAAA,IACE,MAAM;AAAA,IACN,MAAM;AAAA,IACN,oBAAoB,CAAC,MAAM;AAAA,IAC3B,IAAI,YAAY,MAAM;AAAA,EACxB,GACAC,oBACG;AACH,QAAI,CAAC,UAAUA,eAAc,GAAG;AAC9B,aAAO,EAAE,aAAa,MAAM;AAAA,IAC9B;AAEA,UAAM,CAAC,WAAW,cAAc,YAAY,IAAI,MAAM,QAAQ,IAAI;AAAA,MAChE,oBAAoB,QAAQ;AAAA,UAC5B,sCAAqB,IAAI,QAAQ,EAAE;AAAA,MACnC,UACI,aAAI,iBAAiB;AAAA,QACnB;AAAA,QACA,cAAc,YAAAD,QAAK,KAAK,iBAAiB,QAAQ;AAAA,MACnD,CAAC,IACD;AAAA,IACN,CAAC;AAGD,QAAI,cAAc,UAAa,cAAc;AAC3C,aAAO,EAAE,aAAa,MAAM;AAAA,IAC9B;AAEA,UAAM,OAAO;AAAA,MACX,gBACI,uCAAoB,cAAc,QAAQ,EAAE,SAAS,IACrD;AAAA,MACJC;AAAA,IACF;AAEA,UAAM,WAAW,YAAAD,QAAK,KAAK,iBAAiB,QAAQ;AAEpD,QAAI,SAAS,UAAU;AACrB,UAAI,SAAS,WAAW;AACtB,eAAO,EAAE,aAAa,MAAM;AAAA,MAC9B;AAEA,gBAAM,2BAAU,UAAU,IAAI;AAC9B,aAAO;AAAA,QACL,aAAa;AAAA,QACb,KAAK,aAAa,OAAO,KAAK,QAAQ,CAAC;AAAA,QACvC;AAAA,MACF;AAAA,IACF;AAEA,QAAI,SAAS,WAAW;AACtB,aAAO;AAAA,QACL,aAAa;AAAA,QACb,KAAK,OAAO,OAAO;AAAA,UACjB;AAAA,QACF,CAAC,+BAA+B,OAAO;AAAA,UACrCC,gBAAe,MAAM;AAAA,UACrB;AAAA,UACA;AAAA,QACF,CAAC;AAAA,QACD;AAAA,MACF;AAAA,IACF;AAEA,WAAO,EAAE,aAAa,MAAM;AAAA,EAC9B;AAEA,QAAM,iBAAiB,UAAM,4CAAqB,eAAe;AAEjE,QAAM,UAAU,MAAM,QAAQ;AAAA,IAC5B,yBAAyB;AAAA,MAAI,CAAC,SAC5B,kBAAkB,MAAM,cAAc;AAAA,IACxC;AAAA,EACF;AAGA,UAAQ,QAAQ,CAAC,WAAW;AAC1B,QAAI,OAAO,KAAK;AACd,aAAO,KAAK,OAAO,KAAK,OAAO,IAAI,sBAAsB,CAAC;AAAA,IAC5D;AAAA,EACF,CAAC;AAED,QAAM,kBAAkB,QAAQ,KAAK,CAAC,EAAE,YAAY,MAAM,WAAW;AAErE,SAAO;AAAA,IACL,IAAI,CAAC;AAAA,IACL,SAAS;AAAA,IACT,aAAa,QAAQ;AAAA,MAAQ,CAAC,EAAE,aAAa,UAAU,IAAI,MACzD,eAAe,MACX;AAAA,QACE;AAAA,UACE,MAAM;AAAA,UACN,aAAS,YAAAC,0BAAU,GAAG;AAAA,QACxB;AAAA,MACF,IACA,CAAC;AAAA,IACP;AAAA,EACF;AACF;AAEO,MAAM,wBAAwB,OACnC,MACA,WACgC;AAChC,MAAI;AACF,WAAO,MAAM,mBAAmB,MAAM,MAAM;AAAA,EAC9C,SAAS,KAAK;AACZ,WAAO,KAAK,iCAAiC;AAC7C,WAAO,WAAO,qBAAQ,GAAG,CAAC;AAE1B,WAAO;AAAA,MACL,IAAI;AAAA,MACJ,SAAS;AAAA,MACT,aAAa,CAAC;AAAA,IAChB;AAAA,EACF;AACF;",
  "names": ["path", "packageManager", "stripAnsi"]
  }
@@ -96,7 +96,7 @@ const upgradeSkuba = async (mode, logger) => {
  }
  logger.warn(
  `skuba has patches to apply. Run ${logger.bold(
- packageManager.exec,
+ packageManager.print.exec,
  "skuba",
  "format"
  )} to run them. ${logger.dim("skuba-patches")}`
@@ -109,7 +109,7 @@ const upgradeSkuba = async (mode, logger) => {
  // package.json as likely skuba version has changed
  // TODO: locate the "skuba": {} config in the package.json and annotate on the version property
  path: manifest.path,
- message: `skuba has patches to apply. Run ${packageManager.exec} skuba format to run them.`
+ message: `skuba has patches to apply. Run ${packageManager.print.exec} skuba format to run them.`
  }
  ]
  };
@@ -1,7 +1,7 @@
  {
  "version": 3,
  "sources": ["../../../../../src/cli/lint/internalLints/upgrade/index.ts"],
-
"sourcesContent": ["import path from 'path';\n\nimport { readdir, writeFile } from 'fs-extra';\nimport type readPkgUp from 'read-pkg-up';\nimport { gte, sort } from 'semver';\n\nimport type { Logger } from '../../../../utils/logging';\nimport { getConsumerManifest } from '../../../../utils/manifest';\nimport {\n type PackageManagerConfig,\n detectPackageManager,\n} from '../../../../utils/packageManager';\nimport { getSkubaVersion } from '../../../../utils/version';\nimport { formatPackage } from '../../../configure/processing/package';\nimport type { SkubaPackageJson } from '../../../init/writePackageJson';\nimport type { InternalLintResult } from '../../internal';\n\nexport type Patches = Patch[];\nexport type Patch = {\n apply: PatchFunction;\n description: string;\n};\nexport type PatchReturnType =\n | { result: 'apply' }\n | { result: 'skip'; reason?: string };\n\nexport type PatchConfig = {\n mode: 'format' | 'lint';\n manifest: readPkgUp.NormalizedReadResult;\n packageManager: PackageManagerConfig;\n dir?: string;\n};\n\nexport type PatchFunction = (config: PatchConfig) => Promise<PatchReturnType>;\n\nconst getPatches = async (manifestVersion: string): Promise<Patches> => {\n const patches = await readdir(path.join(__dirname, 'patches'), {\n withFileTypes: true,\n });\n\n // The patches are sorted by the version they were added from.\n // Only return patches that are newer or equal to the current version.\n const patchesForVersion = sort(\n patches.flatMap((patch) =>\n // Is a directory rather than a JavaScript source file\n patch.isDirectory() &&\n // Has been added since the last patch run on the project\n gte(patch.name, manifestVersion)\n ? patch.name\n : [],\n ),\n );\n\n return (await Promise.all(patchesForVersion.map(resolvePatches))).flat();\n};\n\nconst fileExtensions = ['js', 'ts'];\n\n// Hack to allow our Jest environment/transform to resolve the patches\n// In normal scenarios this will resolve immediately after the .js import\nconst resolvePatches = async (version: string): Promise<Patches> => {\n for (const extension of fileExtensions) {\n try {\n // eslint-disable-next-line @typescript-eslint/no-unsafe-return, @typescript-eslint/no-unsafe-member-access\n return (await import(`./patches/${version}/index.${extension}`)).patches;\n } catch {\n // Ignore\n }\n }\n throw new Error(`Could not resolve patches for ${version}`);\n};\n\nexport const upgradeSkuba = async (\n mode: 'lint' | 'format',\n logger: Logger,\n): Promise<InternalLintResult> => {\n const [currentVersion, manifest, packageManager] = await Promise.all([\n getSkubaVersion(),\n getConsumerManifest(),\n detectPackageManager(),\n ]);\n\n if (!manifest) {\n throw new Error('Could not find a package json for this project');\n }\n\n manifest.packageJson.skuba ??= { version: '1.0.0' };\n\n const manifestVersion = (manifest.packageJson.skuba as SkubaPackageJson)\n .version;\n\n // We are up to date, skip patches\n if (gte(manifestVersion, currentVersion)) {\n return { ok: true, fixable: false };\n }\n\n const patches = await getPatches(manifestVersion);\n // No patches to apply even if version out of date. Early exit to avoid unnecessary commits.\n if (patches.length === 0) {\n return { ok: true, fixable: false };\n }\n\n if (mode === 'lint') {\n const results = await Promise.all(\n patches.map(\n async ({ apply }) =>\n await apply({\n mode,\n manifest,\n packageManager,\n }),\n ),\n );\n\n // No patches are applicable. 
Early exit to avoid unnecessary commits.\n if (results.every(({ result }) => result === 'skip')) {\n return { ok: true, fixable: false };\n }\n\n logger.warn(\n `skuba has patches to apply. Run ${logger.bold(\n packageManager.exec,\n 'skuba',\n 'format',\n )} to run them. ${logger.dim('skuba-patches')}`,\n );\n\n return {\n ok: false,\n fixable: true,\n annotations: [\n {\n // package.json as likely skuba version has changed\n // TODO: locate the \"skuba\": {} config in the package.json and annotate on the version property\n path: manifest.path,\n message: `skuba has patches to apply. Run ${packageManager.exec} skuba format to run them.`,\n },\n ],\n };\n }\n\n logger.plain('Updating skuba...');\n\n // Run these in series in case a subsequent patch relies on a previous patch\n for (const { apply, description } of patches) {\n const result = await apply({\n mode,\n manifest,\n packageManager,\n });\n logger.newline();\n if (result.result === 'skip') {\n logger.plain(\n `Patch skipped: ${description}${\n result.reason ? ` - ${result.reason}` : ''\n }`,\n );\n } else {\n logger.plain(`Patch applied: ${description}`);\n }\n }\n\n (manifest.packageJson.skuba as SkubaPackageJson).version = currentVersion;\n\n const updatedPackageJson = await formatPackage(manifest.packageJson);\n\n await writeFile(manifest.path, updatedPackageJson);\n logger.newline();\n logger.plain('skuba update complete.');\n logger.newline();\n\n return {\n ok: true,\n fixable: false,\n };\n};\n"],
-
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAiB;AAEjB,sBAAmC;AAEnC,oBAA0B;AAG1B,sBAAoC;AACpC,4BAGO;AACP,qBAAgC;AAChC,qBAA8B;AAsB9B,MAAM,aAAa,OAAO,oBAA8C;AACtE,QAAM,UAAU,UAAM,yBAAQ,YAAAA,QAAK,KAAK,WAAW,SAAS,GAAG;AAAA,IAC7D,eAAe;AAAA,EACjB,CAAC;AAID,QAAM,wBAAoB;AAAA,IACxB,QAAQ;AAAA,MAAQ,CAAC;AAAA;AAAA,QAEf,MAAM,YAAY;AAAA,YAElB,mBAAI,MAAM,MAAM,eAAe,IAC3B,MAAM,OACN,CAAC;AAAA;AAAA,IACP;AAAA,EACF;AAEA,UAAQ,MAAM,QAAQ,IAAI,kBAAkB,IAAI,cAAc,CAAC,GAAG,KAAK;AACzE;AAEA,MAAM,iBAAiB,CAAC,MAAM,IAAI;AAIlC,MAAM,iBAAiB,OAAO,YAAsC;AAClE,aAAW,aAAa,gBAAgB;AACtC,QAAI;AAEF,cAAQ,MAAM,OAAO,aAAa,OAAO,UAAU,SAAS,KAAK;AAAA,IACnE,QAAQ;AAAA,IAER;AAAA,EACF;AACA,QAAM,IAAI,MAAM,iCAAiC,OAAO,EAAE;AAC5D;AAEO,MAAM,eAAe,OAC1B,MACA,WACgC;AAChC,QAAM,CAAC,gBAAgB,UAAU,cAAc,IAAI,MAAM,QAAQ,IAAI;AAAA,QACnE,gCAAgB;AAAA,QAChB,qCAAoB;AAAA,QACpB,4CAAqB;AAAA,EACvB,CAAC;AAED,MAAI,CAAC,UAAU;AACb,UAAM,IAAI,MAAM,gDAAgD;AAAA,EAClE;AAEA,WAAS,YAAY,UAAU,EAAE,SAAS,QAAQ;AAElD,QAAM,kBAAmB,SAAS,YAAY,MAC3C;AAGH,UAAI,mBAAI,iBAAiB,cAAc,GAAG;AACxC,WAAO,EAAE,IAAI,MAAM,SAAS,MAAM;AAAA,EACpC;AAEA,QAAM,UAAU,MAAM,WAAW,eAAe;AAEhD,MAAI,QAAQ,WAAW,GAAG;AACxB,WAAO,EAAE,IAAI,MAAM,SAAS,MAAM;AAAA,EACpC;AAEA,MAAI,SAAS,QAAQ;AACnB,UAAM,UAAU,MAAM,QAAQ;AAAA,MAC5B,QAAQ;AAAA,QACN,OAAO,EAAE,MAAM,MACb,MAAM,MAAM;AAAA,UACV;AAAA,UACA;AAAA,UACA;AAAA,QACF,CAAC;AAAA,MACL;AAAA,IACF;AAGA,QAAI,QAAQ,MAAM,CAAC,EAAE,OAAO,MAAM,WAAW,MAAM,GAAG;AACpD,aAAO,EAAE,IAAI,MAAM,SAAS,MAAM;AAAA,IACpC;AAEA,WAAO;AAAA,MACL,mCAAmC,OAAO;AAAA,QACxC,eAAe;AAAA,
+
"sourcesContent": ["import path from 'path';\n\nimport { readdir, writeFile } from 'fs-extra';\nimport type readPkgUp from 'read-pkg-up';\nimport { gte, sort } from 'semver';\n\nimport type { Logger } from '../../../../utils/logging';\nimport { getConsumerManifest } from '../../../../utils/manifest';\nimport {\n type PackageManagerConfig,\n detectPackageManager,\n} from '../../../../utils/packageManager';\nimport { getSkubaVersion } from '../../../../utils/version';\nimport { formatPackage } from '../../../configure/processing/package';\nimport type { SkubaPackageJson } from '../../../init/writePackageJson';\nimport type { InternalLintResult } from '../../internal';\n\nexport type Patches = Patch[];\nexport type Patch = {\n apply: PatchFunction;\n description: string;\n};\nexport type PatchReturnType =\n | { result: 'apply' }\n | { result: 'skip'; reason?: string };\n\nexport type PatchConfig = {\n mode: 'format' | 'lint';\n manifest: readPkgUp.NormalizedReadResult;\n packageManager: PackageManagerConfig;\n dir?: string;\n};\n\nexport type PatchFunction = (config: PatchConfig) => Promise<PatchReturnType>;\n\nconst getPatches = async (manifestVersion: string): Promise<Patches> => {\n const patches = await readdir(path.join(__dirname, 'patches'), {\n withFileTypes: true,\n });\n\n // The patches are sorted by the version they were added from.\n // Only return patches that are newer or equal to the current version.\n const patchesForVersion = sort(\n patches.flatMap((patch) =>\n // Is a directory rather than a JavaScript source file\n patch.isDirectory() &&\n // Has been added since the last patch run on the project\n gte(patch.name, manifestVersion)\n ? patch.name\n : [],\n ),\n );\n\n return (await Promise.all(patchesForVersion.map(resolvePatches))).flat();\n};\n\nconst fileExtensions = ['js', 'ts'];\n\n// Hack to allow our Jest environment/transform to resolve the patches\n// In normal scenarios this will resolve immediately after the .js import\nconst resolvePatches = async (version: string): Promise<Patches> => {\n for (const extension of fileExtensions) {\n try {\n // eslint-disable-next-line @typescript-eslint/no-unsafe-return, @typescript-eslint/no-unsafe-member-access\n return (await import(`./patches/${version}/index.${extension}`)).patches;\n } catch {\n // Ignore\n }\n }\n throw new Error(`Could not resolve patches for ${version}`);\n};\n\nexport const upgradeSkuba = async (\n mode: 'lint' | 'format',\n logger: Logger,\n): Promise<InternalLintResult> => {\n const [currentVersion, manifest, packageManager] = await Promise.all([\n getSkubaVersion(),\n getConsumerManifest(),\n detectPackageManager(),\n ]);\n\n if (!manifest) {\n throw new Error('Could not find a package json for this project');\n }\n\n manifest.packageJson.skuba ??= { version: '1.0.0' };\n\n const manifestVersion = (manifest.packageJson.skuba as SkubaPackageJson)\n .version;\n\n // We are up to date, skip patches\n if (gte(manifestVersion, currentVersion)) {\n return { ok: true, fixable: false };\n }\n\n const patches = await getPatches(manifestVersion);\n // No patches to apply even if version out of date. Early exit to avoid unnecessary commits.\n if (patches.length === 0) {\n return { ok: true, fixable: false };\n }\n\n if (mode === 'lint') {\n const results = await Promise.all(\n patches.map(\n async ({ apply }) =>\n await apply({\n mode,\n manifest,\n packageManager,\n }),\n ),\n );\n\n // No patches are applicable. 
Early exit to avoid unnecessary commits.\n if (results.every(({ result }) => result === 'skip')) {\n return { ok: true, fixable: false };\n }\n\n logger.warn(\n `skuba has patches to apply. Run ${logger.bold(\n packageManager.print.exec,\n 'skuba',\n 'format',\n )} to run them. ${logger.dim('skuba-patches')}`,\n );\n\n return {\n ok: false,\n fixable: true,\n annotations: [\n {\n // package.json as likely skuba version has changed\n // TODO: locate the \"skuba\": {} config in the package.json and annotate on the version property\n path: manifest.path,\n message: `skuba has patches to apply. Run ${packageManager.print.exec} skuba format to run them.`,\n },\n ],\n };\n }\n\n logger.plain('Updating skuba...');\n\n // Run these in series in case a subsequent patch relies on a previous patch\n for (const { apply, description } of patches) {\n const result = await apply({\n mode,\n manifest,\n packageManager,\n });\n logger.newline();\n if (result.result === 'skip') {\n logger.plain(\n `Patch skipped: ${description}${\n result.reason ? ` - ${result.reason}` : ''\n }`,\n );\n } else {\n logger.plain(`Patch applied: ${description}`);\n }\n }\n\n (manifest.packageJson.skuba as SkubaPackageJson).version = currentVersion;\n\n const updatedPackageJson = await formatPackage(manifest.packageJson);\n\n await writeFile(manifest.path, updatedPackageJson);\n logger.newline();\n logger.plain('skuba update complete.');\n logger.newline();\n\n return {\n ok: true,\n fixable: false,\n };\n};\n"],
+
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAiB;AAEjB,sBAAmC;AAEnC,oBAA0B;AAG1B,sBAAoC;AACpC,4BAGO;AACP,qBAAgC;AAChC,qBAA8B;AAsB9B,MAAM,aAAa,OAAO,oBAA8C;AACtE,QAAM,UAAU,UAAM,yBAAQ,YAAAA,QAAK,KAAK,WAAW,SAAS,GAAG;AAAA,IAC7D,eAAe;AAAA,EACjB,CAAC;AAID,QAAM,wBAAoB;AAAA,IACxB,QAAQ;AAAA,MAAQ,CAAC;AAAA;AAAA,QAEf,MAAM,YAAY;AAAA,YAElB,mBAAI,MAAM,MAAM,eAAe,IAC3B,MAAM,OACN,CAAC;AAAA;AAAA,IACP;AAAA,EACF;AAEA,UAAQ,MAAM,QAAQ,IAAI,kBAAkB,IAAI,cAAc,CAAC,GAAG,KAAK;AACzE;AAEA,MAAM,iBAAiB,CAAC,MAAM,IAAI;AAIlC,MAAM,iBAAiB,OAAO,YAAsC;AAClE,aAAW,aAAa,gBAAgB;AACtC,QAAI;AAEF,cAAQ,MAAM,OAAO,aAAa,OAAO,UAAU,SAAS,KAAK;AAAA,IACnE,QAAQ;AAAA,IAER;AAAA,EACF;AACA,QAAM,IAAI,MAAM,iCAAiC,OAAO,EAAE;AAC5D;AAEO,MAAM,eAAe,OAC1B,MACA,WACgC;AAChC,QAAM,CAAC,gBAAgB,UAAU,cAAc,IAAI,MAAM,QAAQ,IAAI;AAAA,QACnE,gCAAgB;AAAA,QAChB,qCAAoB;AAAA,QACpB,4CAAqB;AAAA,EACvB,CAAC;AAED,MAAI,CAAC,UAAU;AACb,UAAM,IAAI,MAAM,gDAAgD;AAAA,EAClE;AAEA,WAAS,YAAY,UAAU,EAAE,SAAS,QAAQ;AAElD,QAAM,kBAAmB,SAAS,YAAY,MAC3C;AAGH,UAAI,mBAAI,iBAAiB,cAAc,GAAG;AACxC,WAAO,EAAE,IAAI,MAAM,SAAS,MAAM;AAAA,EACpC;AAEA,QAAM,UAAU,MAAM,WAAW,eAAe;AAEhD,MAAI,QAAQ,WAAW,GAAG;AACxB,WAAO,EAAE,IAAI,MAAM,SAAS,MAAM;AAAA,EACpC;AAEA,MAAI,SAAS,QAAQ;AACnB,UAAM,UAAU,MAAM,QAAQ;AAAA,MAC5B,QAAQ;AAAA,QACN,OAAO,EAAE,MAAM,MACb,MAAM,MAAM;AAAA,UACV;AAAA,UACA;AAAA,UACA;AAAA,QACF,CAAC;AAAA,MACL;AAAA,IACF;AAGA,QAAI,QAAQ,MAAM,CAAC,EAAE,OAAO,MAAM,WAAW,MAAM,GAAG;AACpD,aAAO,EAAE,IAAI,MAAM,SAAS,MAAM;AAAA,IACpC;AAEA,WAAO;AAAA,MACL,mCAAmC,OAAO;AAAA,QACxC,eAAe,MAAM;AAAA,QACrB;AAAA,QACA;AAAA,MACF,CAAC,iBAAiB,OAAO,IAAI,eAAe,CAAC;AAAA,IAC/C;AAEA,WAAO;AAAA,MACL,IAAI;AAAA,MACJ,SAAS;AAAA,MACT,aAAa;AAAA,QACX;AAAA;AAAA;AAAA,UAGE,MAAM,SAAS;AAAA,UACf,SAAS,mCAAmC,eAAe,MAAM,IAAI;AAAA,QACvE;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAEA,SAAO,MAAM,mBAAmB;AAGhC,aAAW,EAAE,OAAO,YAAY,KAAK,SAAS;AAC5C,UAAM,SAAS,MAAM,MAAM;AAAA,MACzB;AAAA,MACA;AAAA,MACA;AAAA,IACF,CAAC;AACD,WAAO,QAAQ;AACf,QAAI,OAAO,WAAW,QAAQ;AAC5B,aAAO;AAAA,QACL,kBAAkB,WAAW,GAC3B,OAAO,SAAS,MAAM,OAAO,MAAM,KAAK,EAC1C;AAAA,MACF;AAAA,IACF,OAAO;AACL,aAAO,MAAM,kBAAkB,WAAW,EAAE;AAAA,IAC9C;AAAA,EACF;AAEA,EAAC,SAAS,YAAY,MAA2B,UAAU;AAE3D,QAAM,qBAAqB,UAAM,8BAAc,SAAS,WAAW;AAEnE,YAAM,2BAAU,SAAS,MAAM,kBAAkB;AACjD,SAAO,QAAQ;AACf,SAAO,MAAM,wBAAwB;AACrC,SAAO,QAAQ;AAEf,SAAO;AAAA,IACL,IAAI;AAAA,IACJ,SAAS;AAAA,EACX;AACF;",
  "names": ["path"]
  }
@@ -1,5 +1,5 @@
  import { type ZodRawShape, z } from 'zod';
- export declare const extractFromParentPackageJson: <T extends ZodRawShape>(schema: z.ZodObject<T
+ export declare const extractFromParentPackageJson: <T extends ZodRawShape>(schema: z.ZodObject<T>, currentPath: string) => Promise<{
  packageJson: undefined;
  packageJsonRelativePath: undefined;
  } | {
@@ -9,6 +9,6 @@ export declare const extractFromParentPackageJson: <T extends ZodRawShape>(schem
  packageJson: z.objectUtil.addQuestionMarks<z.baseObjectOutputType<T>, any> extends infer T_1 ? { [k in keyof T_1]: z.objectUtil.addQuestionMarks<z.baseObjectOutputType<T>, any>[k]; } : never;
  packageJsonRelativePath: string;
  }>;
- export declare const isPatchableServerlessVersion: () => Promise<boolean>;
- export declare const isPatchableSkubaType: () => Promise<boolean>;
- export declare const isPatchableNodeVersion: (targetNodeVersion: number) => Promise<boolean>;
+ export declare const isPatchableServerlessVersion: (currentPath: string) => Promise<boolean>;
+ export declare const isPatchableSkubaType: (currentPath: string) => Promise<boolean>;
+ export declare const isPatchableNodeVersion: (targetNodeVersion: number, currentPath: string) => Promise<boolean>;
@@ -39,8 +39,8 @@ var import_fs_extra = __toESM(require("fs-extra"));
  var import_semver = require("semver");
  var import_zod = require("zod");
  var import_logging = require("../../../utils/logging");
- const getParentFile = async (file) => {
- const path = await (0, import_find_up.default)(file, { cwd
+ const getParentFile = async (file, cwd = process.cwd()) => {
+ const path = await (0, import_find_up.default)(file, { cwd });
  if (!path) {
  return void 0;
  }
@@ -49,8 +49,8 @@ const getParentFile = async (file) => {
  path
  };
  };
- const extractFromParentPackageJson = async (schema) => {
- const file = await getParentFile("package.json");
+ const extractFromParentPackageJson = async (schema, currentPath) => {
+ const file = await getParentFile("package.json", currentPath);
  if (!file) {
  return { packageJson: void 0, packageJsonRelativePath: void 0 };
  }
@@ -67,17 +67,18 @@ const extractFromParentPackageJson = async (schema) => {
  }
  return { packageJson: result.data, packageJsonRelativePath: path };
  };
- const isPatchableServerlessVersion = async () => {
+ const isPatchableServerlessVersion = async (currentPath) => {
  const { packageJson, packageJsonRelativePath } = await extractFromParentPackageJson(
  import_zod.z.object({
  devDependencies: import_zod.z.object({
  serverless: import_zod.z.string().optional()
  })
- })
+ }),
+ currentPath
  );
  if (!packageJson) {
  throw new Error(
-
+ `package.json not found in ${currentPath}, ensure it is in the correct location`
  );
  }
  const serverlessVersion = packageJson?.devDependencies.serverless;
@@ -98,18 +99,19 @@ const isPatchableServerlessVersion = async () => {
  );
  return true;
  };
- const isPatchableSkubaType = async () => {
+ const isPatchableSkubaType = async (currentPath) => {
  const { packageJson, packageJsonRelativePath } = await extractFromParentPackageJson(
  import_zod.z.object({
  skuba: import_zod.z.object({
  type: import_zod.z.string().optional()
- }),
+ }).optional(),
  files: import_zod.z.string().array().optional()
- })
+ }),
+ currentPath
  );
  if (!packageJson) {
  throw new Error(
-
+ `package.json not found in ${currentPath}, ensure it is in the correct location`
  );
  }
  if (packageJson.files) {
@@ -118,7 +120,7 @@ const isPatchableSkubaType = async () => {
  );
  return false;
  }
- const type = packageJson?.skuba
+ const type = packageJson?.skuba?.type;
  if (!type) {
  import_logging.log.warn(
  `skuba project type not found in ${packageJsonRelativePath}; add a package.json#/skuba/type to ensure the correct migration can be applied`
@@ -134,7 +136,7 @@ const isPatchableSkubaType = async () => {
  import_logging.log.ok(`Proceeding with migration of skuba project type ${type}`);
  return true;
  };
- const isPatchableNodeVersion = async (targetNodeVersion) => {
+ const isPatchableNodeVersion = async (targetNodeVersion, currentPath) => {
  const nvmrcFile = await getParentFile(".nvmrc");
  const nodeVersionFile = await getParentFile(".node-version");
  const { packageJson } = await extractFromParentPackageJson(
@@ -142,7 +144,8 @@ const isPatchableNodeVersion = async (targetNodeVersion) => {
  engines: import_zod.z.object({
  node: import_zod.z.string()
  })
- })
+ }),
+ currentPath
  );
  const nvmrcNodeVersion = nvmrcFile?.fileContent;
  const nodeVersion = nodeVersionFile?.fileContent;
@@ -1,7 +1,7 @@
  {
  "version": 3,
  "sources": ["../../../../src/cli/migrate/nodeVersion/checks.ts"],
-
"sourcesContent": ["import findUp from 'find-up';\nimport fs from 'fs-extra';\nimport { coerce, lte, satisfies } from 'semver';\nimport { type ZodRawShape, z } from 'zod';\n\nimport { log } from '../../../utils/logging';\n\nconst getParentFile = async (file: string) => {\n const path = await findUp(file, { cwd
-
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,qBAAmB;AACnB,sBAAe;AACf,oBAAuC;AACvC,iBAAoC;AAEpC,qBAAoB;AAEpB,MAAM,gBAAgB,OAAO,
+
"sourcesContent": ["import findUp from 'find-up';\nimport fs from 'fs-extra';\nimport { coerce, lte, satisfies } from 'semver';\nimport { type ZodRawShape, z } from 'zod';\n\nimport { log } from '../../../utils/logging';\n\nconst getParentFile = async (file: string, cwd: string = process.cwd()) => {\n const path = await findUp(file, { cwd });\n if (!path) {\n return undefined;\n }\n return {\n fileContent: await fs.readFile(path, 'utf-8'),\n path,\n };\n};\n\nexport const extractFromParentPackageJson = async <T extends ZodRawShape>(\n schema: z.ZodObject<T>,\n currentPath: string,\n) => {\n const file = await getParentFile('package.json', currentPath);\n if (!file) {\n return { packageJson: undefined, packageJsonRelativePath: undefined };\n }\n const { fileContent: packageJson, path } = file;\n let rawJSON;\n try {\n rawJSON = JSON.parse(packageJson) as unknown;\n } catch {\n throw new Error(`${path} is not valid JSON`);\n }\n const result = schema.safeParse(rawJSON);\n if (!result.success) {\n return { packageJson: undefined, packageJsonRelativePath: path };\n }\n\n return { packageJson: result.data, packageJsonRelativePath: path };\n};\n\nexport const isPatchableServerlessVersion = async (\n currentPath: string,\n): Promise<boolean> => {\n const { packageJson, packageJsonRelativePath } =\n await extractFromParentPackageJson(\n z.object({\n devDependencies: z.object({\n serverless: z.string().optional(),\n }),\n }),\n currentPath,\n );\n if (!packageJson) {\n throw new Error(\n `package.json not found in ${currentPath}, ensure it is in the correct location`,\n );\n }\n\n const serverlessVersion = packageJson?.devDependencies.serverless;\n\n if (!serverlessVersion) {\n log.subtle(\n `Serverless version not found in ${packageJsonRelativePath}, assuming it is not a dependency`,\n );\n return true;\n }\n\n if (!satisfies(serverlessVersion, '4.x.x')) {\n log.warn(\n `Serverless version ${serverlessVersion} cannot be migrated; use Serverless 4.x to automatically migrate Serverless files`,\n );\n return false;\n }\n\n log.ok(\n `Proceeding with migration of Serverless version ${serverlessVersion}`,\n );\n return true;\n};\n\nexport const isPatchableSkubaType = async (\n currentPath: string,\n): Promise<boolean> => {\n const { packageJson, packageJsonRelativePath } =\n await extractFromParentPackageJson(\n z.object({\n skuba: z\n .object({\n type: z.string().optional(),\n })\n .optional(),\n files: z.string().array().optional(),\n }),\n currentPath,\n );\n\n if (!packageJson) {\n throw new Error(\n `package.json not found in ${currentPath}, ensure it is in the correct location`,\n );\n }\n\n if (packageJson.files) {\n log.warn(\n 'Migrations are not supported for packages; update manually to ensure major runtime deprecations are intended',\n );\n return false;\n }\n\n const type = packageJson?.skuba?.type;\n\n if (!type) {\n log.warn(\n `skuba project type not found in ${packageJsonRelativePath}; add a package.json#/skuba/type to ensure the correct migration can be applied`,\n );\n return false;\n }\n if (type === 'package') {\n log.warn(\n 'Migrations are not supported for packages; update manually to ensure major runtime deprecations are intended',\n );\n return false;\n }\n\n log.ok(`Proceeding with migration of skuba project type ${type}`);\n return true;\n};\n\nexport const isPatchableNodeVersion = async (\n targetNodeVersion: number,\n currentPath: string,\n): Promise<boolean> => {\n const nvmrcFile = await getParentFile('.nvmrc');\n const nodeVersionFile = await 
getParentFile('.node-version');\n const { packageJson } = await extractFromParentPackageJson(\n z.object({\n engines: z.object({\n node: z.string(),\n }),\n }),\n currentPath,\n );\n\n const nvmrcNodeVersion = nvmrcFile?.fileContent;\n const nodeVersion = nodeVersionFile?.fileContent;\n const engineVersion = packageJson?.engines.node;\n\n const currentNodeVersion = nvmrcNodeVersion || nodeVersion || engineVersion;\n\n const coercedTargetVersion = coerce(targetNodeVersion.toString())?.version;\n const coercedCurrentVersion = coerce(currentNodeVersion)?.version;\n\n const isNodeVersionValid =\n coercedTargetVersion &&\n coercedCurrentVersion &&\n lte(coercedCurrentVersion, coercedTargetVersion);\n\n if (!isNodeVersionValid) {\n log.warn(\n `Node.js version ${coercedCurrentVersion ?? 'unknown'} cannot be migrated to ${coercedTargetVersion}`,\n );\n return false;\n }\n\n log.ok(\n `Proceeding with migration from Node.js ${coercedCurrentVersion} to ${coercedTargetVersion}`,\n );\n return true;\n};\n"],
+
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,qBAAmB;AACnB,sBAAe;AACf,oBAAuC;AACvC,iBAAoC;AAEpC,qBAAoB;AAEpB,MAAM,gBAAgB,OAAO,MAAc,MAAc,QAAQ,IAAI,MAAM;AACzE,QAAM,OAAO,UAAM,eAAAA,SAAO,MAAM,EAAE,IAAI,CAAC;AACvC,MAAI,CAAC,MAAM;AACT,WAAO;AAAA,EACT;AACA,SAAO;AAAA,IACL,aAAa,MAAM,gBAAAC,QAAG,SAAS,MAAM,OAAO;AAAA,IAC5C;AAAA,EACF;AACF;AAEO,MAAM,+BAA+B,OAC1C,QACA,gBACG;AACH,QAAM,OAAO,MAAM,cAAc,gBAAgB,WAAW;AAC5D,MAAI,CAAC,MAAM;AACT,WAAO,EAAE,aAAa,QAAW,yBAAyB,OAAU;AAAA,EACtE;AACA,QAAM,EAAE,aAAa,aAAa,KAAK,IAAI;AAC3C,MAAI;AACJ,MAAI;AACF,cAAU,KAAK,MAAM,WAAW;AAAA,EAClC,QAAQ;AACN,UAAM,IAAI,MAAM,GAAG,IAAI,oBAAoB;AAAA,EAC7C;AACA,QAAM,SAAS,OAAO,UAAU,OAAO;AACvC,MAAI,CAAC,OAAO,SAAS;AACnB,WAAO,EAAE,aAAa,QAAW,yBAAyB,KAAK;AAAA,EACjE;AAEA,SAAO,EAAE,aAAa,OAAO,MAAM,yBAAyB,KAAK;AACnE;AAEO,MAAM,+BAA+B,OAC1C,gBACqB;AACrB,QAAM,EAAE,aAAa,wBAAwB,IAC3C,MAAM;AAAA,IACJ,aAAE,OAAO;AAAA,MACP,iBAAiB,aAAE,OAAO;AAAA,QACxB,YAAY,aAAE,OAAO,EAAE,SAAS;AAAA,MAClC,CAAC;AAAA,IACH,CAAC;AAAA,IACD;AAAA,EACF;AACF,MAAI,CAAC,aAAa;AAChB,UAAM,IAAI;AAAA,MACR,6BAA6B,WAAW;AAAA,IAC1C;AAAA,EACF;AAEA,QAAM,oBAAoB,aAAa,gBAAgB;AAEvD,MAAI,CAAC,mBAAmB;AACtB,uBAAI;AAAA,MACF,mCAAmC,uBAAuB;AAAA,IAC5D;AACA,WAAO;AAAA,EACT;AAEA,MAAI,KAAC,yBAAU,mBAAmB,OAAO,GAAG;AAC1C,uBAAI;AAAA,MACF,sBAAsB,iBAAiB;AAAA,IACzC;AACA,WAAO;AAAA,EACT;AAEA,qBAAI;AAAA,IACF,mDAAmD,iBAAiB;AAAA,EACtE;AACA,SAAO;AACT;AAEO,MAAM,uBAAuB,OAClC,gBACqB;AACrB,QAAM,EAAE,aAAa,wBAAwB,IAC3C,MAAM;AAAA,IACJ,aAAE,OAAO;AAAA,MACP,OAAO,aACJ,OAAO;AAAA,QACN,MAAM,aAAE,OAAO,EAAE,SAAS;AAAA,MAC5B,CAAC,EACA,SAAS;AAAA,MACZ,OAAO,aAAE,OAAO,EAAE,MAAM,EAAE,SAAS;AAAA,IACrC,CAAC;AAAA,IACD;AAAA,EACF;AAEF,MAAI,CAAC,aAAa;AAChB,UAAM,IAAI;AAAA,MACR,6BAA6B,WAAW;AAAA,IAC1C;AAAA,EACF;AAEA,MAAI,YAAY,OAAO;AACrB,uBAAI;AAAA,MACF;AAAA,IACF;AACA,WAAO;AAAA,EACT;AAEA,QAAM,OAAO,aAAa,OAAO;AAEjC,MAAI,CAAC,MAAM;AACT,uBAAI;AAAA,MACF,mCAAmC,uBAAuB;AAAA,IAC5D;AACA,WAAO;AAAA,EACT;AACA,MAAI,SAAS,WAAW;AACtB,uBAAI;AAAA,MACF;AAAA,IACF;AACA,WAAO;AAAA,EACT;AAEA,qBAAI,GAAG,mDAAmD,IAAI,EAAE;AAChE,SAAO;AACT;AAEO,MAAM,yBAAyB,OACpC,mBACA,gBACqB;AACrB,QAAM,YAAY,MAAM,cAAc,QAAQ;AAC9C,QAAM,kBAAkB,MAAM,cAAc,eAAe;AAC3D,QAAM,EAAE,YAAY,IAAI,MAAM;AAAA,IAC5B,aAAE,OAAO;AAAA,MACP,SAAS,aAAE,OAAO;AAAA,QAChB,MAAM,aAAE,OAAO;AAAA,MACjB,CAAC;AAAA,IACH,CAAC;AAAA,IACD;AAAA,EACF;AAEA,QAAM,mBAAmB,WAAW;AACpC,QAAM,cAAc,iBAAiB;AACrC,QAAM,gBAAgB,aAAa,QAAQ;AAE3C,QAAM,qBAAqB,oBAAoB,eAAe;AAE9D,QAAM,2BAAuB,sBAAO,kBAAkB,SAAS,CAAC,GAAG;AACnE,QAAM,4BAAwB,sBAAO,kBAAkB,GAAG;AAE1D,QAAM,qBACJ,wBACA,6BACA,mBAAI,uBAAuB,oBAAoB;AAEjD,MAAI,CAAC,oBAAoB;AACvB,uBAAI;AAAA,MACF,mBAAmB,yBAAyB,SAAS,0BAA0B,oBAAoB;AAAA,IACrG;AACA,WAAO;AAAA,EACT;AAEA,qBAAI;AAAA,IACF,0CAA0C,qBAAqB,OAAO,oBAAoB;AAAA,EAC5F;AACA,SAAO;AACT;",
  "names": ["findUp", "fs"]
  }
@@ -134,7 +134,9 @@ const runSubPatch = async (dir, patch) => {
  return;
  }
  if (patch.tests) {
- const results = await Promise.all(
+ const results = await Promise.all(
+ patch.tests.map((test) => test(path))
+ );
  if (!results.every(Boolean)) {
  return;
  }
@@ -169,7 +171,7 @@ const nodeVersionMigration = async ({
  }, dir = process.cwd()) => {
  import_logging.log.ok(`Upgrading to Node.js ${nodeVersion}`);
  try {
- if (!await (0, import_checks.isPatchableNodeVersion)(nodeVersion)) {
+ if (!await (0, import_checks.isPatchableNodeVersion)(nodeVersion, dir)) {
  throw new Error("Node.js version is not patchable");
  }
  const { version: nodeTypesVersion, err } = await (0, import_getNodeTypesVersion.getNodeTypesVersion)(
@@ -1,7 +1,7 @@
  {
  "version": 3,
  "sources": ["../../../../src/cli/migrate/nodeVersion/index.ts"],
-
"sourcesContent": ["import { inspect } from 'util';\n\nimport { glob } from 'fast-glob';\nimport fs from 'fs-extra';\n\nimport { log } from '../../../utils/logging';\nimport { relock } from '../../../utils/packageManager';\nimport { createDestinationFileReader } from '../../configure/analysis/project';\n\nimport {\n isPatchableNodeVersion,\n isPatchableServerlessVersion,\n isPatchableSkubaType,\n} from './checks';\nimport { getNodeTypesVersion } from './getNodeTypesVersion';\n\ntype FileSelector =\n | { files: string; file?: never }\n | { file: string; files?: never };\n\ntype SubPatch = FileSelector & {\n tests?: Array<() => Promise<boolean>>;\n regex?: RegExp;\n replace: string;\n};\n\nconst subPatches = ({\n nodeVersion,\n nodeTypesVersion,\n ECMAScriptVersion,\n}: Versions): SubPatch[] => [\n { file: '.nvmrc', replace: `${nodeVersion}\\n` },\n {\n files: '**/Dockerfile*',\n\n regex:\n /^FROM(.*) (public.ecr.aws\\/docker\\/library\\/)?node:([0-9]+(?:\\.[0-9]+(?:\\.[0-9]+)?)?)(-[a-z0-9]+)?(@sha256:[a-f0-9]{64})?( .*)?$/gm,\n replace: `FROM$1 $2node:${nodeVersion}$4$6`,\n },\n {\n files: '**/Dockerfile*',\n regex:\n /^FROM(.*) gcr.io\\/distroless\\/nodejs\\d+-debian(\\d+)(@sha256:[a-f0-9]{64})?(\\.[^- \\n]+)?(-[^ \\n]+)?( .+|)$/gm,\n replace: `FROM$1 gcr.io/distroless/nodejs${nodeVersion}-debian$2$4$5$6`,\n },\n\n {\n files: '**/serverless*.y*ml',\n regex: /\\bnodejs\\d+.x\\b/gm,\n tests: [isPatchableServerlessVersion],\n replace: `nodejs${nodeVersion}.x`,\n },\n {\n files: '**/serverless*.y*ml',\n regex: /\\bnode\\d+\\b/gm,\n tests: [isPatchableServerlessVersion],\n replace: `node${nodeVersion}`,\n },\n\n {\n files: '**/infra/**/*.ts',\n regex: /NODEJS_\\d+_X/g,\n replace: `NODEJS_${nodeVersion}_X`,\n },\n {\n files: '**/infra/**/*.ts',\n regex: /(target:\\s*'node)(\\d+)(.+)$/gm,\n replace: `$1${nodeVersion}$3`,\n },\n\n {\n files: '**/.buildkite/*',\n regex:\n /(image: )(public.ecr.aws\\/docker\\/library\\/)?(node:)[0-9.]+(\\.[^- \\n]+)?(-[^ \\n]+)?$/gm,\n replace: `$1$2$3${nodeVersion}$5`,\n },\n {\n files: '.node-version*',\n regex: /(\\d+(?:\\.\\d+)*)/g,\n replace: `${nodeVersion}`,\n },\n\n {\n files: '**/package.json',\n regex: /(\"@types\\/node\":\\s*\")(\\^)?(\\d+\\.\\d+\\.\\d+)(\")/gm,\n tests: [isPatchableServerlessVersion],\n replace: `$1$2${nodeTypesVersion}$4`,\n },\n {\n files: '**/package.json',\n regex:\n /([\"']engines[\"']:\\s*{[\\s\\S]*?[\"']node[\"']:\\s*[\"']>=)(\\d+(?:\\.\\d+)*)(['\"]\\s*})/gm,\n tests: [isPatchableServerlessVersion, isPatchableSkubaType],\n replace: `$1${nodeVersion}$3`,\n },\n\n {\n files: '**/tsconfig*.json',\n regex: /(\"target\":\\s*\")(ES\\d+)\"/gim,\n tests: [isPatchableServerlessVersion, isPatchableSkubaType],\n replace: `$1${ECMAScriptVersion}\"`,\n },\n {\n files: '**/tsconfig*.json',\n regex: /(\"lib\":\\s*\\[)([\\S\\s]*?)(ES\\d+)([\\S\\s]*?)(\\])/gim,\n tests: [isPatchableServerlessVersion, isPatchableSkubaType],\n replace: `$1$2${ECMAScriptVersion}$4$5`,\n },\n\n {\n files: '**/docker-compose*.y*ml',\n regex:\n /(image: )(public.ecr.aws\\/docker\\/library\\/)?(node:)[0-9.]+(\\.[^- \\n]+)?(-[^ \\n]+)?$/gm,\n\n replace: `$1$2$3${nodeVersion}$5`,\n },\n];\n\ntype Versions = {\n nodeVersion: number;\n nodeTypesVersion: string;\n ECMAScriptVersion: string;\n};\n\nconst runSubPatch = async (dir: string, patch: SubPatch) => {\n const readFile = createDestinationFileReader(dir);\n const paths = patch.file\n ? [patch.file]\n : await glob(patch.files ?? 
[], { cwd: dir });\n\n await Promise.all(\n paths.map(async (path) => {\n if (path.includes('node_modules')) {\n return;\n }\n const contents = await readFile(path);\n if (!contents) {\n return;\n }\n\n if (patch.regex && !patch.regex.test(contents)) {\n return;\n }\n\n if (patch.tests) {\n const results = await Promise.all(patch.tests.map((test) => test()));\n if (!results.every(Boolean)) {\n return;\n }\n }\n\n await writePatchedContents({\n path,\n contents,\n templated: patch.replace,\n regex: patch.regex,\n });\n }),\n );\n};\n\nconst writePatchedContents = async ({\n path,\n contents,\n templated,\n regex,\n}: {\n path: string;\n contents: string;\n templated: string;\n regex?: RegExp;\n}) =>\n await fs.promises.writeFile(\n path,\n regex ? contents.replaceAll(regex, templated) : templated,\n );\n\nconst upgrade = async (versions: Versions, dir: string) => {\n for (const subPatch of subPatches(versions)) {\n await runSubPatch(dir, subPatch);\n }\n};\n\nexport const nodeVersionMigration = async (\n {\n nodeVersion,\n ECMAScriptVersion,\n defaultNodeTypesVersion,\n }: {\n nodeVersion: number;\n ECMAScriptVersion: string;\n defaultNodeTypesVersion: string;\n },\n dir = process.cwd(),\n) => {\n log.ok(`Upgrading to Node.js ${nodeVersion}`);\n try {\n if (!(await isPatchableNodeVersion(nodeVersion))) {\n throw new Error('Node.js version is not patchable');\n }\n\n const { version: nodeTypesVersion, err } = await getNodeTypesVersion(\n nodeVersion,\n defaultNodeTypesVersion,\n );\n if (err) {\n log.warn(err);\n }\n await upgrade({ nodeVersion, nodeTypesVersion, ECMAScriptVersion }, dir);\n await relock(dir);\n\n log.ok('Upgraded to Node.js', nodeVersion);\n } catch (error) {\n log.err('Failed to upgrade');\n log.subtle(inspect(error));\n process.exitCode = 1;\n }\n};\n"],
-
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAwB;AAExB,uBAAqB;AACrB,sBAAe;AAEf,qBAAoB;AACpB,4BAAuB;AACvB,qBAA4C;AAE5C,oBAIO;AACP,iCAAoC;AAYpC,MAAM,aAAa,CAAC;AAAA,EAClB;AAAA,EACA;AAAA,EACA;AACF,MAA4B;AAAA,EAC1B,EAAE,MAAM,UAAU,SAAS,GAAG,WAAW;AAAA,EAAK;AAAA,EAC9C;AAAA,IACE,OAAO;AAAA,IAEP,OACE;AAAA,IACF,SAAS,iBAAiB,WAAW;AAAA,EACvC;AAAA,EACA;AAAA,IACE,OAAO;AAAA,IACP,OACE;AAAA,IACF,SAAS,kCAAkC,WAAW;AAAA,EACxD;AAAA,EAEA;AAAA,IACE,OAAO;AAAA,IACP,OAAO;AAAA,IACP,OAAO,CAAC,0CAA4B;AAAA,IACpC,SAAS,SAAS,WAAW;AAAA,EAC/B;AAAA,EACA;AAAA,IACE,OAAO;AAAA,IACP,OAAO;AAAA,IACP,OAAO,CAAC,0CAA4B;AAAA,IACpC,SAAS,OAAO,WAAW;AAAA,EAC7B;AAAA,EAEA;AAAA,IACE,OAAO;AAAA,IACP,OAAO;AAAA,IACP,SAAS,UAAU,WAAW;AAAA,EAChC;AAAA,EACA;AAAA,IACE,OAAO;AAAA,IACP,OAAO;AAAA,IACP,SAAS,KAAK,WAAW;AAAA,EAC3B;AAAA,EAEA;AAAA,IACE,OAAO;AAAA,IACP,OACE;AAAA,IACF,SAAS,SAAS,WAAW;AAAA,EAC/B;AAAA,EACA;AAAA,IACE,OAAO;AAAA,IACP,OAAO;AAAA,IACP,SAAS,GAAG,WAAW;AAAA,EACzB;AAAA,EAEA;AAAA,IACE,OAAO;AAAA,IACP,OAAO;AAAA,IACP,OAAO,CAAC,0CAA4B;AAAA,IACpC,SAAS,OAAO,gBAAgB;AAAA,EAClC;AAAA,EACA;AAAA,IACE,OAAO;AAAA,IACP,OACE;AAAA,IACF,OAAO,CAAC,4CAA8B,kCAAoB;AAAA,IAC1D,SAAS,KAAK,WAAW;AAAA,EAC3B;AAAA,EAEA;AAAA,IACE,OAAO;AAAA,IACP,OAAO;AAAA,IACP,OAAO,CAAC,4CAA8B,kCAAoB;AAAA,IAC1D,SAAS,KAAK,iBAAiB;AAAA,EACjC;AAAA,EACA;AAAA,IACE,OAAO;AAAA,IACP,OAAO;AAAA,IACP,OAAO,CAAC,4CAA8B,kCAAoB;AAAA,IAC1D,SAAS,OAAO,iBAAiB;AAAA,EACnC;AAAA,EAEA;AAAA,IACE,OAAO;AAAA,IACP,OACE;AAAA,IAEF,SAAS,SAAS,WAAW;AAAA,EAC/B;AACF;AAQA,MAAM,cAAc,OAAO,KAAa,UAAoB;AAC1D,QAAM,eAAW,4CAA4B,GAAG;AAChD,QAAM,QAAQ,MAAM,OAChB,CAAC,MAAM,IAAI,IACX,UAAM,uBAAK,MAAM,SAAS,CAAC,GAAG,EAAE,KAAK,IAAI,CAAC;AAE9C,QAAM,QAAQ;AAAA,IACZ,MAAM,IAAI,OAAO,SAAS;AACxB,UAAI,KAAK,SAAS,cAAc,GAAG;AACjC;AAAA,MACF;AACA,YAAM,WAAW,MAAM,SAAS,IAAI;AACpC,UAAI,CAAC,UAAU;AACb;AAAA,MACF;AAEA,UAAI,MAAM,SAAS,CAAC,MAAM,MAAM,KAAK,QAAQ,GAAG;AAC9C;AAAA,MACF;AAEA,UAAI,MAAM,OAAO;AACf,cAAM,UAAU,MAAM,QAAQ,
+
"sourcesContent": ["import { inspect } from 'util';\n\nimport { glob } from 'fast-glob';\nimport fs from 'fs-extra';\n\nimport { log } from '../../../utils/logging';\nimport { relock } from '../../../utils/packageManager';\nimport { createDestinationFileReader } from '../../configure/analysis/project';\n\nimport {\n isPatchableNodeVersion,\n isPatchableServerlessVersion,\n isPatchableSkubaType,\n} from './checks';\nimport { getNodeTypesVersion } from './getNodeTypesVersion';\n\ntype FileSelector =\n | { files: string; file?: never }\n | { file: string; files?: never };\n\ntype SubPatch = FileSelector & {\n tests?: Array<(path: string) => Promise<boolean>>;\n regex?: RegExp;\n replace: string;\n};\n\nconst subPatches = ({\n nodeVersion,\n nodeTypesVersion,\n ECMAScriptVersion,\n}: Versions): SubPatch[] => [\n { file: '.nvmrc', replace: `${nodeVersion}\\n` },\n {\n files: '**/Dockerfile*',\n\n regex:\n /^FROM(.*) (public.ecr.aws\\/docker\\/library\\/)?node:([0-9]+(?:\\.[0-9]+(?:\\.[0-9]+)?)?)(-[a-z0-9]+)?(@sha256:[a-f0-9]{64})?( .*)?$/gm,\n replace: `FROM$1 $2node:${nodeVersion}$4$6`,\n },\n {\n files: '**/Dockerfile*',\n regex:\n /^FROM(.*) gcr.io\\/distroless\\/nodejs\\d+-debian(\\d+)(@sha256:[a-f0-9]{64})?(\\.[^- \\n]+)?(-[^ \\n]+)?( .+|)$/gm,\n replace: `FROM$1 gcr.io/distroless/nodejs${nodeVersion}-debian$2$4$5$6`,\n },\n\n {\n files: '**/serverless*.y*ml',\n regex: /\\bnodejs\\d+.x\\b/gm,\n tests: [isPatchableServerlessVersion],\n replace: `nodejs${nodeVersion}.x`,\n },\n {\n files: '**/serverless*.y*ml',\n regex: /\\bnode\\d+\\b/gm,\n tests: [isPatchableServerlessVersion],\n replace: `node${nodeVersion}`,\n },\n\n {\n files: '**/infra/**/*.ts',\n regex: /NODEJS_\\d+_X/g,\n replace: `NODEJS_${nodeVersion}_X`,\n },\n {\n files: '**/infra/**/*.ts',\n regex: /(target:\\s*'node)(\\d+)(.+)$/gm,\n replace: `$1${nodeVersion}$3`,\n },\n\n {\n files: '**/.buildkite/*',\n regex:\n /(image: )(public.ecr.aws\\/docker\\/library\\/)?(node:)[0-9.]+(\\.[^- \\n]+)?(-[^ \\n]+)?$/gm,\n replace: `$1$2$3${nodeVersion}$5`,\n },\n {\n files: '.node-version*',\n regex: /(\\d+(?:\\.\\d+)*)/g,\n replace: `${nodeVersion}`,\n },\n\n {\n files: '**/package.json',\n regex: /(\"@types\\/node\":\\s*\")(\\^)?(\\d+\\.\\d+\\.\\d+)(\")/gm,\n tests: [isPatchableServerlessVersion],\n replace: `$1$2${nodeTypesVersion}$4`,\n },\n {\n files: '**/package.json',\n regex:\n /([\"']engines[\"']:\\s*{[\\s\\S]*?[\"']node[\"']:\\s*[\"']>=)(\\d+(?:\\.\\d+)*)(['\"]\\s*})/gm,\n tests: [isPatchableServerlessVersion, isPatchableSkubaType],\n replace: `$1${nodeVersion}$3`,\n },\n\n {\n files: '**/tsconfig*.json',\n regex: /(\"target\":\\s*\")(ES\\d+)\"/gim,\n tests: [isPatchableServerlessVersion, isPatchableSkubaType],\n replace: `$1${ECMAScriptVersion}\"`,\n },\n {\n files: '**/tsconfig*.json',\n regex: /(\"lib\":\\s*\\[)([\\S\\s]*?)(ES\\d+)([\\S\\s]*?)(\\])/gim,\n tests: [isPatchableServerlessVersion, isPatchableSkubaType],\n replace: `$1$2${ECMAScriptVersion}$4$5`,\n },\n\n {\n files: '**/docker-compose*.y*ml',\n regex:\n /(image: )(public.ecr.aws\\/docker\\/library\\/)?(node:)[0-9.]+(\\.[^- \\n]+)?(-[^ \\n]+)?$/gm,\n\n replace: `$1$2$3${nodeVersion}$5`,\n },\n];\n\ntype Versions = {\n nodeVersion: number;\n nodeTypesVersion: string;\n ECMAScriptVersion: string;\n};\n\nconst runSubPatch = async (dir: string, patch: SubPatch) => {\n const readFile = createDestinationFileReader(dir);\n const paths = patch.file\n ? [patch.file]\n : await glob(patch.files ?? 
[], { cwd: dir });\n\n await Promise.all(\n paths.map(async (path) => {\n if (path.includes('node_modules')) {\n return;\n }\n const contents = await readFile(path);\n if (!contents) {\n return;\n }\n\n if (patch.regex && !patch.regex.test(contents)) {\n return;\n }\n\n if (patch.tests) {\n const results = await Promise.all(\n patch.tests.map((test) => test(path)),\n );\n if (!results.every(Boolean)) {\n return;\n }\n }\n\n await writePatchedContents({\n path,\n contents,\n templated: patch.replace,\n regex: patch.regex,\n });\n }),\n );\n};\n\nconst writePatchedContents = async ({\n path,\n contents,\n templated,\n regex,\n}: {\n path: string;\n contents: string;\n templated: string;\n regex?: RegExp;\n}) =>\n await fs.promises.writeFile(\n path,\n regex ? contents.replaceAll(regex, templated) : templated,\n );\n\nconst upgrade = async (versions: Versions, dir: string) => {\n for (const subPatch of subPatches(versions)) {\n await runSubPatch(dir, subPatch);\n }\n};\n\nexport const nodeVersionMigration = async (\n {\n nodeVersion,\n ECMAScriptVersion,\n defaultNodeTypesVersion,\n }: {\n nodeVersion: number;\n ECMAScriptVersion: string;\n defaultNodeTypesVersion: string;\n },\n dir = process.cwd(),\n) => {\n log.ok(`Upgrading to Node.js ${nodeVersion}`);\n try {\n if (!(await isPatchableNodeVersion(nodeVersion, dir))) {\n throw new Error('Node.js version is not patchable');\n }\n\n const { version: nodeTypesVersion, err } = await getNodeTypesVersion(\n nodeVersion,\n defaultNodeTypesVersion,\n );\n if (err) {\n log.warn(err);\n }\n await upgrade({ nodeVersion, nodeTypesVersion, ECMAScriptVersion }, dir);\n await relock(dir);\n\n log.ok('Upgraded to Node.js', nodeVersion);\n } catch (error) {\n log.err('Failed to upgrade');\n log.subtle(inspect(error));\n process.exitCode = 1;\n }\n};\n"],
|
|
5
|
+
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAwB;AAExB,uBAAqB;AACrB,sBAAe;AAEf,qBAAoB;AACpB,4BAAuB;AACvB,qBAA4C;AAE5C,oBAIO;AACP,iCAAoC;AAYpC,MAAM,aAAa,CAAC;AAAA,EAClB;AAAA,EACA;AAAA,EACA;AACF,MAA4B;AAAA,EAC1B,EAAE,MAAM,UAAU,SAAS,GAAG,WAAW;AAAA,EAAK;AAAA,EAC9C;AAAA,IACE,OAAO;AAAA,IAEP,OACE;AAAA,IACF,SAAS,iBAAiB,WAAW;AAAA,EACvC;AAAA,EACA;AAAA,IACE,OAAO;AAAA,IACP,OACE;AAAA,IACF,SAAS,kCAAkC,WAAW;AAAA,EACxD;AAAA,EAEA;AAAA,IACE,OAAO;AAAA,IACP,OAAO;AAAA,IACP,OAAO,CAAC,0CAA4B;AAAA,IACpC,SAAS,SAAS,WAAW;AAAA,EAC/B;AAAA,EACA;AAAA,IACE,OAAO;AAAA,IACP,OAAO;AAAA,IACP,OAAO,CAAC,0CAA4B;AAAA,IACpC,SAAS,OAAO,WAAW;AAAA,EAC7B;AAAA,EAEA;AAAA,IACE,OAAO;AAAA,IACP,OAAO;AAAA,IACP,SAAS,UAAU,WAAW;AAAA,EAChC;AAAA,EACA;AAAA,IACE,OAAO;AAAA,IACP,OAAO;AAAA,IACP,SAAS,KAAK,WAAW;AAAA,EAC3B;AAAA,EAEA;AAAA,IACE,OAAO;AAAA,IACP,OACE;AAAA,IACF,SAAS,SAAS,WAAW;AAAA,EAC/B;AAAA,EACA;AAAA,IACE,OAAO;AAAA,IACP,OAAO;AAAA,IACP,SAAS,GAAG,WAAW;AAAA,EACzB;AAAA,EAEA;AAAA,IACE,OAAO;AAAA,IACP,OAAO;AAAA,IACP,OAAO,CAAC,0CAA4B;AAAA,IACpC,SAAS,OAAO,gBAAgB;AAAA,EAClC;AAAA,EACA;AAAA,IACE,OAAO;AAAA,IACP,OACE;AAAA,IACF,OAAO,CAAC,4CAA8B,kCAAoB;AAAA,IAC1D,SAAS,KAAK,WAAW;AAAA,EAC3B;AAAA,EAEA;AAAA,IACE,OAAO;AAAA,IACP,OAAO;AAAA,IACP,OAAO,CAAC,4CAA8B,kCAAoB;AAAA,IAC1D,SAAS,KAAK,iBAAiB;AAAA,EACjC;AAAA,EACA;AAAA,IACE,OAAO;AAAA,IACP,OAAO;AAAA,IACP,OAAO,CAAC,4CAA8B,kCAAoB;AAAA,IAC1D,SAAS,OAAO,iBAAiB;AAAA,EACnC;AAAA,EAEA;AAAA,IACE,OAAO;AAAA,IACP,OACE;AAAA,IAEF,SAAS,SAAS,WAAW;AAAA,EAC/B;AACF;AAQA,MAAM,cAAc,OAAO,KAAa,UAAoB;AAC1D,QAAM,eAAW,4CAA4B,GAAG;AAChD,QAAM,QAAQ,MAAM,OAChB,CAAC,MAAM,IAAI,IACX,UAAM,uBAAK,MAAM,SAAS,CAAC,GAAG,EAAE,KAAK,IAAI,CAAC;AAE9C,QAAM,QAAQ;AAAA,IACZ,MAAM,IAAI,OAAO,SAAS;AACxB,UAAI,KAAK,SAAS,cAAc,GAAG;AACjC;AAAA,MACF;AACA,YAAM,WAAW,MAAM,SAAS,IAAI;AACpC,UAAI,CAAC,UAAU;AACb;AAAA,MACF;AAEA,UAAI,MAAM,SAAS,CAAC,MAAM,MAAM,KAAK,QAAQ,GAAG;AAC9C;AAAA,MACF;AAEA,UAAI,MAAM,OAAO;AACf,cAAM,UAAU,MAAM,QAAQ;AAAA,UAC5B,MAAM,MAAM,IAAI,CAAC,SAAS,KAAK,IAAI,CAAC;AAAA,QACtC;AACA,YAAI,CAAC,QAAQ,MAAM,OAAO,GAAG;AAC3B;AAAA,QACF;AAAA,MACF;AAEA,YAAM,qBAAqB;AAAA,QACzB;AAAA,QACA;AAAA,QACA,WAAW,MAAM;AAAA,QACjB,OAAO,MAAM;AAAA,MACf,CAAC;AAAA,IACH,CAAC;AAAA,EACH;AACF;AAEA,MAAM,uBAAuB,OAAO;AAAA,EAClC;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF,MAME,MAAM,gBAAAA,QAAG,SAAS;AAAA,EAChB;AAAA,EACA,QAAQ,SAAS,WAAW,OAAO,SAAS,IAAI;AAClD;AAEF,MAAM,UAAU,OAAO,UAAoB,QAAgB;AACzD,aAAW,YAAY,WAAW,QAAQ,GAAG;AAC3C,UAAM,YAAY,KAAK,QAAQ;AAAA,EACjC;AACF;AAEO,MAAM,uBAAuB,OAClC;AAAA,EACE;AAAA,EACA;AAAA,EACA;AACF,GAKA,MAAM,QAAQ,IAAI,MACf;AACH,qBAAI,GAAG,wBAAwB,WAAW,EAAE;AAC5C,MAAI;AACF,QAAI,CAAE,UAAM,sCAAuB,aAAa,GAAG,GAAI;AACrD,YAAM,IAAI,MAAM,kCAAkC;AAAA,IACpD;AAEA,UAAM,EAAE,SAAS,kBAAkB,IAAI,IAAI,UAAM;AAAA,MAC/C;AAAA,MACA;AAAA,IACF;AACA,QAAI,KAAK;AACP,yBAAI,KAAK,GAAG;AAAA,IACd;AACA,UAAM,QAAQ,EAAE,aAAa,kBAAkB,kBAAkB,GAAG,GAAG;AACvE,cAAM,8BAAO,GAAG;AAEhB,uBAAI,GAAG,uBAAuB,WAAW;AAAA,EAC3C,SAAS,OAAO;AACd,uBAAI,IAAI,mBAAmB;AAC3B,uBAAI,WAAO,qBAAQ,KAAK,CAAC;AACzB,YAAQ,WAAW;AAAA,EACrB;AACF;",
|
|
6
6
|
"names": ["fs"]
|
|
7
7
|
}
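The migration source embedded in the map above drives a series of regex sub-patches. As one illustration, here is a minimal sketch of the Dockerfile rule rewriting a pinned node base image to the target major version; the regex and replacement template are copied from the sourcesContent above, while the input line and target version are made up.

// Sketch only: applies the '**/Dockerfile*' sub-patch as a bare String.replace,
// rather than through the package's runSubPatch/writePatchedContents flow.
const nodeVersion = 22; // hypothetical target major version
const regex =
  /^FROM(.*) (public.ecr.aws\/docker\/library\/)?node:([0-9]+(?:\.[0-9]+(?:\.[0-9]+)?)?)(-[a-z0-9]+)?(@sha256:[a-f0-9]{64})?( .*)?$/gm;
const replace = `FROM$1 $2node:${nodeVersion}$4$6`;

const input = 'FROM public.ecr.aws/docker/library/node:20.11.0-alpine AS build';
console.log(input.replace(regex, replace));
// FROM public.ecr.aws/docker/library/node:22-alpine AS build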
|
package/lib/utils/logo.js
CHANGED
|
@@ -58,7 +58,9 @@ const showLogoAndVersionInfo = async () => {
|
|
|
58
58
|
import_logging.log.warn("Your skuba installation is out of date.");
|
|
59
59
|
import_logging.log.warn("Consider upgrading:");
|
|
60
60
|
import_logging.log.newline();
|
|
61
|
-
import_logging.log.warn(
|
|
61
|
+
import_logging.log.warn(
|
|
62
|
+
import_logging.log.bold(packageManager.print.update, `skuba@${versionInfo.latest}`)
|
|
63
|
+
);
|
|
62
64
|
import_logging.log.newline();
|
|
63
65
|
}
|
|
64
66
|
return versionInfo;
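This hunk routes the stale-version hint through the nested `packageManager.print.update` string introduced later in this diff, instead of a flat `packageManager.update`. A rough sketch of the message that results, assuming a non-global pnpm installation and a placeholder latest version (the real code passes the string through `log.warn(log.bold(...))`):

// Sketch only: `print.update` mirrors the pnpm entry in PACKAGE_MANAGERS below;
// 'pnpm update --global' is used instead when skuba is installed globally.
const print = { update: 'pnpm update' };
const latest = '10.0.2'; // placeholder version

console.log(`${print.update} skuba@${latest}`); // pnpm update skuba@10.0.2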
|
package/lib/utils/logo.js.map
CHANGED
|
@@ -1,7 +1,7 @@
|
|
|
1
1
|
{
|
|
2
2
|
"version": 3,
|
|
3
3
|
"sources": ["../../src/utils/logo.ts"],
|
|
4
|
-
"sourcesContent": ["import chalk from 'chalk';\n\nimport { log } from './logging';\nimport { detectPackageManager } from './packageManager';\nimport { getSkubaVersionInfo } from './version';\n\nconst LOGO = chalk.blueBright(`\n \u256D\u2500\u256E ${chalk.magentaBright(' ')}\u256D\u2500\u256E\n\u256D\u2500\u2500\u2500\u2502 \u2570\u2500${chalk.magentaBright('\u256D\u2500\u252C\u2500\u256E')} \u2570\u2500\u256E\u2500\u2500\u2500\u256E\n\u2502_ \u2500\u2524 <${chalk.magentaBright('\u2502 \u2575 \u2502')} \u2022 \u2502 \u2022 \u2502\n\u2570\u2500\u2500\u2500\u2570\u2500\u2534\u2500${chalk.magentaBright('\u2570\u2500\u2500\u2500\u256F')}\u2500\u2500\u2500\u256F\u2500\u2500 \u2570\n`);\n\nexport const showLogoAndVersionInfo = async () => {\n const [versionInfo, packageManager] = await Promise.all([\n getSkubaVersionInfo(),\n detectPackageManager(),\n ]);\n\n log.plain(LOGO);\n log.subtle(\n log.bold(versionInfo.local),\n '|',\n 'latest',\n log.bold(versionInfo.latest ?? 'offline \u2708'),\n );\n log.newline();\n\n if (versionInfo.isStale) {\n log.warn('Your skuba installation is out of date.');\n log.warn('Consider upgrading:');\n log.newline();\n log.warn(log.bold(packageManager.update, `skuba@${versionInfo.latest}`));\n log.newline();\n }\n\n return versionInfo;\n};\n"],
|
|
5
|
-
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,mBAAkB;AAElB,qBAAoB;AACpB,4BAAqC;AACrC,qBAAoC;AAEpC,MAAM,OAAO,aAAAA,QAAM,WAAW;AAAA,yBACpB,aAAAA,QAAM,cAAc,MAAM,CAAC;AAAA,6CAC3B,aAAAA,QAAM,cAAc,gCAAO,CAAC;AAAA,yBAC5B,aAAAA,QAAM,cAAc,sBAAO,CAAC;AAAA,kDAC5B,aAAAA,QAAM,cAAc,gCAAO,CAAC;AAAA,CACrC;AAEM,MAAM,yBAAyB,YAAY;AAChD,QAAM,CAAC,aAAa,cAAc,IAAI,MAAM,QAAQ,IAAI;AAAA,QACtD,oCAAoB;AAAA,QACpB,4CAAqB;AAAA,EACvB,CAAC;AAED,qBAAI,MAAM,IAAI;AACd,qBAAI;AAAA,IACF,mBAAI,KAAK,YAAY,KAAK;AAAA,IAC1B;AAAA,IACA;AAAA,IACA,mBAAI,KAAK,YAAY,UAAU,gBAAW;AAAA,EAC5C;AACA,qBAAI,QAAQ;AAEZ,MAAI,YAAY,SAAS;AACvB,uBAAI,KAAK,yCAAyC;AAClD,uBAAI,KAAK,qBAAqB;AAC9B,uBAAI,QAAQ;AACZ,uBAAI,
|
|
4
|
+
"sourcesContent": ["import chalk from 'chalk';\n\nimport { log } from './logging';\nimport { detectPackageManager } from './packageManager';\nimport { getSkubaVersionInfo } from './version';\n\nconst LOGO = chalk.blueBright(`\n \u256D\u2500\u256E ${chalk.magentaBright(' ')}\u256D\u2500\u256E\n\u256D\u2500\u2500\u2500\u2502 \u2570\u2500${chalk.magentaBright('\u256D\u2500\u252C\u2500\u256E')} \u2570\u2500\u256E\u2500\u2500\u2500\u256E\n\u2502_ \u2500\u2524 <${chalk.magentaBright('\u2502 \u2575 \u2502')} \u2022 \u2502 \u2022 \u2502\n\u2570\u2500\u2500\u2500\u2570\u2500\u2534\u2500${chalk.magentaBright('\u2570\u2500\u2500\u2500\u256F')}\u2500\u2500\u2500\u256F\u2500\u2500 \u2570\n`);\n\nexport const showLogoAndVersionInfo = async () => {\n const [versionInfo, packageManager] = await Promise.all([\n getSkubaVersionInfo(),\n detectPackageManager(),\n ]);\n\n log.plain(LOGO);\n log.subtle(\n log.bold(versionInfo.local),\n '|',\n 'latest',\n log.bold(versionInfo.latest ?? 'offline \u2708'),\n );\n log.newline();\n\n if (versionInfo.isStale) {\n log.warn('Your skuba installation is out of date.');\n log.warn('Consider upgrading:');\n log.newline();\n log.warn(\n log.bold(packageManager.print.update, `skuba@${versionInfo.latest}`),\n );\n log.newline();\n }\n\n return versionInfo;\n};\n"],
|
|
5
|
+
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,mBAAkB;AAElB,qBAAoB;AACpB,4BAAqC;AACrC,qBAAoC;AAEpC,MAAM,OAAO,aAAAA,QAAM,WAAW;AAAA,yBACpB,aAAAA,QAAM,cAAc,MAAM,CAAC;AAAA,6CAC3B,aAAAA,QAAM,cAAc,gCAAO,CAAC;AAAA,yBAC5B,aAAAA,QAAM,cAAc,sBAAO,CAAC;AAAA,kDAC5B,aAAAA,QAAM,cAAc,gCAAO,CAAC;AAAA,CACrC;AAEM,MAAM,yBAAyB,YAAY;AAChD,QAAM,CAAC,aAAa,cAAc,IAAI,MAAM,QAAQ,IAAI;AAAA,QACtD,oCAAoB;AAAA,QACpB,4CAAqB;AAAA,EACvB,CAAC;AAED,qBAAI,MAAM,IAAI;AACd,qBAAI;AAAA,IACF,mBAAI,KAAK,YAAY,KAAK;AAAA,IAC1B;AAAA,IACA;AAAA,IACA,mBAAI,KAAK,YAAY,UAAU,gBAAW;AAAA,EAC5C;AACA,qBAAI,QAAQ;AAEZ,MAAI,YAAY,SAAS;AACvB,uBAAI,KAAK,yCAAyC;AAClD,uBAAI,KAAK,qBAAqB;AAC9B,uBAAI,QAAQ;AACZ,uBAAI;AAAA,MACF,mBAAI,KAAK,eAAe,MAAM,QAAQ,SAAS,YAAY,MAAM,EAAE;AAAA,IACrE;AACA,uBAAI,QAAQ;AAAA,EACd;AAEA,SAAO;AACT;",
|
|
6
6
|
"names": ["chalk"]
|
|
7
7
|
}
|
package/lib/utils/packageManager.d.ts
CHANGED
|
@@ -5,21 +5,23 @@ export type PackageManagerConfig = (typeof PACKAGE_MANAGERS)[keyof typeof PACKAG
|
|
|
5
5
|
};
|
|
6
6
|
declare const PACKAGE_MANAGERS: {
|
|
7
7
|
pnpm: {
|
|
8
|
-
|
|
9
|
-
|
|
10
|
-
|
|
11
|
-
|
|
8
|
+
print: {
|
|
9
|
+
exec: string;
|
|
10
|
+
runSilent: string;
|
|
11
|
+
update: string;
|
|
12
|
+
};
|
|
12
13
|
};
|
|
13
14
|
yarn: {
|
|
14
|
-
|
|
15
|
-
|
|
16
|
-
|
|
17
|
-
|
|
15
|
+
print: {
|
|
16
|
+
exec: string;
|
|
17
|
+
runSilent: string;
|
|
18
|
+
update: string;
|
|
19
|
+
};
|
|
18
20
|
};
|
|
19
21
|
};
|
|
20
22
|
export declare const configForPackageManager: (packageManager: PackageManager) => PackageManagerConfig;
|
|
21
23
|
export declare const detectPackageManager: (cwd?: string) => Promise<PackageManagerConfig>;
|
|
22
|
-
export declare const relock: (cwd?: string) => Promise<
|
|
24
|
+
export declare const relock: (cwd?: string) => Promise<import("execa").ExecaReturnValue<string> | undefined>;
|
|
23
25
|
export type PackageManager = z.infer<typeof packageManagerSchema>;
|
|
24
26
|
export declare const packageManagerSchema: z.ZodDefault<z.ZodEnum<["pnpm", "yarn"]>>;
|
|
25
27
|
export {};
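These declarations reshape the per-package-manager config: the command strings that previously sat at the top level (for example the `packageManager.update` read by the old logo.ts above) now live under a `print` object. An illustrative sketch of the shape this implies, with values taken from the packageManager.js hunk that follows:

// Illustrative shape only; the real declarations are generated from packageManager.ts.
type PackageManager = 'pnpm' | 'yarn';

interface PackageManagerConfig {
  command: PackageManager;
  print: {
    exec: string;      // 'pnpm exec' or 'yarn'
    runSilent: string; // 'pnpm --silent run' or 'yarn -s'
    update: string;    // 'pnpm update' or 'yarn upgrade' (global variants when installed globally)
  };
}

// e.g. configForPackageManager('yarn').print.runSilent === 'yarn -s'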
|
package/lib/utils/packageManager.js
CHANGED
|
@@ -43,16 +43,18 @@ var import_logging = require("./logging");
|
|
|
43
43
|
const DEFAULT_PACKAGE_MANAGER = "yarn";
|
|
44
44
|
const PACKAGE_MANAGERS = {
|
|
45
45
|
pnpm: {
|
|
46
|
-
|
|
47
|
-
|
|
48
|
-
|
|
49
|
-
|
|
46
|
+
print: {
|
|
47
|
+
exec: "pnpm exec",
|
|
48
|
+
runSilent: "pnpm --silent run",
|
|
49
|
+
update: import_is_installed_globally.default ? "pnpm update --global" : "pnpm update"
|
|
50
|
+
}
|
|
50
51
|
},
|
|
51
52
|
yarn: {
|
|
52
|
-
|
|
53
|
-
|
|
54
|
-
|
|
55
|
-
|
|
53
|
+
print: {
|
|
54
|
+
exec: "yarn",
|
|
55
|
+
runSilent: "yarn -s",
|
|
56
|
+
update: import_is_installed_globally.default ? "yarn global upgrade" : "yarn upgrade"
|
|
57
|
+
}
|
|
56
58
|
}
|
|
57
59
|
};
|
|
58
60
|
const configForPackageManager = (packageManager) => ({
|
|
@@ -93,9 +95,20 @@ const relock = async (cwd) => {
|
|
|
93
95
|
const packageManager = await detectPackageManager(cwd);
|
|
94
96
|
const exec = (0, import_exec.createExec)({
|
|
95
97
|
stdio: "pipe",
|
|
96
|
-
streamStdio: packageManager.command
|
|
98
|
+
streamStdio: packageManager.command,
|
|
99
|
+
cwd
|
|
97
100
|
});
|
|
98
|
-
|
|
101
|
+
if (packageManager.command === "pnpm") {
|
|
102
|
+
await exec(
|
|
103
|
+
packageManager.command,
|
|
104
|
+
"install",
|
|
105
|
+
"--prefer-offline",
|
|
106
|
+
"--lockfile-only",
|
|
107
|
+
"--frozen-lockfile=false"
|
|
108
|
+
);
|
|
109
|
+
return;
|
|
110
|
+
}
|
|
111
|
+
return await exec(packageManager.command, "install");
|
|
99
112
|
};
|
|
100
113
|
const findDepth = async (filename, cwd) => {
|
|
101
114
|
const path = await (0, import_find_up.default)(filename, { cwd });
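The `relock` hunk above now passes `cwd` through to the exec helper and special-cases pnpm so that only the lockfile is regenerated, while yarn keeps a plain install. A small sketch of the resulting invocations; `relockArgs` is a stand-in for the argument selection, not the real `createExec` wrapper:

// Sketch of the command each package manager receives from relock.
const relockArgs = (command: 'pnpm' | 'yarn'): string[] =>
  command === 'pnpm'
    ? [command, 'install', '--prefer-offline', '--lockfile-only', '--frozen-lockfile=false']
    : [command, 'install'];

console.log(relockArgs('pnpm').join(' '));
// pnpm install --prefer-offline --lockfile-only --frozen-lockfile=false
console.log(relockArgs('yarn').join(' '));
// yarn install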
|
package/lib/utils/packageManager.js.map
CHANGED
|
@@ -1,7 +1,7 @@
|
|
|
1
1
|
{
|
|
2
2
|
"version": 3,
|
|
3
3
|
"sources": ["../../src/utils/packageManager.ts"],
|
|
4
|
-
"sourcesContent": ["import findUp from 'find-up';\nimport isInstalledGlobally from 'is-installed-globally';\nimport { z } from 'zod';\n\nimport { createExec } from './exec';\nimport { log } from './logging';\n\n// TODO: consider changing to this to `pnpm` in a future major version.\nexport const DEFAULT_PACKAGE_MANAGER = 'yarn';\n\nexport type PackageManagerConfig =\n (typeof PACKAGE_MANAGERS)[keyof typeof PACKAGE_MANAGERS] & {\n command: PackageManager;\n };\n\nconst PACKAGE_MANAGERS = {\n pnpm: {\n
|
|
5
|
-
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,qBAAmB;AACnB,mCAAgC;AAChC,iBAAkB;AAElB,kBAA2B;AAC3B,qBAAoB;AAGb,MAAM,0BAA0B;AAOvC,MAAM,mBAAmB;AAAA,EACvB,MAAM;AAAA,IACJ,
|
|
4
|
+
"sourcesContent": ["import findUp from 'find-up';\nimport isInstalledGlobally from 'is-installed-globally';\nimport { z } from 'zod';\n\nimport { createExec } from './exec';\nimport { log } from './logging';\n\n// TODO: consider changing to this to `pnpm` in a future major version.\nexport const DEFAULT_PACKAGE_MANAGER = 'yarn';\n\nexport type PackageManagerConfig =\n (typeof PACKAGE_MANAGERS)[keyof typeof PACKAGE_MANAGERS] & {\n command: PackageManager;\n };\n\nconst PACKAGE_MANAGERS = {\n pnpm: {\n print: {\n exec: 'pnpm exec',\n runSilent: 'pnpm --silent run',\n update: isInstalledGlobally ? 'pnpm update --global' : 'pnpm update',\n },\n },\n yarn: {\n print: {\n exec: 'yarn',\n runSilent: 'yarn -s',\n update: isInstalledGlobally ? 'yarn global upgrade' : 'yarn upgrade',\n },\n },\n};\n\nexport const configForPackageManager = (\n packageManager: PackageManager,\n): PackageManagerConfig => ({\n ...PACKAGE_MANAGERS[packageManager],\n command: packageManager,\n});\n\nexport const detectPackageManager = async (\n cwd?: string,\n): Promise<PackageManagerConfig> => {\n let packageManager: PackageManager = DEFAULT_PACKAGE_MANAGER;\n\n try {\n const [yarnDepth, pnpmDepth] = await Promise.all([\n findDepth('yarn.lock', cwd),\n findDepth('pnpm-lock.yaml', cwd),\n ]);\n\n if (yarnDepth === undefined && pnpmDepth === undefined) {\n throw new Error('No package manager lockfile found.');\n }\n\n packageManager = (pnpmDepth ?? -1) > (yarnDepth ?? -1) ? 'pnpm' : 'yarn';\n } catch (err) {\n log.warn(\n `Failed to detect package manager; defaulting to ${log.bold(\n DEFAULT_PACKAGE_MANAGER,\n )}.`,\n );\n log.subtle(\n (() => {\n switch (true) {\n case err instanceof Error:\n return err.message;\n\n default:\n return String(err);\n }\n })(),\n );\n }\n\n return configForPackageManager(packageManager);\n};\n\nexport const relock = async (cwd?: string) => {\n const packageManager = await detectPackageManager(cwd);\n const exec = createExec({\n stdio: 'pipe',\n streamStdio: packageManager.command,\n cwd,\n });\n\n if (packageManager.command === 'pnpm') {\n await exec(\n packageManager.command,\n 'install',\n '--prefer-offline',\n '--lockfile-only',\n '--frozen-lockfile=false',\n );\n return;\n }\n\n return await exec(packageManager.command, 'install');\n};\n\nconst findDepth = async (filename: string, cwd?: string) => {\n const path = await findUp(filename, { cwd });\n return path ? path.split('/').length : undefined;\n};\n\nexport type PackageManager = z.infer<typeof packageManagerSchema>;\n\nexport const packageManagerSchema = z\n .enum(['pnpm', 'yarn'])\n .default(DEFAULT_PACKAGE_MANAGER);\n"],
|
|
5
|
+
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,qBAAmB;AACnB,mCAAgC;AAChC,iBAAkB;AAElB,kBAA2B;AAC3B,qBAAoB;AAGb,MAAM,0BAA0B;AAOvC,MAAM,mBAAmB;AAAA,EACvB,MAAM;AAAA,IACJ,OAAO;AAAA,MACL,MAAM;AAAA,MACN,WAAW;AAAA,MACX,QAAQ,6BAAAA,UAAsB,yBAAyB;AAAA,IACzD;AAAA,EACF;AAAA,EACA,MAAM;AAAA,IACJ,OAAO;AAAA,MACL,MAAM;AAAA,MACN,WAAW;AAAA,MACX,QAAQ,6BAAAA,UAAsB,wBAAwB;AAAA,IACxD;AAAA,EACF;AACF;AAEO,MAAM,0BAA0B,CACrC,oBAC0B;AAAA,EAC1B,GAAG,iBAAiB,cAAc;AAAA,EAClC,SAAS;AACX;AAEO,MAAM,uBAAuB,OAClC,QACkC;AAClC,MAAI,iBAAiC;AAErC,MAAI;AACF,UAAM,CAAC,WAAW,SAAS,IAAI,MAAM,QAAQ,IAAI;AAAA,MAC/C,UAAU,aAAa,GAAG;AAAA,MAC1B,UAAU,kBAAkB,GAAG;AAAA,IACjC,CAAC;AAED,QAAI,cAAc,UAAa,cAAc,QAAW;AACtD,YAAM,IAAI,MAAM,oCAAoC;AAAA,IACtD;AAEA,sBAAkB,aAAa,OAAO,aAAa,MAAM,SAAS;AAAA,EACpE,SAAS,KAAK;AACZ,uBAAI;AAAA,MACF,mDAAmD,mBAAI;AAAA,QACrD;AAAA,MACF,CAAC;AAAA,IACH;AACA,uBAAI;AAAA,OACD,MAAM;AACL,gBAAQ,MAAM;AAAA,UACZ,KAAK,eAAe;AAClB,mBAAO,IAAI;AAAA,UAEb;AACE,mBAAO,OAAO,GAAG;AAAA,QACrB;AAAA,MACF,GAAG;AAAA,IACL;AAAA,EACF;AAEA,SAAO,wBAAwB,cAAc;AAC/C;AAEO,MAAM,SAAS,OAAO,QAAiB;AAC5C,QAAM,iBAAiB,MAAM,qBAAqB,GAAG;AACrD,QAAM,WAAO,wBAAW;AAAA,IACtB,OAAO;AAAA,IACP,aAAa,eAAe;AAAA,IAC5B;AAAA,EACF,CAAC;AAED,MAAI,eAAe,YAAY,QAAQ;AACrC,UAAM;AAAA,MACJ,eAAe;AAAA,MACf;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IACF;AACA;AAAA,EACF;AAEA,SAAO,MAAM,KAAK,eAAe,SAAS,SAAS;AACrD;AAEA,MAAM,YAAY,OAAO,UAAkB,QAAiB;AAC1D,QAAM,OAAO,UAAM,eAAAC,SAAO,UAAU,EAAE,IAAI,CAAC;AAC3C,SAAO,OAAO,KAAK,MAAM,GAAG,EAAE,SAAS;AACzC;AAIO,MAAM,uBAAuB,aACjC,KAAK,CAAC,QAAQ,MAAM,CAAC,EACrB,QAAQ,uBAAuB;",
|
|
6
6
|
"names": ["isInstalledGlobally", "findUp"]
|
|
7
7
|
}
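The source embedded above also shows how `detectPackageManager` chooses between the two supported managers: it locates the nearest `yarn.lock` and `pnpm-lock.yaml` via find-up, compares their path depths, and falls back to the `yarn` default when neither is found. A standalone sketch of that comparison; the lockfile paths are hypothetical:

// Sketch of the depth comparison used by detectPackageManager.
const depth = (lockfilePath?: string) => lockfilePath?.split('/').length ?? -1;

const choose = (yarnLock?: string, pnpmLock?: string): 'pnpm' | 'yarn' => {
  if (yarnLock === undefined && pnpmLock === undefined) {
    return 'yarn'; // DEFAULT_PACKAGE_MANAGER; the real code also logs a warning here
  }
  // The lockfile closest to the working directory (deepest path) wins; ties go to yarn.
  return depth(pnpmLock) > depth(yarnLock) ? 'pnpm' : 'yarn';
};

console.log(choose('/repo/yarn.lock', '/repo/packages/api/pnpm-lock.yaml')); // pnpm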
|
package/package.json
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "skuba",
|
|
3
|
-
"version": "10.0.
|
|
3
|
+
"version": "10.0.2-aaron-test-20250315070234",
|
|
4
4
|
"private": false,
|
|
5
5
|
"description": "SEEK development toolkit for backend applications and packages",
|
|
6
6
|
"homepage": "https://github.com/seek-oss/skuba#readme",
|
package/template/greeter/package.json
CHANGED
|
@@ -17,9 +17,9 @@
|
|
|
17
17
|
},
|
|
18
18
|
"devDependencies": {
|
|
19
19
|
"@types/node": "^22.13.10",
|
|
20
|
-
"skuba": "10.0.
|
|
20
|
+
"skuba": "10.0.2-aaron-test-20250315070234"
|
|
21
21
|
},
|
|
22
|
-
"packageManager": "pnpm@10.6.
|
|
22
|
+
"packageManager": "pnpm@10.6.3",
|
|
23
23
|
"engines": {
|
|
24
24
|
"node": ">=22"
|
|
25
25
|
}
|
package/template/lambda-sqs-worker-cdk/package.json
CHANGED
|
@@ -36,9 +36,9 @@
|
|
|
36
36
|
"constructs": "^10.0.17",
|
|
37
37
|
"datadog-cdk-constructs-v2": "^2.0.0",
|
|
38
38
|
"pino-pretty": "^13.0.0",
|
|
39
|
-
"skuba": "10.0.
|
|
39
|
+
"skuba": "10.0.2-aaron-test-20250315070234"
|
|
40
40
|
},
|
|
41
|
-
"packageManager": "pnpm@10.6.
|
|
41
|
+
"packageManager": "pnpm@10.6.3",
|
|
42
42
|
"engines": {
|
|
43
43
|
"node": ">=22"
|
|
44
44
|
}
|