skuba 12.1.0-no-sync-in-promise-iterable-20250801105434 → 12.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +1 -2
- package/config/tsconfig.json +3 -2
- package/lib/cli/build/assets.js +1 -1
- package/lib/cli/build/assets.js.map +2 -2
- package/lib/cli/build/tsc.d.ts +5 -1
- package/lib/cli/build/tsc.js +12 -0
- package/lib/cli/build/tsc.js.map +3 -3
- package/lib/cli/configure/analyseDependencies.d.ts +2 -2
- package/lib/cli/configure/analyseDependencies.js.map +1 -1
- package/lib/cli/configure/analysis/package.d.ts +1 -1
- package/lib/cli/configure/analysis/package.js +1 -1
- package/lib/cli/configure/analysis/package.js.map +2 -2
- package/lib/cli/configure/ensureTemplateCompletion.d.ts +2 -2
- package/lib/cli/configure/ensureTemplateCompletion.js.map +1 -1
- package/lib/cli/configure/getEntryPoint.d.ts +2 -2
- package/lib/cli/configure/getEntryPoint.js.map +1 -1
- package/lib/cli/configure/getProjectType.d.ts +2 -2
- package/lib/cli/configure/getProjectType.js.map +1 -1
- package/lib/cli/configure/processing/package.js +8 -2
- package/lib/cli/configure/processing/package.js.map +2 -2
- package/lib/cli/init/getConfig.js +1 -1
- package/lib/cli/init/getConfig.js.map +2 -2
- package/lib/cli/lint/internal.js +1 -1
- package/lib/cli/lint/internal.js.map +2 -2
- package/lib/cli/lint/internalLints/upgrade/index.d.ts +2 -2
- package/lib/cli/lint/internalLints/upgrade/index.js.map +1 -1
- package/lib/cli/lint/internalLints/upgrade/patches/12.0.2/unhandledRejections.js +1 -1
- package/lib/cli/lint/internalLints/upgrade/patches/12.0.2/unhandledRejections.js.map +1 -1
- package/lib/cli/lint/internalLints/upgrade/patches/8.2.1/upgradeESLint.js +1 -1
- package/lib/cli/lint/internalLints/upgrade/patches/8.2.1/upgradeESLint.js.map +2 -2
- package/lib/cli/node/index.js +6 -0
- package/lib/cli/node/index.js.map +2 -2
- package/lib/cli/start/index.js +6 -0
- package/lib/cli/start/index.js.map +2 -2
- package/lib/cli/test/index.d.ts +1 -1
- package/lib/cli/test/index.js +18 -4
- package/lib/cli/test/index.js.map +2 -2
- package/lib/utils/args.d.ts +2 -0
- package/lib/utils/args.js +5 -0
- package/lib/utils/args.js.map +2 -2
- package/lib/utils/manifest.d.ts +1 -1
- package/lib/utils/manifest.js +1 -1
- package/lib/utils/manifest.js.map +2 -2
- package/package.json +13 -13
- package/template/base/_pnpm-workspace.yaml +1 -0
- package/template/base/jest.setup.ts +1 -1
- package/template/express-rest-api/.buildkite/pipeline.yml +6 -0
- package/template/express-rest-api/.env +1 -1
- package/template/express-rest-api/.gantry/dev.yml +5 -1
- package/template/express-rest-api/.gantry/prod.yml +5 -1
- package/template/express-rest-api/Dockerfile +1 -1
- package/template/express-rest-api/README.md +5 -5
- package/template/express-rest-api/gantry.apply.yml +17 -1
- package/template/express-rest-api/package.json +11 -5
- package/template/express-rest-api/src/api/healthCheck.ts +2 -2
- package/template/express-rest-api/src/config.ts +7 -7
- package/template/express-rest-api/src/framework/logging.ts +11 -7
- package/template/express-rest-api/src/framework/metrics.ts +1 -1
- package/template/express-rest-api/src/tracing.ts +56 -0
- package/template/greeter/README.md +2 -2
- package/template/greeter/package.json +2 -2
- package/template/koa-rest-api/.buildkite/pipeline.yml +6 -0
- package/template/koa-rest-api/.env +1 -1
- package/template/koa-rest-api/.gantry/dev.yml +3 -3
- package/template/koa-rest-api/.gantry/prod.yml +3 -3
- package/template/koa-rest-api/README.md +6 -6
- package/template/koa-rest-api/gantry.apply.yml +15 -3
- package/template/koa-rest-api/package.json +5 -6
- package/template/koa-rest-api/src/api/healthCheck.ts +2 -2
- package/template/koa-rest-api/src/config.ts +7 -7
- package/template/koa-rest-api/src/framework/logging.ts +12 -8
- package/template/koa-rest-api/src/framework/metrics.ts +1 -1
- package/template/koa-rest-api/src/framework/server.test.ts +7 -8
- package/template/koa-rest-api/src/framework/server.ts +1 -4
- package/template/lambda-sqs-worker-cdk/.buildkite/pipeline.yml +6 -2
- package/template/lambda-sqs-worker-cdk/.env +1 -1
- package/template/lambda-sqs-worker-cdk/README.md +8 -8
- package/template/lambda-sqs-worker-cdk/infra/__snapshots__/appStack.test.ts.snap +36 -10
- package/template/lambda-sqs-worker-cdk/infra/appStack.test.ts +5 -8
- package/template/lambda-sqs-worker-cdk/infra/appStack.ts +11 -5
- package/template/lambda-sqs-worker-cdk/infra/config.ts +27 -18
- package/template/lambda-sqs-worker-cdk/infra/index.ts +1 -1
- package/template/lambda-sqs-worker-cdk/package.json +6 -6
- package/template/lambda-sqs-worker-cdk/src/app.test.ts +4 -4
- package/template/lambda-sqs-worker-cdk/src/config.ts +11 -16
- package/template/lambda-sqs-worker-cdk/src/framework/handler.test.ts +2 -2
- package/template/lambda-sqs-worker-cdk/src/framework/handler.ts +8 -21
- package/template/lambda-sqs-worker-cdk/src/framework/logging.ts +12 -8
- package/template/lambda-sqs-worker-cdk/src/framework/metrics.ts +1 -4
- package/template/oss-npm-package/.github/workflows/release.yml +1 -1
- package/template/oss-npm-package/.github/workflows/validate.yml +1 -1
package/README.md
CHANGED
@@ -12,11 +12,10 @@
 - Write in [TypeScript]
 - Enforce coding standards with [ESLint] and [Prettier]
 - Test with [Jest]
-- Deploy with [Gantry]
+- Deploy with [Gantry] or the [AWS CDK]
 
 [aws cdk]: https://docs.aws.amazon.com/cdk/latest/guide/work-with-cdk-typescript.html
 [gantry]: https://backstage.myseek.xyz/docs/default/component/gantry/
-[serverless]: https://serverless.com/
 
 It provides you with:
 
package/config/tsconfig.json
CHANGED
@@ -3,9 +3,10 @@
     "incremental": true,
     "isolatedModules": true,
     "moduleResolution": "node",
-    "
+    "noUncheckedSideEffectImports": true,
     "noUnusedLocals": false,
-    "noUnusedParameters": false
+    "noUnusedParameters": false,
+    "resolveJsonModule": true
   },
   "extends": "tsconfig-seek"
 }
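Both new entries are standard TypeScript compiler options; roughly, `resolveJsonModule` lets consumers import `.json` files as modules, and `noUncheckedSideEffectImports` makes unresolved side-effect imports an error instead of being silently ignored. A small illustrative sketch (the file paths are hypothetical):

```typescript
// Illustrative consumer file; these import paths are made up.
import data from './fixtures/data.json'; // permitted by "resolveJsonModule": true

// With "noUncheckedSideEffectImports": true, a side-effect import that does not
// resolve is reported as a compile error rather than being skipped.
import './polyfills.js';

console.log(data);
```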
package/lib/cli/build/assets.js
CHANGED
@@ -89,7 +89,7 @@ const copyAssetsConcurrently = async (configs) => {
   );
   await Promise.all(
     configs.map(
-      ({ outDir, name, prefixColor }) => copyAssets(
+      async ({ outDir, name, prefixColor }) => copyAssets(
         outDir,
         (0, import_logging.createLogger)({
           debug: false,
package/lib/cli/build/assets.js.map
CHANGED
@@ -1,7 +1,7 @@
 {
   "version": 3,
   "sources": ["../../../src/cli/build/assets.ts"],
-
"sourcesContent": ["import path from 'path';\n\nimport chalk, { type Color } from 'chalk';\nimport fs from 'fs-extra';\n\nimport { copyFile } from '../../utils/copy.js';\nimport { buildPatternToFilepathMap, crawlDirectory } from '../../utils/dir.js';\nimport { type Logger, createLogger, log } from '../../utils/logging.js';\nimport {\n getConsumerManifest,\n getEntryPointFromManifest,\n getPropFromConsumerManifest,\n} from '../../utils/manifest.js';\n\nexport const copyAssets = async (\n destinationDir: string,\n logger: Logger = log,\n) => {\n const manifest = await getConsumerManifest();\n if (!manifest) {\n return;\n }\n\n const assets = await getPropFromConsumerManifest<string, string[]>('assets');\n if (!assets) {\n return;\n }\n\n const entryPoint = await getEntryPointFromManifest();\n if (!entryPoint) {\n return;\n }\n\n const pathSegments = entryPoint.split(path.sep);\n const srcDir = (pathSegments.length > 1 && pathSegments[0]) || '';\n const resolvedSrcDir = path.resolve(path.dirname(manifest.path), srcDir);\n const resolvedDestinationDir = path.resolve(\n path.dirname(manifest.path),\n destinationDir,\n );\n\n const allFiles = await crawlDirectory(resolvedSrcDir);\n const filesByPattern = buildPatternToFilepathMap(assets, allFiles, {\n cwd: resolvedSrcDir,\n dot: true,\n });\n const matchedFiles = Array.from(\n new Set(Object.values(filesByPattern).flat()),\n );\n\n await Promise.all(\n matchedFiles.map(async (filename) => {\n logger.subtle(`Copying ${filename}`);\n\n await fs.promises.mkdir(\n path.dirname(path.join(resolvedDestinationDir, filename)),\n { recursive: true },\n );\n await copyFile(\n path.join(resolvedSrcDir, filename),\n path.join(resolvedDestinationDir, filename),\n { processors: [] },\n );\n }),\n );\n};\n\ninterface CopyAssetsConfig {\n outDir: string;\n name: string;\n prefixColor: typeof Color;\n}\n\nexport const copyAssetsConcurrently = async (configs: CopyAssetsConfig[]) => {\n const maxNameLength = configs.reduce(\n (length, command) => Math.max(length, command.name.length),\n 0,\n );\n\n await Promise.all(\n configs.map(({ outDir, name, prefixColor }) =>\n copyAssets(\n outDir,\n createLogger({\n debug: false,\n prefixes: [chalk[prefixColor](`${name.padEnd(maxNameLength)} \u2502`)],\n }),\n ),\n ),\n );\n};\n"],
-
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAiB;AAEjB,mBAAkC;AAClC,sBAAe;AAEf,kBAAyB;AACzB,iBAA0D;AAC1D,qBAA+C;AAC/C,sBAIO;AAEA,MAAM,aAAa,OACxB,gBACA,SAAiB,uBACd;AACH,QAAM,WAAW,UAAM,qCAAoB;AAC3C,MAAI,CAAC,UAAU;AACb;AAAA,EACF;AAEA,QAAM,SAAS,UAAM,6CAA8C,QAAQ;AAC3E,MAAI,CAAC,QAAQ;AACX;AAAA,EACF;AAEA,QAAM,aAAa,UAAM,2CAA0B;AACnD,MAAI,CAAC,YAAY;AACf;AAAA,EACF;AAEA,QAAM,eAAe,WAAW,MAAM,YAAAA,QAAK,GAAG;AAC9C,QAAM,SAAU,aAAa,SAAS,KAAK,aAAa,CAAC,KAAM;AAC/D,QAAM,iBAAiB,YAAAA,QAAK,QAAQ,YAAAA,QAAK,QAAQ,SAAS,IAAI,GAAG,MAAM;AACvE,QAAM,yBAAyB,YAAAA,QAAK;AAAA,IAClC,YAAAA,QAAK,QAAQ,SAAS,IAAI;AAAA,IAC1B;AAAA,EACF;AAEA,QAAM,WAAW,UAAM,2BAAe,cAAc;AACpD,QAAM,qBAAiB,sCAA0B,QAAQ,UAAU;AAAA,IACjE,KAAK;AAAA,IACL,KAAK;AAAA,EACP,CAAC;AACD,QAAM,eAAe,MAAM;AAAA,IACzB,IAAI,IAAI,OAAO,OAAO,cAAc,EAAE,KAAK,CAAC;AAAA,EAC9C;AAEA,QAAM,QAAQ;AAAA,IACZ,aAAa,IAAI,OAAO,aAAa;AACnC,aAAO,OAAO,WAAW,QAAQ,EAAE;AAEnC,YAAM,gBAAAC,QAAG,SAAS;AAAA,QAChB,YAAAD,QAAK,QAAQ,YAAAA,QAAK,KAAK,wBAAwB,QAAQ,CAAC;AAAA,QACxD,EAAE,WAAW,KAAK;AAAA,MACpB;AACA,gBAAM;AAAA,QACJ,YAAAA,QAAK,KAAK,gBAAgB,QAAQ;AAAA,QAClC,YAAAA,QAAK,KAAK,wBAAwB,QAAQ;AAAA,QAC1C,EAAE,YAAY,CAAC,EAAE;AAAA,MACnB;AAAA,IACF,CAAC;AAAA,EACH;AACF;AAQO,MAAM,yBAAyB,OAAO,YAAgC;AAC3E,QAAM,gBAAgB,QAAQ;AAAA,IAC5B,CAAC,QAAQ,YAAY,KAAK,IAAI,QAAQ,QAAQ,KAAK,MAAM;AAAA,IACzD;AAAA,EACF;AAEA,QAAM,QAAQ;AAAA,IACZ,QAAQ;AAAA,MAAI,
+
"sourcesContent": ["import path from 'path';\n\nimport chalk, { type Color } from 'chalk';\nimport fs from 'fs-extra';\n\nimport { copyFile } from '../../utils/copy.js';\nimport { buildPatternToFilepathMap, crawlDirectory } from '../../utils/dir.js';\nimport { type Logger, createLogger, log } from '../../utils/logging.js';\nimport {\n getConsumerManifest,\n getEntryPointFromManifest,\n getPropFromConsumerManifest,\n} from '../../utils/manifest.js';\n\nexport const copyAssets = async (\n destinationDir: string,\n logger: Logger = log,\n) => {\n const manifest = await getConsumerManifest();\n if (!manifest) {\n return;\n }\n\n const assets = await getPropFromConsumerManifest<string, string[]>('assets');\n if (!assets) {\n return;\n }\n\n const entryPoint = await getEntryPointFromManifest();\n if (!entryPoint) {\n return;\n }\n\n const pathSegments = entryPoint.split(path.sep);\n const srcDir = (pathSegments.length > 1 && pathSegments[0]) || '';\n const resolvedSrcDir = path.resolve(path.dirname(manifest.path), srcDir);\n const resolvedDestinationDir = path.resolve(\n path.dirname(manifest.path),\n destinationDir,\n );\n\n const allFiles = await crawlDirectory(resolvedSrcDir);\n const filesByPattern = buildPatternToFilepathMap(assets, allFiles, {\n cwd: resolvedSrcDir,\n dot: true,\n });\n const matchedFiles = Array.from(\n new Set(Object.values(filesByPattern).flat()),\n );\n\n await Promise.all(\n matchedFiles.map(async (filename) => {\n logger.subtle(`Copying ${filename}`);\n\n await fs.promises.mkdir(\n path.dirname(path.join(resolvedDestinationDir, filename)),\n { recursive: true },\n );\n await copyFile(\n path.join(resolvedSrcDir, filename),\n path.join(resolvedDestinationDir, filename),\n { processors: [] },\n );\n }),\n );\n};\n\ninterface CopyAssetsConfig {\n outDir: string;\n name: string;\n prefixColor: typeof Color;\n}\n\nexport const copyAssetsConcurrently = async (configs: CopyAssetsConfig[]) => {\n const maxNameLength = configs.reduce(\n (length, command) => Math.max(length, command.name.length),\n 0,\n );\n\n await Promise.all(\n configs.map(async ({ outDir, name, prefixColor }) =>\n copyAssets(\n outDir,\n createLogger({\n debug: false,\n prefixes: [chalk[prefixColor](`${name.padEnd(maxNameLength)} \u2502`)],\n }),\n ),\n ),\n );\n};\n"],
+
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAiB;AAEjB,mBAAkC;AAClC,sBAAe;AAEf,kBAAyB;AACzB,iBAA0D;AAC1D,qBAA+C;AAC/C,sBAIO;AAEA,MAAM,aAAa,OACxB,gBACA,SAAiB,uBACd;AACH,QAAM,WAAW,UAAM,qCAAoB;AAC3C,MAAI,CAAC,UAAU;AACb;AAAA,EACF;AAEA,QAAM,SAAS,UAAM,6CAA8C,QAAQ;AAC3E,MAAI,CAAC,QAAQ;AACX;AAAA,EACF;AAEA,QAAM,aAAa,UAAM,2CAA0B;AACnD,MAAI,CAAC,YAAY;AACf;AAAA,EACF;AAEA,QAAM,eAAe,WAAW,MAAM,YAAAA,QAAK,GAAG;AAC9C,QAAM,SAAU,aAAa,SAAS,KAAK,aAAa,CAAC,KAAM;AAC/D,QAAM,iBAAiB,YAAAA,QAAK,QAAQ,YAAAA,QAAK,QAAQ,SAAS,IAAI,GAAG,MAAM;AACvE,QAAM,yBAAyB,YAAAA,QAAK;AAAA,IAClC,YAAAA,QAAK,QAAQ,SAAS,IAAI;AAAA,IAC1B;AAAA,EACF;AAEA,QAAM,WAAW,UAAM,2BAAe,cAAc;AACpD,QAAM,qBAAiB,sCAA0B,QAAQ,UAAU;AAAA,IACjE,KAAK;AAAA,IACL,KAAK;AAAA,EACP,CAAC;AACD,QAAM,eAAe,MAAM;AAAA,IACzB,IAAI,IAAI,OAAO,OAAO,cAAc,EAAE,KAAK,CAAC;AAAA,EAC9C;AAEA,QAAM,QAAQ;AAAA,IACZ,aAAa,IAAI,OAAO,aAAa;AACnC,aAAO,OAAO,WAAW,QAAQ,EAAE;AAEnC,YAAM,gBAAAC,QAAG,SAAS;AAAA,QAChB,YAAAD,QAAK,QAAQ,YAAAA,QAAK,KAAK,wBAAwB,QAAQ,CAAC;AAAA,QACxD,EAAE,WAAW,KAAK;AAAA,MACpB;AACA,gBAAM;AAAA,QACJ,YAAAA,QAAK,KAAK,gBAAgB,QAAQ;AAAA,QAClC,YAAAA,QAAK,KAAK,wBAAwB,QAAQ;AAAA,QAC1C,EAAE,YAAY,CAAC,EAAE;AAAA,MACnB;AAAA,IACF,CAAC;AAAA,EACH;AACF;AAQO,MAAM,yBAAyB,OAAO,YAAgC;AAC3E,QAAM,gBAAgB,QAAQ;AAAA,IAC5B,CAAC,QAAQ,YAAY,KAAK,IAAI,QAAQ,QAAQ,KAAK,MAAM;AAAA,IACzD;AAAA,EACF;AAEA,QAAM,QAAQ;AAAA,IACZ,QAAQ;AAAA,MAAI,OAAO,EAAE,QAAQ,MAAM,YAAY,MAC7C;AAAA,QACE;AAAA,YACA,6BAAa;AAAA,UACX,OAAO;AAAA,UACP,UAAU,CAAC,aAAAE,QAAM,WAAW,EAAE,GAAG,KAAK,OAAO,aAAa,CAAC,SAAI,CAAC;AAAA,QAClE,CAAC;AAAA,MACH;AAAA,IACF;AAAA,EACF;AACF;",
  "names": ["path", "fs", "chalk"]
 }
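The pre-release tag on this version (`no-sync-in-promise-iterable`) describes the pattern being fixed above: callbacks passed into `Promise.all(configs.map(...))` are now `async`, so a synchronous throw inside the callback surfaces as a rejected promise instead of escaping before `Promise.all` can observe it. A minimal sketch of the difference, using a hypothetical `copy` function:

```typescript
const copy = (name: string): Promise<void> => {
  if (!name) {
    // Synchronous throw before any promise is returned.
    throw new Error('missing name');
  }
  return Promise.resolve();
};

const main = async () => {
  // Without `async` on the callback, the throw happens while the iterable is
  // still being built, so the catch below never sees it; with `async`, it
  // becomes a rejected promise that Promise.all reports.
  await Promise.all(['a', ''].map(async (name) => copy(name))).catch((err) =>
    console.error('caught:', err),
  );
};

void main();
```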
package/lib/cli/build/tsc.d.ts
CHANGED
@@ -1,4 +1,8 @@
 import ts from 'typescript';
-import type
+import { type Logger } from '../../utils/logging.js';
 export declare const tsc: (args?: string[]) => Promise<import("execa").ExecaReturnValue<string>>;
 export declare const readTsconfig: (args: string[] | undefined, log: Logger) => ts.ParsedCommandLine | undefined;
+/**
+ * Extract custom conditions from tsconfig that should be passed to tsx
+ */
+export declare const getCustomConditions: () => string[];
package/lib/cli/build/tsc.js
CHANGED
@@ -28,12 +28,14 @@ var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__ge
 var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
 var tsc_exports = {};
 __export(tsc_exports, {
+  getCustomConditions: () => getCustomConditions,
   readTsconfig: () => readTsconfig,
   tsc: () => tsc
 });
 module.exports = __toCommonJS(tsc_exports);
 var import_typescript = __toESM(require("typescript"));
 var import_exec = require("../../utils/exec.js");
+var import_logging = require("../../utils/logging.js");
 var import_args = require("./args.js");
 const DEFAULT_ARGS = ["--project", "tsconfig.build.json"];
 const formatHost = {
@@ -94,8 +96,18 @@ const readTsconfig = (args = process.argv.slice(2), log) => {
   }
   return parsedCommandLine;
 };
+const getCustomConditions = () => {
+  try {
+    const parsedConfig = readTsconfig([], import_logging.log);
+    const customConditions = parsedConfig?.options.customConditions;
+    return Array.isArray(customConditions) ? customConditions : [];
+  } catch {
+    return [];
+  }
+};
 // Annotate the CommonJS export names for ESM import in node:
 0 && (module.exports = {
+  getCustomConditions,
   readTsconfig,
   tsc
 });
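`tsc.js` gains `getCustomConditions`, which reads `customConditions` from the consumer's resolved tsconfig and falls back to an empty list on any failure. Its doc comment says the conditions are meant to be passed to tsx; this hunk does not show that wiring, so the adapter below is only an illustrative sketch using Node.js-style `--conditions` arguments:

```typescript
import { getCustomConditions } from './tsc.js';

// Hypothetical adapter: turn tsconfig `customConditions` into CLI arguments.
// The real wiring inside skuba's `node`/`start` commands may differ.
const conditionArgs = (): string[] =>
  getCustomConditions().flatMap((condition) => ['--conditions', condition]);

// e.g. "customConditions": ["@myorg/source"] => ['--conditions', '@myorg/source']
console.log(conditionArgs());
```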
package/lib/cli/build/tsc.js.map
CHANGED
@@ -1,7 +1,7 @@
 {
   "version": 3,
   "sources": ["../../../src/cli/build/tsc.ts"],
-
"sourcesContent": ["import ts from 'typescript';\n\nimport { exec } from '../../utils/exec.js';\nimport type
-
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,wBAAe;AAEf,kBAAqB;
-  "names": ["ts"]
+
"sourcesContent": ["import ts from 'typescript';\n\nimport { exec } from '../../utils/exec.js';\nimport { type Logger, log as logger } from '../../utils/logging.js';\n\nimport { parseTscArgs } from './args.js';\n\nconst DEFAULT_ARGS = ['--project', 'tsconfig.build.json'] as const;\n\nconst formatHost: ts.FormatDiagnosticsHost = {\n getCanonicalFileName: (fileName) => fileName,\n getCurrentDirectory: ts.sys.getCurrentDirectory.bind(undefined),\n getNewLine: () => ts.sys.newLine,\n};\n\nconst tsconfigCache = new Map<string, ts.ParsedCommandLine>();\nconst computeCacheKey = (args: string[]) => Array.from(args).sort().toString();\n\nexport const tsc = async (args = process.argv.slice(2)) => {\n const tscArgs = parseTscArgs(args);\n\n // Build flag is incompatible with project flag.\n const defaultArgs = tscArgs.build || tscArgs.project ? [] : DEFAULT_ARGS;\n\n return exec('tsc', ...defaultArgs, ...args);\n};\n\nexport const readTsconfig = (args = process.argv.slice(2), log: Logger) => {\n const tscArgs = parseTscArgs(args);\n\n let parsedCommandLine = tsconfigCache.get(computeCacheKey(args));\n\n if (!parsedCommandLine) {\n log.debug(\n log.bold(\n 'tsconfig',\n ...(tscArgs.project ? ['--project', tscArgs.project] : []),\n ),\n );\n log.debug(tscArgs.pathname);\n\n const tsconfigFile = ts.findConfigFile(\n tscArgs.dirname,\n ts.sys.fileExists.bind(undefined),\n tscArgs.basename,\n );\n if (!tsconfigFile) {\n log.err(`Could not find ${tscArgs.pathname}.`);\n process.exitCode = 1;\n return;\n }\n\n const readConfigFile = ts.readConfigFile(\n tsconfigFile,\n ts.sys.readFile.bind(undefined),\n );\n if (readConfigFile.error) {\n log.err(`Could not read ${tscArgs.pathname}.`);\n log.subtle(ts.formatDiagnostic(readConfigFile.error, formatHost));\n process.exitCode = 1;\n return;\n }\n\n parsedCommandLine = ts.parseJsonConfigFileContent(\n readConfigFile.config,\n ts.sys,\n tscArgs.dirname,\n );\n tsconfigCache.set(computeCacheKey(args), parsedCommandLine);\n }\n\n if (parsedCommandLine.errors.length) {\n log.err(`Could not parse ${tscArgs.pathname}.`);\n log.subtle(ts.formatDiagnostics(parsedCommandLine.errors, formatHost));\n process.exitCode = 1;\n return;\n }\n\n return parsedCommandLine;\n};\n\n/**\n * Extract custom conditions from tsconfig that should be passed to tsx\n */\nexport const getCustomConditions = (): string[] => {\n try {\n const parsedConfig = readTsconfig([], logger);\n const customConditions = parsedConfig?.options.customConditions;\n return Array.isArray(customConditions) ? customConditions : [];\n } catch {\n return [];\n }\n};\n"],
+
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,wBAAe;AAEf,kBAAqB;AACrB,qBAA2C;AAE3C,kBAA6B;AAE7B,MAAM,eAAe,CAAC,aAAa,qBAAqB;AAExD,MAAM,aAAuC;AAAA,EAC3C,sBAAsB,CAAC,aAAa;AAAA,EACpC,qBAAqB,kBAAAA,QAAG,IAAI,oBAAoB,KAAK,MAAS;AAAA,EAC9D,YAAY,MAAM,kBAAAA,QAAG,IAAI;AAC3B;AAEA,MAAM,gBAAgB,oBAAI,IAAkC;AAC5D,MAAM,kBAAkB,CAAC,SAAmB,MAAM,KAAK,IAAI,EAAE,KAAK,EAAE,SAAS;AAEtE,MAAM,MAAM,OAAO,OAAO,QAAQ,KAAK,MAAM,CAAC,MAAM;AACzD,QAAM,cAAU,0BAAa,IAAI;AAGjC,QAAM,cAAc,QAAQ,SAAS,QAAQ,UAAU,CAAC,IAAI;AAE5D,aAAO,kBAAK,OAAO,GAAG,aAAa,GAAG,IAAI;AAC5C;AAEO,MAAM,eAAe,CAAC,OAAO,QAAQ,KAAK,MAAM,CAAC,GAAG,QAAgB;AACzE,QAAM,cAAU,0BAAa,IAAI;AAEjC,MAAI,oBAAoB,cAAc,IAAI,gBAAgB,IAAI,CAAC;AAE/D,MAAI,CAAC,mBAAmB;AACtB,QAAI;AAAA,MACF,IAAI;AAAA,QACF;AAAA,QACA,GAAI,QAAQ,UAAU,CAAC,aAAa,QAAQ,OAAO,IAAI,CAAC;AAAA,MAC1D;AAAA,IACF;AACA,QAAI,MAAM,QAAQ,QAAQ;AAE1B,UAAM,eAAe,kBAAAA,QAAG;AAAA,MACtB,QAAQ;AAAA,MACR,kBAAAA,QAAG,IAAI,WAAW,KAAK,MAAS;AAAA,MAChC,QAAQ;AAAA,IACV;AACA,QAAI,CAAC,cAAc;AACjB,UAAI,IAAI,kBAAkB,QAAQ,QAAQ,GAAG;AAC7C,cAAQ,WAAW;AACnB;AAAA,IACF;AAEA,UAAM,iBAAiB,kBAAAA,QAAG;AAAA,MACxB;AAAA,MACA,kBAAAA,QAAG,IAAI,SAAS,KAAK,MAAS;AAAA,IAChC;AACA,QAAI,eAAe,OAAO;AACxB,UAAI,IAAI,kBAAkB,QAAQ,QAAQ,GAAG;AAC7C,UAAI,OAAO,kBAAAA,QAAG,iBAAiB,eAAe,OAAO,UAAU,CAAC;AAChE,cAAQ,WAAW;AACnB;AAAA,IACF;AAEA,wBAAoB,kBAAAA,QAAG;AAAA,MACrB,eAAe;AAAA,MACf,kBAAAA,QAAG;AAAA,MACH,QAAQ;AAAA,IACV;AACA,kBAAc,IAAI,gBAAgB,IAAI,GAAG,iBAAiB;AAAA,EAC5D;AAEA,MAAI,kBAAkB,OAAO,QAAQ;AACnC,QAAI,IAAI,mBAAmB,QAAQ,QAAQ,GAAG;AAC9C,QAAI,OAAO,kBAAAA,QAAG,kBAAkB,kBAAkB,QAAQ,UAAU,CAAC;AACrE,YAAQ,WAAW;AACnB;AAAA,EACF;AAEA,SAAO;AACT;AAKO,MAAM,sBAAsB,MAAgB;AACjD,MAAI;AACF,UAAM,eAAe,aAAa,CAAC,GAAG,eAAAC,GAAM;AAC5C,UAAM,mBAAmB,cAAc,QAAQ;AAC/C,WAAO,MAAM,QAAQ,gBAAgB,IAAI,mBAAmB,CAAC;AAAA,EAC/D,QAAQ;AACN,WAAO,CAAC;AAAA,EACV;AACF;",
+  "names": ["ts", "logger"]
 }
package/lib/cli/configure/analyseDependencies.d.ts
CHANGED
@@ -1,9 +1,9 @@
-import type {
+import type { ReadResult } from 'read-pkg-up';
 import type { ProjectType } from '../../utils/manifest.js';
 interface Props {
     destinationRoot: string;
     include: (pathname: string) => boolean;
-    manifest:
+    manifest: ReadResult;
     type: ProjectType;
 }
 export declare const analyseDependencies: ({ destinationRoot, include, manifest: { packageJson }, type, }: Props) => Promise<undefined | (() => Promise<void>)>;
package/lib/cli/configure/analyseDependencies.js.map
CHANGED
@@ -1,7 +1,7 @@
 {
   "version": 3,
   "sources": ["../../../src/cli/configure/analyseDependencies.ts"],
-
"sourcesContent": ["import path from 'path';\n\nimport fs from 'fs-extra';\nimport type {
+
"sourcesContent": ["import path from 'path';\n\nimport fs from 'fs-extra';\nimport type { ReadResult } from 'read-pkg-up';\n\nimport { type TextProcessor, copyFiles } from '../../utils/copy.js';\nimport { log } from '../../utils/logging.js';\nimport type { ProjectType } from '../../utils/manifest.js';\nimport { getLatestNpmVersion, getSkubaVersion } from '../../utils/version.js';\n\nimport { diffDependencies } from './analysis/package.js';\nimport * as dependencyMutators from './dependencies/index.js';\nimport { formatPackage } from './processing/package.js';\nimport type { DependencyDiff } from './types.js';\n\nconst logDiff = (diff: DependencyDiff): boolean => {\n const entries = Object.entries(diff);\n\n if (entries.length === 0) {\n log.ok('\u2714 No changes');\n\n return false;\n }\n\n Object.entries(diff)\n .sort(([nameA], [nameB]) => nameA.localeCompare(nameB))\n .forEach(([name, { operation, version }]) =>\n log.plain(operation, name, log.formatSubtle(version)),\n );\n\n return true;\n};\n\nconst pinUnspecifiedVersions = async (\n dependencies: Record<string, string>,\n): Promise<void> => {\n const updates = await Promise.all(\n Object.entries(dependencies)\n .filter(([, version]) => version === '*')\n .map(async ([name]) => {\n const version = await (name === 'skuba'\n ? getSkubaVersion()\n : getLatestNpmVersion(name));\n\n if (version === null) {\n throw new Error(`Failed to fetch latest version of ${name}`);\n }\n\n return [name, version] as const;\n }),\n );\n\n updates.forEach(([name, version]) => {\n dependencies[name] = version;\n });\n};\n\ninterface Props {\n destinationRoot: string;\n include: (pathname: string) => boolean;\n manifest: ReadResult;\n type: ProjectType;\n}\n\nexport const analyseDependencies = async ({\n destinationRoot,\n include,\n manifest: { packageJson },\n type,\n}: Props): Promise<undefined | (() => Promise<void>)> => {\n const input = {\n dependencies: packageJson.dependencies ?? {},\n devDependencies: packageJson.devDependencies ?? {},\n type,\n };\n\n const output = {\n dependencies: { ...input.dependencies },\n devDependencies: { ...input.devDependencies },\n type,\n };\n\n const processors = Object.values(dependencyMutators).reduce<TextProcessor[]>(\n (acc, mutate) => {\n const newProcessors = mutate(output);\n acc.push(...newProcessors);\n return acc;\n },\n [],\n );\n\n await Promise.all([\n pinUnspecifiedVersions(output.dependencies),\n pinUnspecifiedVersions(output.devDependencies),\n ]);\n\n const dependencyDiff = diffDependencies({\n old: input.dependencies,\n new: output.dependencies,\n });\n\n log.newline();\n log.plain(log.bold('Dependencies:'));\n\n log.newline();\n const hasDependencyDiff = logDiff(dependencyDiff);\n\n const devDependencyDiff = diffDependencies({\n old: input.devDependencies,\n new: output.devDependencies,\n });\n\n log.newline();\n log.plain(log.bold('Dev dependencies:'));\n\n log.newline();\n const hasDevDependencyDiff = logDiff(devDependencyDiff);\n\n const packageJsonFilepath = path.join(destinationRoot, 'package.json');\n\n if (!hasDependencyDiff && !hasDevDependencyDiff) {\n return;\n }\n\n return async () => {\n const updatedPackageJson = await formatPackage({\n ...packageJson,\n dependencies: output.dependencies,\n devDependencies: output.devDependencies,\n });\n\n await fs.promises.writeFile(packageJsonFilepath, updatedPackageJson);\n\n if (processors.length === 0) {\n return;\n }\n\n await copyFiles({\n sourceRoot: destinationRoot,\n destinationRoot,\n include,\n processors,\n });\n };\n};\n"],
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAiB;AAEjB,sBAAe;AAGf,kBAA8C;AAC9C,qBAAoB;AAEpB,qBAAqD;AAErD,qBAAiC;AACjC,yBAAoC;AACpC,IAAAA,kBAA8B;AAG9B,MAAM,UAAU,CAAC,SAAkC;AACjD,QAAM,UAAU,OAAO,QAAQ,IAAI;AAEnC,MAAI,QAAQ,WAAW,GAAG;AACxB,uBAAI,GAAG,mBAAc;AAErB,WAAO;AAAA,EACT;AAEA,SAAO,QAAQ,IAAI,EAChB,KAAK,CAAC,CAAC,KAAK,GAAG,CAAC,KAAK,MAAM,MAAM,cAAc,KAAK,CAAC,EACrD;AAAA,IAAQ,CAAC,CAAC,MAAM,EAAE,WAAW,QAAQ,CAAC,MACrC,mBAAI,MAAM,WAAW,MAAM,mBAAI,aAAa,OAAO,CAAC;AAAA,EACtD;AAEF,SAAO;AACT;AAEA,MAAM,yBAAyB,OAC7B,iBACkB;AAClB,QAAM,UAAU,MAAM,QAAQ;AAAA,IAC5B,OAAO,QAAQ,YAAY,EACxB,OAAO,CAAC,CAAC,EAAE,OAAO,MAAM,YAAY,GAAG,EACvC,IAAI,OAAO,CAAC,IAAI,MAAM;AACrB,YAAM,UAAU,OAAO,SAAS,cAC5B,gCAAgB,QAChB,oCAAoB,IAAI;AAE5B,UAAI,YAAY,MAAM;AACpB,cAAM,IAAI,MAAM,qCAAqC,IAAI,EAAE;AAAA,MAC7D;AAEA,aAAO,CAAC,MAAM,OAAO;AAAA,IACvB,CAAC;AAAA,EACL;AAEA,UAAQ,QAAQ,CAAC,CAAC,MAAM,OAAO,MAAM;AACnC,iBAAa,IAAI,IAAI;AAAA,EACvB,CAAC;AACH;AASO,MAAM,sBAAsB,OAAO;AAAA,EACxC;AAAA,EACA;AAAA,EACA,UAAU,EAAE,YAAY;AAAA,EACxB;AACF,MAAyD;AACvD,QAAM,QAAQ;AAAA,IACZ,cAAc,YAAY,gBAAgB,CAAC;AAAA,IAC3C,iBAAiB,YAAY,mBAAmB,CAAC;AAAA,IACjD;AAAA,EACF;AAEA,QAAM,SAAS;AAAA,IACb,cAAc,EAAE,GAAG,MAAM,aAAa;AAAA,IACtC,iBAAiB,EAAE,GAAG,MAAM,gBAAgB;AAAA,IAC5C;AAAA,EACF;AAEA,QAAM,aAAa,OAAO,OAAO,kBAAkB,EAAE;AAAA,IACnD,CAAC,KAAK,WAAW;AACf,YAAM,gBAAgB,OAAO,MAAM;AACnC,UAAI,KAAK,GAAG,aAAa;AACzB,aAAO;AAAA,IACT;AAAA,IACA,CAAC;AAAA,EACH;AAEA,QAAM,QAAQ,IAAI;AAAA,IAChB,uBAAuB,OAAO,YAAY;AAAA,IAC1C,uBAAuB,OAAO,eAAe;AAAA,EAC/C,CAAC;AAED,QAAM,qBAAiB,iCAAiB;AAAA,IACtC,KAAK,MAAM;AAAA,IACX,KAAK,OAAO;AAAA,EACd,CAAC;AAED,qBAAI,QAAQ;AACZ,qBAAI,MAAM,mBAAI,KAAK,eAAe,CAAC;AAEnC,qBAAI,QAAQ;AACZ,QAAM,oBAAoB,QAAQ,cAAc;AAEhD,QAAM,wBAAoB,iCAAiB;AAAA,IACzC,KAAK,MAAM;AAAA,IACX,KAAK,OAAO;AAAA,EACd,CAAC;AAED,qBAAI,QAAQ;AACZ,qBAAI,MAAM,mBAAI,KAAK,mBAAmB,CAAC;AAEvC,qBAAI,QAAQ;AACZ,QAAM,uBAAuB,QAAQ,iBAAiB;AAEtD,QAAM,sBAAsB,YAAAC,QAAK,KAAK,iBAAiB,cAAc;AAErE,MAAI,CAAC,qBAAqB,CAAC,sBAAsB;AAC/C;AAAA,EACF;AAEA,SAAO,YAAY;AACjB,UAAM,qBAAqB,UAAM,+BAAc;AAAA,MAC7C,GAAG;AAAA,MACH,cAAc,OAAO;AAAA,MACrB,iBAAiB,OAAO;AAAA,IAC1B,CAAC;AAED,UAAM,gBAAAC,QAAG,SAAS,UAAU,qBAAqB,kBAAkB;AAEnE,QAAI,WAAW,WAAW,GAAG;AAC3B;AAAA,IACF;AAEA,cAAM,uBAAU;AAAA,MACd,YAAY;AAAA,MACZ;AAAA,MACA;AAAA,MACA;AAAA,IACF,CAAC;AAAA,EACH;AACF;",
  "names": ["import_package", "path", "fs"]
 }
package/lib/cli/configure/analysis/package.d.ts
CHANGED
@@ -3,7 +3,7 @@ import type { DependencyDiff } from '../types.js';
 interface GetDestinationManifestProps {
     cwd?: string;
 }
-export declare const getDestinationManifest: (props?: GetDestinationManifestProps) => Promise<readPkgUp.
+export declare const getDestinationManifest: (props?: GetDestinationManifestProps) => Promise<readPkgUp.ReadResult>;
 interface DiffDependenciesProps {
     old: Record<string, string | undefined>;
     new: Record<string, string | undefined>;
package/lib/cli/configure/analysis/package.js
CHANGED
@@ -36,7 +36,7 @@ var import_read_pkg_up = __toESM(require("read-pkg-up"));
 var import_logging = require("../../../utils/logging.js");
 var import_diff = require("./diff.js");
 const getDestinationManifest = async (props) => {
-  const result = await (0, import_read_pkg_up.default)(props);
+  const result = await (0, import_read_pkg_up.default)({ ...props, normalize: false });
   if (result === void 0) {
     import_logging.log.err(
       "Could not find a",
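`getDestinationManifest` now calls `read-pkg-up` with `normalize: false`, returning the manifest as written on disk instead of running it through `normalize-package-data` (which, as the later `processing/package.js` change notes, can throw on partially-initialised projects). The matching `.d.ts` above now types the result as a plain `ReadResult`. A small sketch of the call, with illustrative values:

```typescript
import readPkgUp from 'read-pkg-up';

const main = async () => {
  // `normalize: false` leaves fields such as an unusual `name` untouched
  // rather than validating and rewriting them.
  const result = await readPkgUp({ cwd: process.cwd(), normalize: false });

  if (!result) {
    throw new Error('Could not find a package.json');
  }

  // `ReadResult` carries the parsed JSON plus the path it was read from.
  console.log(result.path, result.packageJson.name);
};

void main();
```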
package/lib/cli/configure/analysis/package.js.map
CHANGED
@@ -1,7 +1,7 @@
 {
   "version": 3,
   "sources": ["../../../../src/cli/configure/analysis/package.ts"],
-
"sourcesContent": ["import readPkgUp from 'read-pkg-up';\n\nimport { log } from '../../../utils/logging.js';\nimport type { DependencyDiff } from '../types.js';\n\nimport { determineOperation } from './diff.js';\n\ninterface GetDestinationManifestProps {\n cwd?: string;\n}\n\nexport const getDestinationManifest = async (\n props?: GetDestinationManifestProps,\n) => {\n const result = await readPkgUp(props);\n\n if (result === undefined) {\n log.err(\n 'Could not find a',\n log.bold('package.json'),\n 'in your working directory.',\n );\n process.exit(1);\n }\n\n return result;\n};\n\nconst joinVersions = (a: string | undefined, b: string | undefined) =>\n [a, b].filter((v) => v !== undefined).join(' -> ');\n\ninterface DiffDependenciesProps {\n old: Record<string, string | undefined>;\n new: Record<string, string | undefined>;\n}\n\nexport const diffDependencies = (\n props: DiffDependenciesProps,\n): DependencyDiff => {\n const deletionsAndModifications = Object.fromEntries(\n Object.entries(props.old).flatMap(([name, oldVersion]) => {\n if (oldVersion === props.new[name] || oldVersion === undefined) {\n return [];\n }\n\n const newVersion = props.new[name];\n\n const operation = determineOperation(oldVersion, newVersion);\n const version = joinVersions(oldVersion, newVersion);\n\n return [[name, { operation, version }]] as const;\n }),\n );\n\n const additions = Object.fromEntries(\n Object.entries(props.new).flatMap(([name, version]) => {\n if (name in props.old || version === undefined) {\n return [];\n }\n\n const oldVersion = props.old[name];\n\n const operation = determineOperation(oldVersion, version);\n\n return [[name, { operation, version }]] as const;\n }),\n );\n\n return {\n ...deletionsAndModifications,\n ...additions,\n };\n};\n"],
-
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,yBAAsB;AAEtB,qBAAoB;AAGpB,kBAAmC;AAM5B,MAAM,yBAAyB,OACpC,UACG;AACH,QAAM,SAAS,UAAM,mBAAAA,SAAU,
+
"sourcesContent": ["import readPkgUp from 'read-pkg-up';\n\nimport { log } from '../../../utils/logging.js';\nimport type { DependencyDiff } from '../types.js';\n\nimport { determineOperation } from './diff.js';\n\ninterface GetDestinationManifestProps {\n cwd?: string;\n}\n\nexport const getDestinationManifest = async (\n props?: GetDestinationManifestProps,\n) => {\n const result = await readPkgUp({ ...props, normalize: false });\n\n if (result === undefined) {\n log.err(\n 'Could not find a',\n log.bold('package.json'),\n 'in your working directory.',\n );\n process.exit(1);\n }\n\n return result;\n};\n\nconst joinVersions = (a: string | undefined, b: string | undefined) =>\n [a, b].filter((v) => v !== undefined).join(' -> ');\n\ninterface DiffDependenciesProps {\n old: Record<string, string | undefined>;\n new: Record<string, string | undefined>;\n}\n\nexport const diffDependencies = (\n props: DiffDependenciesProps,\n): DependencyDiff => {\n const deletionsAndModifications = Object.fromEntries(\n Object.entries(props.old).flatMap(([name, oldVersion]) => {\n if (oldVersion === props.new[name] || oldVersion === undefined) {\n return [];\n }\n\n const newVersion = props.new[name];\n\n const operation = determineOperation(oldVersion, newVersion);\n const version = joinVersions(oldVersion, newVersion);\n\n return [[name, { operation, version }]] as const;\n }),\n );\n\n const additions = Object.fromEntries(\n Object.entries(props.new).flatMap(([name, version]) => {\n if (name in props.old || version === undefined) {\n return [];\n }\n\n const oldVersion = props.old[name];\n\n const operation = determineOperation(oldVersion, version);\n\n return [[name, { operation, version }]] as const;\n }),\n );\n\n return {\n ...deletionsAndModifications,\n ...additions,\n };\n};\n"],
+
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,yBAAsB;AAEtB,qBAAoB;AAGpB,kBAAmC;AAM5B,MAAM,yBAAyB,OACpC,UACG;AACH,QAAM,SAAS,UAAM,mBAAAA,SAAU,EAAE,GAAG,OAAO,WAAW,MAAM,CAAC;AAE7D,MAAI,WAAW,QAAW;AACxB,uBAAI;AAAA,MACF;AAAA,MACA,mBAAI,KAAK,cAAc;AAAA,MACvB;AAAA,IACF;AACA,YAAQ,KAAK,CAAC;AAAA,EAChB;AAEA,SAAO;AACT;AAEA,MAAM,eAAe,CAAC,GAAuB,MAC3C,CAAC,GAAG,CAAC,EAAE,OAAO,CAAC,MAAM,MAAM,MAAS,EAAE,KAAK,MAAM;AAO5C,MAAM,mBAAmB,CAC9B,UACmB;AACnB,QAAM,4BAA4B,OAAO;AAAA,IACvC,OAAO,QAAQ,MAAM,GAAG,EAAE,QAAQ,CAAC,CAAC,MAAM,UAAU,MAAM;AACxD,UAAI,eAAe,MAAM,IAAI,IAAI,KAAK,eAAe,QAAW;AAC9D,eAAO,CAAC;AAAA,MACV;AAEA,YAAM,aAAa,MAAM,IAAI,IAAI;AAEjC,YAAM,gBAAY,gCAAmB,YAAY,UAAU;AAC3D,YAAM,UAAU,aAAa,YAAY,UAAU;AAEnD,aAAO,CAAC,CAAC,MAAM,EAAE,WAAW,QAAQ,CAAC,CAAC;AAAA,IACxC,CAAC;AAAA,EACH;AAEA,QAAM,YAAY,OAAO;AAAA,IACvB,OAAO,QAAQ,MAAM,GAAG,EAAE,QAAQ,CAAC,CAAC,MAAM,OAAO,MAAM;AACrD,UAAI,QAAQ,MAAM,OAAO,YAAY,QAAW;AAC9C,eAAO,CAAC;AAAA,MACV;AAEA,YAAM,aAAa,MAAM,IAAI,IAAI;AAEjC,YAAM,gBAAY,gCAAmB,YAAY,OAAO;AAExD,aAAO,CAAC,CAAC,MAAM,EAAE,WAAW,QAAQ,CAAC,CAAC;AAAA,IACxC,CAAC;AAAA,EACH;AAEA,SAAO;AAAA,IACL,GAAG;AAAA,IACH,GAAG;AAAA,EACL;AACF;",
  "names": ["readPkgUp"]
 }
package/lib/cli/configure/ensureTemplateCompletion.d.ts
CHANGED
@@ -1,9 +1,9 @@
-import type {
+import type { ReadResult } from 'read-pkg-up';
 import { type TemplateConfig } from '../../utils/template.js';
 interface Props {
     destinationRoot: string;
     include: (pathname: string) => boolean;
-    manifest:
+    manifest: ReadResult;
 }
 export declare const ensureTemplateCompletion: ({ destinationRoot, include, manifest, }: Props) => Promise<TemplateConfig>;
 export {};
package/lib/cli/configure/ensureTemplateCompletion.js.map
CHANGED
@@ -1,7 +1,7 @@
 {
   "version": 3,
   "sources": ["../../../src/cli/configure/ensureTemplateCompletion.ts"],
-
"sourcesContent": ["import path from 'path';\n\nimport chalk from 'chalk';\nimport fs from 'fs-extra';\nimport type {
+
"sourcesContent": ["import path from 'path';\n\nimport chalk from 'chalk';\nimport fs from 'fs-extra';\nimport type { ReadResult } from 'read-pkg-up';\nimport * as z from 'zod/v4';\n\nimport { copyFiles, createEjsRenderer } from '../../utils/copy.js';\nimport { log } from '../../utils/logging.js';\nimport {\n type TemplateConfig,\n ensureTemplateConfigDeletion,\n} from '../../utils/template.js';\nimport { hasStringProp } from '../../utils/validation.js';\nimport {\n getTemplateConfig,\n readJSONFromStdIn,\n runForm,\n} from '../init/getConfig.js';\n\nimport { formatPackage } from './processing/package.js';\n\ninterface Props {\n destinationRoot: string;\n include: (pathname: string) => boolean;\n manifest: ReadResult;\n}\n\nconst templateDataSchema = z.object({\n templateData: z.record(z.string(), z.string()),\n});\n\nconst getTemplateDataFromStdIn = async (\n templateConfig: TemplateConfig,\n): Promise<Record<string, string>> => {\n const config = await readJSONFromStdIn();\n const data = templateDataSchema.parse(config);\n\n templateConfig.fields.forEach((field) => {\n const value = data.templateData[field.name];\n if (value === undefined) {\n throw new Error(`Missing field: ${field.name}`);\n }\n\n if (field.validate && !field.validate(value)) {\n throw new Error(`Invalid value for field: ${field.name}`);\n }\n });\n\n return data.templateData;\n};\n\nexport const ensureTemplateCompletion = async ({\n destinationRoot,\n include,\n manifest,\n}: Props): Promise<TemplateConfig> => {\n const templateConfig = getTemplateConfig(destinationRoot);\n\n if (templateConfig.fields.length === 0) {\n return templateConfig;\n }\n\n const templateName = hasStringProp(manifest.packageJson.skuba, 'template')\n ? manifest.packageJson.skuba.template\n : 'template';\n\n log.newline();\n const templateData = process.stdin.isTTY\n ? await runForm({\n choices: templateConfig.fields,\n message: chalk.bold(`Complete ${chalk.cyan(templateName)}:`),\n name: 'customAnswers',\n })\n : await getTemplateDataFromStdIn(templateConfig);\n\n const updatedPackageJson = await formatPackage(manifest.packageJson);\n const packageJsonFilepath = path.join(destinationRoot, 'package.json');\n await fs.promises.writeFile(packageJsonFilepath, updatedPackageJson);\n\n await copyFiles({\n sourceRoot: destinationRoot,\n destinationRoot,\n include,\n processors: [createEjsRenderer(templateData)],\n });\n\n await ensureTemplateConfigDeletion(destinationRoot);\n\n log.newline();\n log.ok('Templating complete!');\n\n return templateConfig;\n};\n"],
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAiB;AAEjB,mBAAkB;AAClB,sBAAe;AAEf,QAAmB;AAEnB,kBAA6C;AAC7C,qBAAoB;AACpB,sBAGO;AACP,wBAA8B;AAC9B,uBAIO;AAEP,qBAA8B;AAQ9B,MAAM,qBAAqB,EAAE,OAAO;AAAA,EAClC,cAAc,EAAE,OAAO,EAAE,OAAO,GAAG,EAAE,OAAO,CAAC;AAC/C,CAAC;AAED,MAAM,2BAA2B,OAC/B,mBACoC;AACpC,QAAM,SAAS,UAAM,oCAAkB;AACvC,QAAM,OAAO,mBAAmB,MAAM,MAAM;AAE5C,iBAAe,OAAO,QAAQ,CAAC,UAAU;AACvC,UAAM,QAAQ,KAAK,aAAa,MAAM,IAAI;AAC1C,QAAI,UAAU,QAAW;AACvB,YAAM,IAAI,MAAM,kBAAkB,MAAM,IAAI,EAAE;AAAA,IAChD;AAEA,QAAI,MAAM,YAAY,CAAC,MAAM,SAAS,KAAK,GAAG;AAC5C,YAAM,IAAI,MAAM,4BAA4B,MAAM,IAAI,EAAE;AAAA,IAC1D;AAAA,EACF,CAAC;AAED,SAAO,KAAK;AACd;AAEO,MAAM,2BAA2B,OAAO;AAAA,EAC7C;AAAA,EACA;AAAA,EACA;AACF,MAAsC;AACpC,QAAM,qBAAiB,oCAAkB,eAAe;AAExD,MAAI,eAAe,OAAO,WAAW,GAAG;AACtC,WAAO;AAAA,EACT;AAEA,QAAM,mBAAe,iCAAc,SAAS,YAAY,OAAO,UAAU,IACrE,SAAS,YAAY,MAAM,WAC3B;AAEJ,qBAAI,QAAQ;AACZ,QAAM,eAAe,QAAQ,MAAM,QAC/B,UAAM,0BAAQ;AAAA,IACZ,SAAS,eAAe;AAAA,IACxB,SAAS,aAAAA,QAAM,KAAK,YAAY,aAAAA,QAAM,KAAK,YAAY,CAAC,GAAG;AAAA,IAC3D,MAAM;AAAA,EACR,CAAC,IACD,MAAM,yBAAyB,cAAc;AAEjD,QAAM,qBAAqB,UAAM,8BAAc,SAAS,WAAW;AACnE,QAAM,sBAAsB,YAAAC,QAAK,KAAK,iBAAiB,cAAc;AACrE,QAAM,gBAAAC,QAAG,SAAS,UAAU,qBAAqB,kBAAkB;AAEnE,YAAM,uBAAU;AAAA,IACd,YAAY;AAAA,IACZ;AAAA,IACA;AAAA,IACA,YAAY,KAAC,+BAAkB,YAAY,CAAC;AAAA,EAC9C,CAAC;AAED,YAAM,8CAA6B,eAAe;AAElD,qBAAI,QAAQ;AACZ,qBAAI,GAAG,sBAAsB;AAE7B,SAAO;AACT;",
  "names": ["chalk", "path", "fs"]
 }
package/lib/cli/configure/getEntryPoint.d.ts
CHANGED
@@ -1,9 +1,9 @@
-import type {
+import type { ReadResult } from 'read-pkg-up';
 import type { ProjectType } from '../../utils/manifest.js';
 import type { TemplateConfig } from '../../utils/template.js';
 interface Props {
     destinationRoot: string;
-    manifest:
+    manifest: ReadResult;
     templateConfig: TemplateConfig;
     type: ProjectType;
 }
package/lib/cli/configure/getEntryPoint.js.map
CHANGED
@@ -1,7 +1,7 @@
 {
   "version": 3,
   "sources": ["../../../src/cli/configure/getEntryPoint.ts"],
-
"sourcesContent": ["import path from 'path';\n\nimport chalk from 'chalk';\nimport { Input } from 'enquirer';\nimport type {
+
"sourcesContent": ["import path from 'path';\n\nimport chalk from 'chalk';\nimport { Input } from 'enquirer';\nimport type { ReadResult } from 'read-pkg-up';\n\nimport { log } from '../../utils/logging.js';\nimport type { ProjectType } from '../../utils/manifest.js';\nimport type { TemplateConfig } from '../../utils/template.js';\nimport { hasStringProp } from '../../utils/validation.js';\n\nimport { tsFileExists } from './analysis/files.js';\n\ninterface Props {\n destinationRoot: string;\n manifest: ReadResult;\n templateConfig: TemplateConfig;\n type: ProjectType;\n}\nexport const getEntryPoint = ({\n destinationRoot,\n manifest,\n templateConfig,\n type,\n}: Props) => {\n if (hasStringProp(manifest.packageJson.skuba, 'entryPoint')) {\n return manifest.packageJson.skuba.entryPoint;\n }\n\n if (templateConfig.entryPoint !== undefined) {\n return templateConfig.entryPoint;\n }\n\n log.newline();\n const entryPointPrompt = new Input({\n initial: type === 'package' ? 'src/index.ts' : 'src/app.ts',\n message: 'Entry point:',\n name: 'entryPoint',\n result: (value) => (value.endsWith('.ts') ? value : `${value}.ts`),\n validate: async (value) => {\n // Support exported function targeting, e.g. `src/module.ts#callMeMaybe`\n const [modulePath] = value.split('#', 2);\n\n if (!modulePath) {\n return `${chalk.bold(value)} is an invalid module path`;\n }\n\n const exists = await tsFileExists(path.join(destinationRoot, modulePath));\n\n return exists || `${chalk.bold(value)} is not a TypeScript file.`;\n },\n });\n\n return entryPointPrompt.run();\n};\n"],
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAiB;AAEjB,mBAAkB;AAClB,sBAAsB;AAGtB,qBAAoB;AAGpB,wBAA8B;AAE9B,mBAA6B;AAQtB,MAAM,gBAAgB,CAAC;AAAA,EAC5B;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF,MAAa;AACX,UAAI,iCAAc,SAAS,YAAY,OAAO,YAAY,GAAG;AAC3D,WAAO,SAAS,YAAY,MAAM;AAAA,EACpC;AAEA,MAAI,eAAe,eAAe,QAAW;AAC3C,WAAO,eAAe;AAAA,EACxB;AAEA,qBAAI,QAAQ;AACZ,QAAM,mBAAmB,IAAI,sBAAM;AAAA,IACjC,SAAS,SAAS,YAAY,iBAAiB;AAAA,IAC/C,SAAS;AAAA,IACT,MAAM;AAAA,IACN,QAAQ,CAAC,UAAW,MAAM,SAAS,KAAK,IAAI,QAAQ,GAAG,KAAK;AAAA,IAC5D,UAAU,OAAO,UAAU;AAEzB,YAAM,CAAC,UAAU,IAAI,MAAM,MAAM,KAAK,CAAC;AAEvC,UAAI,CAAC,YAAY;AACf,eAAO,GAAG,aAAAA,QAAM,KAAK,KAAK,CAAC;AAAA,MAC7B;AAEA,YAAM,SAAS,UAAM,2BAAa,YAAAC,QAAK,KAAK,iBAAiB,UAAU,CAAC;AAExE,aAAO,UAAU,GAAG,aAAAD,QAAM,KAAK,KAAK,CAAC;AAAA,IACvC;AAAA,EACF,CAAC;AAED,SAAO,iBAAiB,IAAI;AAC9B;",
  "names": ["chalk", "path"]
 }
package/lib/cli/configure/getProjectType.d.ts
CHANGED
@@ -1,8 +1,8 @@
-import type {
+import type { ReadResult } from 'read-pkg-up';
 import { type ProjectType } from '../../utils/manifest.js';
 import type { TemplateConfig } from '../../utils/template.js';
 interface Props {
-    manifest:
+    manifest: ReadResult;
     templateConfig: TemplateConfig;
 }
 export declare const getProjectType: ({ manifest, templateConfig, }: Props) => Promise<ProjectType>;
package/lib/cli/configure/getProjectType.js.map
CHANGED
@@ -1,7 +1,7 @@
 {
   "version": 3,
   "sources": ["../../../src/cli/configure/getProjectType.ts"],
-
"sourcesContent": ["import { Select } from 'enquirer';\nimport type {
+
"sourcesContent": ["import { Select } from 'enquirer';\nimport type { ReadResult } from 'read-pkg-up';\n\nimport { log } from '../../utils/logging.js';\nimport {\n PROJECT_TYPES,\n type ProjectType,\n projectTypeSchema,\n} from '../../utils/manifest.js';\nimport type { TemplateConfig } from '../../utils/template.js';\nimport { hasProp } from '../../utils/validation.js';\n\ninterface Props {\n manifest: ReadResult;\n templateConfig: TemplateConfig;\n}\n\nexport const getProjectType = async ({\n manifest,\n templateConfig,\n}: Props): Promise<ProjectType> => {\n const projectType = projectTypeSchema.safeParse(\n hasProp(manifest.packageJson.skuba, 'type')\n ? manifest.packageJson.skuba.type\n : null,\n );\n\n if (projectType.success) {\n return projectType.data;\n }\n\n if (templateConfig.type !== undefined) {\n return templateConfig.type;\n }\n\n const initial: ProjectType =\n manifest.packageJson.devDependencies?.['@seek/seek-module-toolkit'] ||\n manifest.packageJson.files\n ? 'package'\n : 'application';\n\n log.newline();\n const projectTypePrompt = new Select({\n choices: PROJECT_TYPES,\n message: 'Project type:',\n name: 'projectType',\n initial,\n });\n\n return projectTypePrompt.run();\n};\n"],
"mappings": ";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,sBAAuB;AAGvB,qBAAoB;AACpB,sBAIO;AAEP,wBAAwB;AAOjB,MAAM,iBAAiB,OAAO;AAAA,EACnC;AAAA,EACA;AACF,MAAmC;AACjC,QAAM,cAAc,kCAAkB;AAAA,QACpC,2BAAQ,SAAS,YAAY,OAAO,MAAM,IACtC,SAAS,YAAY,MAAM,OAC3B;AAAA,EACN;AAEA,MAAI,YAAY,SAAS;AACvB,WAAO,YAAY;AAAA,EACrB;AAEA,MAAI,eAAe,SAAS,QAAW;AACrC,WAAO,eAAe;AAAA,EACxB;AAEA,QAAM,UACJ,SAAS,YAAY,kBAAkB,2BAA2B,KAClE,SAAS,YAAY,QACjB,YACA;AAEN,qBAAI,QAAQ;AACZ,QAAM,oBAAoB,IAAI,uBAAO;AAAA,IACnC,SAAS;AAAA,IACT,SAAS;AAAA,IACT,MAAM;AAAA,IACN;AAAA,EACF,CAAC;AAED,SAAO,kBAAkB,IAAI;AAC/B;",
  "names": []
 }
package/lib/cli/configure/processing/package.js
CHANGED
@@ -37,8 +37,14 @@ module.exports = __toCommonJS(package_exports);
 var import_normalize_package_data = __toESM(require("normalize-package-data"));
 var import_json = require("./json.js");
 var import_prettier = require("./prettier.js");
+const normalizeDataWithoutThrowing = (rawData) => {
+  try {
+    (0, import_normalize_package_data.default)(rawData);
+  } catch {
+  }
+};
 const formatPackage = async (rawData) => {
-  (
+  normalizeDataWithoutThrowing(rawData);
   delete rawData._id;
   if (rawData.name === "") {
     delete rawData.name;
@@ -58,7 +64,7 @@ const parsePackage = (input) => {
   if (data === void 0) {
     return;
   }
-  (
+  normalizeDataWithoutThrowing(data);
   return data;
 };
 const createDependencyFilter = (names, type) => {
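The compiled `normalizeDataWithoutThrowing` helper above mirrors the TypeScript captured in the updated source map: normalization is attempted, but failures are swallowed because `normalize-package-data` can be picky about fields such as `name`, which breaks partially-init-ed projects. A standalone sketch of the same guard, with a made-up placeholder manifest:

```typescript
import normalizeData from 'normalize-package-data';

type PackageJson = Record<string, unknown>;

const normalizeDataWithoutThrowing = (rawData: PackageJson) => {
  try {
    normalizeData(rawData);
  } catch {
    // Leave the manifest untouched if normalization rejects it.
  }
};

// A partially-initialised manifest (illustrative) whose templated `name`
// would otherwise cause normalization to throw.
const manifest: PackageJson = { name: '<%- packageName %>', version: '' };

normalizeDataWithoutThrowing(manifest);
console.log(manifest);
```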
package/lib/cli/configure/processing/package.js.map
CHANGED
@@ -1,7 +1,7 @@
 {
   "version": 3,
   "sources": ["../../../../src/cli/configure/processing/package.ts"],
-
"sourcesContent": ["import normalizeData from 'normalize-package-data';\n\nimport type { PackageJson } from '../types.js';\n\nimport { parseObject } from './json.js';\nimport { formatPrettier } from './prettier.js';\n\nexport const formatPackage = async (rawData: PackageJson) => {\n
-
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,oCAA0B;AAI1B,kBAA4B;AAC5B,sBAA+B;
+
"sourcesContent": ["import normalizeData from 'normalize-package-data';\n\nimport type { PackageJson } from '../types.js';\n\nimport { parseObject } from './json.js';\nimport { formatPrettier } from './prettier.js';\n\nconst normalizeDataWithoutThrowing = (rawData: PackageJson) => {\n try {\n normalizeData(rawData);\n } catch {\n // `normalize-package-data` can be picky about e.g. the `name` being valid.\n // This creates issues for partially-init-ed projects.\n }\n};\n\nexport const formatPackage = async (rawData: PackageJson) => {\n normalizeDataWithoutThrowing(rawData);\n\n // normalize-package-data fields that aren't useful for applications\n\n delete rawData._id;\n\n if (rawData.name === '') {\n delete rawData.name;\n }\n\n if (rawData.readme === 'ERROR: No README data found!') {\n delete rawData.readme;\n }\n\n if (rawData.version === '') {\n delete rawData.version;\n }\n\n return formatPrettier(JSON.stringify(rawData), {\n filepath: 'package.json',\n });\n};\n\nexport const parsePackage = (\n input: string | undefined,\n): PackageJson | undefined => {\n const data = parseObject(input);\n\n if (data === undefined) {\n return;\n }\n\n normalizeDataWithoutThrowing(data);\n\n return data;\n};\n\nexport const createDependencyFilter = (\n names: readonly string[],\n type: 'dependencies' | 'devDependencies',\n) => {\n const set = new Set(names);\n\n return (data: PackageJson) => ({\n ...data,\n [type]: Object.fromEntries(\n Object.entries(data[type] ?? {}).filter(([name]) => !set.has(name)),\n ),\n });\n};\n\nexport const withPackage =\n (fn: (data: PackageJson) => PackageJson) => (input: string | undefined) => {\n const inputObject = parsePackage(input);\n\n const outputObject = fn(inputObject ?? {});\n\n return formatPackage(outputObject);\n };\n"],
+
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,oCAA0B;AAI1B,kBAA4B;AAC5B,sBAA+B;AAE/B,MAAM,+BAA+B,CAAC,YAAyB;AAC7D,MAAI;AACF,sCAAAA,SAAc,OAAO;AAAA,EACvB,QAAQ;AAAA,EAGR;AACF;AAEO,MAAM,gBAAgB,OAAO,YAAyB;AAC3D,+BAA6B,OAAO;AAIpC,SAAO,QAAQ;AAEf,MAAI,QAAQ,SAAS,IAAI;AACvB,WAAO,QAAQ;AAAA,EACjB;AAEA,MAAI,QAAQ,WAAW,gCAAgC;AACrD,WAAO,QAAQ;AAAA,EACjB;AAEA,MAAI,QAAQ,YAAY,IAAI;AAC1B,WAAO,QAAQ;AAAA,EACjB;AAEA,aAAO,gCAAe,KAAK,UAAU,OAAO,GAAG;AAAA,IAC7C,UAAU;AAAA,EACZ,CAAC;AACH;AAEO,MAAM,eAAe,CAC1B,UAC4B;AAC5B,QAAM,WAAO,yBAAY,KAAK;AAE9B,MAAI,SAAS,QAAW;AACtB;AAAA,EACF;AAEA,+BAA6B,IAAI;AAEjC,SAAO;AACT;AAEO,MAAM,yBAAyB,CACpC,OACA,SACG;AACH,QAAM,MAAM,IAAI,IAAI,KAAK;AAEzB,SAAO,CAAC,UAAuB;AAAA,IAC7B,GAAG;AAAA,IACH,CAAC,IAAI,GAAG,OAAO;AAAA,MACb,OAAO,QAAQ,KAAK,IAAI,KAAK,CAAC,CAAC,EAAE,OAAO,CAAC,CAAC,IAAI,MAAM,CAAC,IAAI,IAAI,IAAI,CAAC;AAAA,IACpE;AAAA,EACF;AACF;AAEO,MAAM,cACX,CAAC,OAA2C,CAAC,UAA8B;AACzE,QAAM,cAAc,aAAa,KAAK;AAEtC,QAAM,eAAe,GAAG,eAAe,CAAC,CAAC;AAEzC,SAAO,cAAc,YAAY;AACnC;",
  "names": ["normalizeData"]
 }
package/lib/cli/init/getConfig.js
CHANGED
@@ -65,7 +65,7 @@ const runForm = (props) => {
     name,
     validate: async (values) => {
       const results = await Promise.all(
-        choices.map((choice) => choice.validate(values[choice.name]))
+        choices.map(async (choice) => choice.validate(values[choice.name]))
       );
       return results.find((result) => typeof result === "string") ?? results.every((result) => result === true);
     }
package/lib/cli/init/getConfig.js.map
CHANGED
@@ -1,7 +1,7 @@
 {
   "version": 3,
   "sources": ["../../../src/cli/init/getConfig.ts"],
-
"sourcesContent": ["import path from 'path';\n\nimport chalk from 'chalk';\nimport { Form, type FormChoice } from 'enquirer';\nimport fs from 'fs-extra';\n\nimport { copyFiles } from '../../utils/copy.js';\nimport { isErrorWithCode } from '../../utils/error.js';\nimport { log } from '../../utils/logging.js';\nimport {\n DEFAULT_PACKAGE_MANAGER,\n configForPackageManager,\n} from '../../utils/packageManager.js';\nimport { getRandomPort } from '../../utils/port.js';\nimport {\n TEMPLATE_CONFIG_FILENAME,\n TEMPLATE_DIR,\n type TemplateConfig,\n templateConfigSchema,\n} from '../../utils/template.js';\n\nimport { downloadGitHubTemplate } from './git.js';\nimport {\n BASE_PROMPT_PROPS,\n type BaseFields,\n type Choice,\n GIT_PATH_PROMPT,\n SHOULD_CONTINUE_PROMPT,\n TEMPLATE_PROMPT,\n} from './prompts.js';\nimport { type InitConfig, initConfigInputSchema } from './types.js';\n\nexport const runForm = <T = Record<string, string>>(props: {\n choices: readonly Choice[];\n message: string;\n name: string;\n}) => {\n const { message, name } = props;\n\n const choices = props.choices.map((choice) => ({\n ...choice,\n validate: (value: string | undefined) => {\n if (\n !value ||\n value === '' ||\n (value === choice.initial && !choice.allowInitial)\n ) {\n return 'Form is not complete';\n }\n\n return choice.validate?.(value) ?? true;\n },\n }));\n\n const form = new Form<T>({\n choices,\n message,\n name,\n validate: async (values) => {\n const results = await Promise.all(\n choices.map((choice) => choice.validate(values[choice.name])),\n );\n\n return (\n results.find((result) => typeof result === 'string') ??\n results.every((result) => result === true)\n );\n },\n });\n\n return form.run();\n};\n\nconst confirmShouldContinue = async (choices: readonly FormChoice[]) => {\n const fieldsList = choices.map((choice) => choice.message);\n\n log.newline();\n log.plain('This template uses the following information:');\n log.newline();\n fieldsList.forEach((message) => log.subtle(`- ${message}`));\n\n log.newline();\n const result = await SHOULD_CONTINUE_PROMPT.run();\n\n return result === 'yes';\n};\n\nconst createDirectory = async (dir: string) => {\n try {\n await fs.promises.mkdir(dir);\n } catch (err) {\n if (isErrorWithCode(err, 'EEXIST')) {\n log.err(`The directory '${dir}' already exists.`);\n process.exit(1);\n }\n\n throw err;\n }\n};\n\nconst cloneTemplate = async (\n templateName: string,\n destinationDir: string,\n): Promise<TemplateConfig> => {\n const isCustomTemplate = templateName.startsWith('github:');\n\n if (isCustomTemplate) {\n const gitHubPath = templateName.slice('github:'.length);\n\n await downloadGitHubTemplate(gitHubPath, destinationDir);\n } else {\n const templateDir = path.join(TEMPLATE_DIR, templateName);\n\n await copyFiles({\n // assume built-in templates have no extraneous files\n include: () => true,\n sourceRoot: templateDir,\n destinationRoot: destinationDir,\n processors: [],\n // built-in templates have files like _package.json\n stripUnderscorePrefix: true,\n });\n }\n\n const templateConfig = getTemplateConfig(\n path.join(process.cwd(), destinationDir),\n );\n\n if (isCustomTemplate) {\n log.newline();\n log.warn(\n 'You may need to run',\n log.bold(\n configForPackageManager(templateConfig.packageManager).print.exec,\n 'skuba',\n 'configure',\n ),\n 'once this is done.',\n );\n }\n\n return templateConfig;\n};\n\nconst getTemplateName = async () => {\n const templateSelection = await TEMPLATE_PROMPT.run();\n\n if (templateSelection === 'github \u2192') {\n const 
gitHubPath = await GIT_PATH_PROMPT.run();\n return `github:${gitHubPath}`;\n }\n\n return templateSelection;\n};\n\nconst generatePlaceholders = (choices: FormChoice[]) =>\n Object.fromEntries(\n choices.map(({ name }) => [name, `<%- ${name} %>`] as const),\n );\n\nexport const getTemplateConfig = (dir: string): TemplateConfig => {\n const templateConfigPath = path.join(dir, TEMPLATE_CONFIG_FILENAME);\n\n try {\n // eslint-disable-next-line @typescript-eslint/no-require-imports\n const templateConfig = require(templateConfigPath) as unknown;\n\n return templateConfigSchema.parse(templateConfig);\n } catch (err) {\n if (isErrorWithCode(err, 'MODULE_NOT_FOUND')) {\n return {\n entryPoint: undefined,\n fields: [],\n packageManager: DEFAULT_PACKAGE_MANAGER,\n type: undefined,\n };\n }\n\n throw err;\n }\n};\n\nconst baseToTemplateData = async ({\n ownerName,\n platformName,\n repoName,\n defaultBranch,\n}: BaseFields) => {\n const [orgName, teamName] = ownerName.split('/');\n\n const port = String(await getRandomPort());\n\n if (!orgName) {\n throw new Error(`Invalid format for owner name: ${ownerName}`);\n }\n\n return {\n orgName,\n ownerName,\n repoName,\n defaultBranch,\n // Use standalone username in `teamName` contexts\n teamName: teamName ?? orgName,\n\n port,\n\n platformName,\n lambdaCdkArchitecture: platformName === 'amd64' ? 'X86_64' : 'ARM_64',\n lambdaServerlessArchitecture:\n platformName === 'amd64' ? 'x86_64' : platformName,\n };\n};\n\nexport const configureFromPrompt = async (): Promise<InitConfig> => {\n const { ownerName, platformName, repoName, defaultBranch } =\n await runForm<BaseFields>(BASE_PROMPT_PROPS);\n log.plain(chalk.cyan(repoName), 'by', chalk.cyan(ownerName));\n\n const templateData = await baseToTemplateData({\n ownerName,\n platformName,\n repoName,\n defaultBranch,\n });\n\n const destinationDir = repoName;\n\n await createDirectory(destinationDir);\n\n log.newline();\n const templateName = await getTemplateName();\n\n const { entryPoint, fields, noSkip, packageManager, type } =\n await cloneTemplate(templateName, destinationDir);\n\n if (fields.length === 0) {\n return {\n destinationDir,\n entryPoint,\n packageManager,\n templateComplete: true,\n templateData,\n templateName,\n type,\n };\n }\n\n const shouldContinue = noSkip ? 
true : await confirmShouldContinue(fields);\n\n if (shouldContinue) {\n log.newline();\n const customAnswers = await runForm({\n choices: fields,\n message: chalk.bold(`Complete ${chalk.cyan(templateName)}:`),\n name: 'customAnswers',\n });\n\n return {\n destinationDir,\n entryPoint,\n packageManager,\n templateComplete: true,\n templateData: { ...templateData, ...customAnswers },\n templateName,\n type,\n };\n }\n\n log.newline();\n log.warn(\n `Resume this later with ${chalk.bold(\n configForPackageManager(packageManager).print.exec,\n 'skuba configure',\n )}.`,\n );\n\n const customAnswers = generatePlaceholders(fields);\n\n return {\n destinationDir,\n entryPoint,\n packageManager,\n templateComplete: false,\n templateData: { ...templateData, ...customAnswers },\n templateName,\n type,\n };\n};\n\nexport const readJSONFromStdIn = async () => {\n let text = '';\n\n await new Promise((resolve) =>\n process.stdin\n .on('data', (chunk) => (text += chunk.toString()))\n .once('end', resolve),\n );\n\n text = text.trim();\n\n if (text === '') {\n log.err('No data from stdin.');\n process.exit(1);\n }\n\n let value: unknown;\n\n try {\n value = JSON.parse(text) as unknown;\n } catch {\n log.err('Invalid JSON from stdin.');\n process.exit(1);\n }\n\n return value;\n};\n\nconst configureFromPipe = async (): Promise<InitConfig> => {\n const value = await readJSONFromStdIn();\n\n const result = initConfigInputSchema.safeParse(value);\n\n if (!result.success) {\n log.err('Invalid data from stdin:');\n log.err(result.error);\n process.exit(1);\n }\n\n const { destinationDir, templateComplete, templateName } = result.data;\n\n const templateData = {\n ...(await baseToTemplateData(result.data.templateData)),\n ...result.data.templateData,\n };\n\n await createDirectory(destinationDir);\n\n const { entryPoint, fields, noSkip, packageManager, type } =\n await cloneTemplate(templateName, destinationDir);\n\n if (!templateComplete) {\n if (noSkip) {\n log.err('Templating for', log.bold(templateName), 'cannot be skipped.');\n process.exit(1);\n }\n\n return {\n ...result.data,\n entryPoint,\n packageManager,\n templateData: {\n ...templateData,\n ...generatePlaceholders(fields),\n },\n type,\n };\n }\n\n const required = fields.map(({ name }) => name);\n\n const provided = new Set(Object.keys(templateData));\n\n const missing = required.filter((name) => !provided.has(name));\n\n if (missing.length > 0) {\n log.err('This template uses the following information:');\n log.newline();\n missing.forEach((name) => log.err(`- ${name}`));\n process.exit(1);\n }\n\n return {\n ...result.data,\n entryPoint,\n packageManager,\n templateData,\n type,\n };\n};\n\nexport const getConfig = () =>\n process.stdin.isTTY ? configureFromPrompt() : configureFromPipe();\n"],
-
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAiB;AAEjB,mBAAkB;AAClB,sBAAsC;AACtC,sBAAe;AAEf,kBAA0B;AAC1B,mBAAgC;AAChC,qBAAoB;AACpB,4BAGO;AACP,kBAA8B;AAC9B,sBAKO;AAEP,iBAAuC;AACvC,qBAOO;AACP,mBAAuD;AAEhD,MAAM,UAAU,CAA6B,UAI9C;AACJ,QAAM,EAAE,SAAS,KAAK,IAAI;AAE1B,QAAM,UAAU,MAAM,QAAQ,IAAI,CAAC,YAAY;AAAA,IAC7C,GAAG;AAAA,IACH,UAAU,CAAC,UAA8B;AACvC,UACE,CAAC,SACD,UAAU,MACT,UAAU,OAAO,WAAW,CAAC,OAAO,cACrC;AACA,eAAO;AAAA,MACT;AAEA,aAAO,OAAO,WAAW,KAAK,KAAK;AAAA,IACrC;AAAA,EACF,EAAE;AAEF,QAAM,OAAO,IAAI,qBAAQ;AAAA,IACvB;AAAA,IACA;AAAA,IACA;AAAA,IACA,UAAU,OAAO,WAAW;AAC1B,YAAM,UAAU,MAAM,QAAQ;AAAA,QAC5B,QAAQ,IAAI,
+
"sourcesContent": ["import path from 'path';\n\nimport chalk from 'chalk';\nimport { Form, type FormChoice } from 'enquirer';\nimport fs from 'fs-extra';\n\nimport { copyFiles } from '../../utils/copy.js';\nimport { isErrorWithCode } from '../../utils/error.js';\nimport { log } from '../../utils/logging.js';\nimport {\n DEFAULT_PACKAGE_MANAGER,\n configForPackageManager,\n} from '../../utils/packageManager.js';\nimport { getRandomPort } from '../../utils/port.js';\nimport {\n TEMPLATE_CONFIG_FILENAME,\n TEMPLATE_DIR,\n type TemplateConfig,\n templateConfigSchema,\n} from '../../utils/template.js';\n\nimport { downloadGitHubTemplate } from './git.js';\nimport {\n BASE_PROMPT_PROPS,\n type BaseFields,\n type Choice,\n GIT_PATH_PROMPT,\n SHOULD_CONTINUE_PROMPT,\n TEMPLATE_PROMPT,\n} from './prompts.js';\nimport { type InitConfig, initConfigInputSchema } from './types.js';\n\nexport const runForm = <T = Record<string, string>>(props: {\n choices: readonly Choice[];\n message: string;\n name: string;\n}) => {\n const { message, name } = props;\n\n const choices = props.choices.map((choice) => ({\n ...choice,\n validate: (value: string | undefined) => {\n if (\n !value ||\n value === '' ||\n (value === choice.initial && !choice.allowInitial)\n ) {\n return 'Form is not complete';\n }\n\n return choice.validate?.(value) ?? true;\n },\n }));\n\n const form = new Form<T>({\n choices,\n message,\n name,\n validate: async (values) => {\n const results = await Promise.all(\n choices.map(async (choice) => choice.validate(values[choice.name])),\n );\n\n return (\n results.find((result) => typeof result === 'string') ??\n results.every((result) => result === true)\n );\n },\n });\n\n return form.run();\n};\n\nconst confirmShouldContinue = async (choices: readonly FormChoice[]) => {\n const fieldsList = choices.map((choice) => choice.message);\n\n log.newline();\n log.plain('This template uses the following information:');\n log.newline();\n fieldsList.forEach((message) => log.subtle(`- ${message}`));\n\n log.newline();\n const result = await SHOULD_CONTINUE_PROMPT.run();\n\n return result === 'yes';\n};\n\nconst createDirectory = async (dir: string) => {\n try {\n await fs.promises.mkdir(dir);\n } catch (err) {\n if (isErrorWithCode(err, 'EEXIST')) {\n log.err(`The directory '${dir}' already exists.`);\n process.exit(1);\n }\n\n throw err;\n }\n};\n\nconst cloneTemplate = async (\n templateName: string,\n destinationDir: string,\n): Promise<TemplateConfig> => {\n const isCustomTemplate = templateName.startsWith('github:');\n\n if (isCustomTemplate) {\n const gitHubPath = templateName.slice('github:'.length);\n\n await downloadGitHubTemplate(gitHubPath, destinationDir);\n } else {\n const templateDir = path.join(TEMPLATE_DIR, templateName);\n\n await copyFiles({\n // assume built-in templates have no extraneous files\n include: () => true,\n sourceRoot: templateDir,\n destinationRoot: destinationDir,\n processors: [],\n // built-in templates have files like _package.json\n stripUnderscorePrefix: true,\n });\n }\n\n const templateConfig = getTemplateConfig(\n path.join(process.cwd(), destinationDir),\n );\n\n if (isCustomTemplate) {\n log.newline();\n log.warn(\n 'You may need to run',\n log.bold(\n configForPackageManager(templateConfig.packageManager).print.exec,\n 'skuba',\n 'configure',\n ),\n 'once this is done.',\n );\n }\n\n return templateConfig;\n};\n\nconst getTemplateName = async () => {\n const templateSelection = await TEMPLATE_PROMPT.run();\n\n if (templateSelection === 'github \u2192') {\n 
const gitHubPath = await GIT_PATH_PROMPT.run();\n return `github:${gitHubPath}`;\n }\n\n return templateSelection;\n};\n\nconst generatePlaceholders = (choices: FormChoice[]) =>\n Object.fromEntries(\n choices.map(({ name }) => [name, `<%- ${name} %>`] as const),\n );\n\nexport const getTemplateConfig = (dir: string): TemplateConfig => {\n const templateConfigPath = path.join(dir, TEMPLATE_CONFIG_FILENAME);\n\n try {\n // eslint-disable-next-line @typescript-eslint/no-require-imports\n const templateConfig = require(templateConfigPath) as unknown;\n\n return templateConfigSchema.parse(templateConfig);\n } catch (err) {\n if (isErrorWithCode(err, 'MODULE_NOT_FOUND')) {\n return {\n entryPoint: undefined,\n fields: [],\n packageManager: DEFAULT_PACKAGE_MANAGER,\n type: undefined,\n };\n }\n\n throw err;\n }\n};\n\nconst baseToTemplateData = async ({\n ownerName,\n platformName,\n repoName,\n defaultBranch,\n}: BaseFields) => {\n const [orgName, teamName] = ownerName.split('/');\n\n const port = String(await getRandomPort());\n\n if (!orgName) {\n throw new Error(`Invalid format for owner name: ${ownerName}`);\n }\n\n return {\n orgName,\n ownerName,\n repoName,\n defaultBranch,\n // Use standalone username in `teamName` contexts\n teamName: teamName ?? orgName,\n\n port,\n\n platformName,\n lambdaCdkArchitecture: platformName === 'amd64' ? 'X86_64' : 'ARM_64',\n lambdaServerlessArchitecture:\n platformName === 'amd64' ? 'x86_64' : platformName,\n };\n};\n\nexport const configureFromPrompt = async (): Promise<InitConfig> => {\n const { ownerName, platformName, repoName, defaultBranch } =\n await runForm<BaseFields>(BASE_PROMPT_PROPS);\n log.plain(chalk.cyan(repoName), 'by', chalk.cyan(ownerName));\n\n const templateData = await baseToTemplateData({\n ownerName,\n platformName,\n repoName,\n defaultBranch,\n });\n\n const destinationDir = repoName;\n\n await createDirectory(destinationDir);\n\n log.newline();\n const templateName = await getTemplateName();\n\n const { entryPoint, fields, noSkip, packageManager, type } =\n await cloneTemplate(templateName, destinationDir);\n\n if (fields.length === 0) {\n return {\n destinationDir,\n entryPoint,\n packageManager,\n templateComplete: true,\n templateData,\n templateName,\n type,\n };\n }\n\n const shouldContinue = noSkip ? 
true : await confirmShouldContinue(fields);\n\n if (shouldContinue) {\n log.newline();\n const customAnswers = await runForm({\n choices: fields,\n message: chalk.bold(`Complete ${chalk.cyan(templateName)}:`),\n name: 'customAnswers',\n });\n\n return {\n destinationDir,\n entryPoint,\n packageManager,\n templateComplete: true,\n templateData: { ...templateData, ...customAnswers },\n templateName,\n type,\n };\n }\n\n log.newline();\n log.warn(\n `Resume this later with ${chalk.bold(\n configForPackageManager(packageManager).print.exec,\n 'skuba configure',\n )}.`,\n );\n\n const customAnswers = generatePlaceholders(fields);\n\n return {\n destinationDir,\n entryPoint,\n packageManager,\n templateComplete: false,\n templateData: { ...templateData, ...customAnswers },\n templateName,\n type,\n };\n};\n\nexport const readJSONFromStdIn = async () => {\n let text = '';\n\n await new Promise((resolve) =>\n process.stdin\n .on('data', (chunk) => (text += chunk.toString()))\n .once('end', resolve),\n );\n\n text = text.trim();\n\n if (text === '') {\n log.err('No data from stdin.');\n process.exit(1);\n }\n\n let value: unknown;\n\n try {\n value = JSON.parse(text) as unknown;\n } catch {\n log.err('Invalid JSON from stdin.');\n process.exit(1);\n }\n\n return value;\n};\n\nconst configureFromPipe = async (): Promise<InitConfig> => {\n const value = await readJSONFromStdIn();\n\n const result = initConfigInputSchema.safeParse(value);\n\n if (!result.success) {\n log.err('Invalid data from stdin:');\n log.err(result.error);\n process.exit(1);\n }\n\n const { destinationDir, templateComplete, templateName } = result.data;\n\n const templateData = {\n ...(await baseToTemplateData(result.data.templateData)),\n ...result.data.templateData,\n };\n\n await createDirectory(destinationDir);\n\n const { entryPoint, fields, noSkip, packageManager, type } =\n await cloneTemplate(templateName, destinationDir);\n\n if (!templateComplete) {\n if (noSkip) {\n log.err('Templating for', log.bold(templateName), 'cannot be skipped.');\n process.exit(1);\n }\n\n return {\n ...result.data,\n entryPoint,\n packageManager,\n templateData: {\n ...templateData,\n ...generatePlaceholders(fields),\n },\n type,\n };\n }\n\n const required = fields.map(({ name }) => name);\n\n const provided = new Set(Object.keys(templateData));\n\n const missing = required.filter((name) => !provided.has(name));\n\n if (missing.length > 0) {\n log.err('This template uses the following information:');\n log.newline();\n missing.forEach((name) => log.err(`- ${name}`));\n process.exit(1);\n }\n\n return {\n ...result.data,\n entryPoint,\n packageManager,\n templateData,\n type,\n };\n};\n\nexport const getConfig = () =>\n process.stdin.isTTY ? configureFromPrompt() : configureFromPipe();\n"],
+
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAiB;AAEjB,mBAAkB;AAClB,sBAAsC;AACtC,sBAAe;AAEf,kBAA0B;AAC1B,mBAAgC;AAChC,qBAAoB;AACpB,4BAGO;AACP,kBAA8B;AAC9B,sBAKO;AAEP,iBAAuC;AACvC,qBAOO;AACP,mBAAuD;AAEhD,MAAM,UAAU,CAA6B,UAI9C;AACJ,QAAM,EAAE,SAAS,KAAK,IAAI;AAE1B,QAAM,UAAU,MAAM,QAAQ,IAAI,CAAC,YAAY;AAAA,IAC7C,GAAG;AAAA,IACH,UAAU,CAAC,UAA8B;AACvC,UACE,CAAC,SACD,UAAU,MACT,UAAU,OAAO,WAAW,CAAC,OAAO,cACrC;AACA,eAAO;AAAA,MACT;AAEA,aAAO,OAAO,WAAW,KAAK,KAAK;AAAA,IACrC;AAAA,EACF,EAAE;AAEF,QAAM,OAAO,IAAI,qBAAQ;AAAA,IACvB;AAAA,IACA;AAAA,IACA;AAAA,IACA,UAAU,OAAO,WAAW;AAC1B,YAAM,UAAU,MAAM,QAAQ;AAAA,QAC5B,QAAQ,IAAI,OAAO,WAAW,OAAO,SAAS,OAAO,OAAO,IAAI,CAAC,CAAC;AAAA,MACpE;AAEA,aACE,QAAQ,KAAK,CAAC,WAAW,OAAO,WAAW,QAAQ,KACnD,QAAQ,MAAM,CAAC,WAAW,WAAW,IAAI;AAAA,IAE7C;AAAA,EACF,CAAC;AAED,SAAO,KAAK,IAAI;AAClB;AAEA,MAAM,wBAAwB,OAAO,YAAmC;AACtE,QAAM,aAAa,QAAQ,IAAI,CAAC,WAAW,OAAO,OAAO;AAEzD,qBAAI,QAAQ;AACZ,qBAAI,MAAM,+CAA+C;AACzD,qBAAI,QAAQ;AACZ,aAAW,QAAQ,CAAC,YAAY,mBAAI,OAAO,KAAK,OAAO,EAAE,CAAC;AAE1D,qBAAI,QAAQ;AACZ,QAAM,SAAS,MAAM,sCAAuB,IAAI;AAEhD,SAAO,WAAW;AACpB;AAEA,MAAM,kBAAkB,OAAO,QAAgB;AAC7C,MAAI;AACF,UAAM,gBAAAA,QAAG,SAAS,MAAM,GAAG;AAAA,EAC7B,SAAS,KAAK;AACZ,YAAI,8BAAgB,KAAK,QAAQ,GAAG;AAClC,yBAAI,IAAI,kBAAkB,GAAG,mBAAmB;AAChD,cAAQ,KAAK,CAAC;AAAA,IAChB;AAEA,UAAM;AAAA,EACR;AACF;AAEA,MAAM,gBAAgB,OACpB,cACA,mBAC4B;AAC5B,QAAM,mBAAmB,aAAa,WAAW,SAAS;AAE1D,MAAI,kBAAkB;AACpB,UAAM,aAAa,aAAa,MAAM,UAAU,MAAM;AAEtD,cAAM,mCAAuB,YAAY,cAAc;AAAA,EACzD,OAAO;AACL,UAAM,cAAc,YAAAC,QAAK,KAAK,8BAAc,YAAY;AAExD,cAAM,uBAAU;AAAA;AAAA,MAEd,SAAS,MAAM;AAAA,MACf,YAAY;AAAA,MACZ,iBAAiB;AAAA,MACjB,YAAY,CAAC;AAAA;AAAA,MAEb,uBAAuB;AAAA,IACzB,CAAC;AAAA,EACH;AAEA,QAAM,iBAAiB;AAAA,IACrB,YAAAA,QAAK,KAAK,QAAQ,IAAI,GAAG,cAAc;AAAA,EACzC;AAEA,MAAI,kBAAkB;AACpB,uBAAI,QAAQ;AACZ,uBAAI;AAAA,MACF;AAAA,MACA,mBAAI;AAAA,YACF,+CAAwB,eAAe,cAAc,EAAE,MAAM;AAAA,QAC7D;AAAA,QACA;AAAA,MACF;AAAA,MACA;AAAA,IACF;AAAA,EACF;AAEA,SAAO;AACT;AAEA,MAAM,kBAAkB,YAAY;AAClC,QAAM,oBAAoB,MAAM,+BAAgB,IAAI;AAEpD,MAAI,sBAAsB,iBAAY;AACpC,UAAM,aAAa,MAAM,+BAAgB,IAAI;AAC7C,WAAO,UAAU,UAAU;AAAA,EAC7B;AAEA,SAAO;AACT;AAEA,MAAM,uBAAuB,CAAC,YAC5B,OAAO;AAAA,EACL,QAAQ,IAAI,CAAC,EAAE,KAAK,MAAM,CAAC,MAAM,OAAO,IAAI,KAAK,CAAU;AAC7D;AAEK,MAAM,oBAAoB,CAAC,QAAgC;AAChE,QAAM,qBAAqB,YAAAA,QAAK,KAAK,KAAK,wCAAwB;AAElE,MAAI;AAEF,UAAM,iBAAiB,QAAQ,kBAAkB;AAEjD,WAAO,qCAAqB,MAAM,cAAc;AAAA,EAClD,SAAS,KAAK;AACZ,YAAI,8BAAgB,KAAK,kBAAkB,GAAG;AAC5C,aAAO;AAAA,QACL,YAAY;AAAA,QACZ,QAAQ,CAAC;AAAA,QACT,gBAAgB;AAAA,QAChB,MAAM;AAAA,MACR;AAAA,IACF;AAEA,UAAM;AAAA,EACR;AACF;AAEA,MAAM,qBAAqB,OAAO;AAAA,EAChC;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF,MAAkB;AAChB,QAAM,CAAC,SAAS,QAAQ,IAAI,UAAU,MAAM,GAAG;AAE/C,QAAM,OAAO,OAAO,UAAM,2BAAc,CAAC;AAEzC,MAAI,CAAC,SAAS;AACZ,UAAM,IAAI,MAAM,kCAAkC,SAAS,EAAE;AAAA,EAC/D;AAEA,SAAO;AAAA,IACL;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA;AAAA,IAEA,UAAU,YAAY;AAAA,IAEtB;AAAA,IAEA;AAAA,IACA,uBAAuB,iBAAiB,UAAU,WAAW;AAAA,IAC7D,8BACE,iBAAiB,UAAU,WAAW;AAAA,EAC1C;AACF;AAEO,MAAM,sBAAsB,YAAiC;AAClE,QAAM,EAAE,WAAW,cAAc,UAAU,cAAc,IACvD,MAAM,QAAoB,gCAAiB;AAC7C,qBAAI,MAAM,aAAAC,QAAM,KAAK,QAAQ,GAAG,MAAM,aAAAA,QAAM,KAAK,SAAS,CAAC;AAE3D,QAAM,eAAe,MAAM,mBAAmB;AAAA,IAC5C;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,CAAC;AAED,QAAM,iBAAiB;AAEvB,QAAM,gBAAgB,cAAc;AAEpC,qBAAI,QAAQ;AACZ,QAAM,eAAe,MAAM,gBAAgB;AAE3C,QAAM,EAAE,YAAY,QAAQ,QAAQ,gBAAgB,KAAK,IACvD,MAAM,cAAc,cAAc,cAAc;AAElD,MAAI,OAAO,WAAW,GAAG;AACvB,WAAO;AAAA,MACL;AAAA,MACA;AAAA,MACA;AAAA,MACA,kBAAkB;AAAA,MAClB;AAAA,MACA;AAAA,MACA;AAAA,IACF;AAAA,EACF;AAEA,QAAM,iBAAiB,SAAS,OAAO,MAAM,sBAAsB,MAAM;AAEzE,MAAI,gBAAgB;AAClB,uBAAI,QAAQ;AACZ,UAAMC,iBAAgB
,MAAM,QAAQ;AAAA,MAClC,SAAS;AAAA,MACT,SAAS,aAAAD,QAAM,KAAK,YAAY,aAAAA,QAAM,KAAK,YAAY,CAAC,GAAG;AAAA,MAC3D,MAAM;AAAA,IACR,CAAC;AAED,WAAO;AAAA,MACL;AAAA,MACA;AAAA,MACA;AAAA,MACA,kBAAkB;AAAA,MAClB,cAAc,EAAE,GAAG,cAAc,GAAGC,eAAc;AAAA,MAClD;AAAA,MACA;AAAA,IACF;AAAA,EACF;AAEA,qBAAI,QAAQ;AACZ,qBAAI;AAAA,IACF,0BAA0B,aAAAD,QAAM;AAAA,UAC9B,+CAAwB,cAAc,EAAE,MAAM;AAAA,MAC9C;AAAA,IACF,CAAC;AAAA,EACH;AAEA,QAAM,gBAAgB,qBAAqB,MAAM;AAEjD,SAAO;AAAA,IACL;AAAA,IACA;AAAA,IACA;AAAA,IACA,kBAAkB;AAAA,IAClB,cAAc,EAAE,GAAG,cAAc,GAAG,cAAc;AAAA,IAClD;AAAA,IACA;AAAA,EACF;AACF;AAEO,MAAM,oBAAoB,YAAY;AAC3C,MAAI,OAAO;AAEX,QAAM,IAAI;AAAA,IAAQ,CAAC,YACjB,QAAQ,MACL,GAAG,QAAQ,CAAC,UAAW,QAAQ,MAAM,SAAS,CAAE,EAChD,KAAK,OAAO,OAAO;AAAA,EACxB;AAEA,SAAO,KAAK,KAAK;AAEjB,MAAI,SAAS,IAAI;AACf,uBAAI,IAAI,qBAAqB;AAC7B,YAAQ,KAAK,CAAC;AAAA,EAChB;AAEA,MAAI;AAEJ,MAAI;AACF,YAAQ,KAAK,MAAM,IAAI;AAAA,EACzB,QAAQ;AACN,uBAAI,IAAI,0BAA0B;AAClC,YAAQ,KAAK,CAAC;AAAA,EAChB;AAEA,SAAO;AACT;AAEA,MAAM,oBAAoB,YAAiC;AACzD,QAAM,QAAQ,MAAM,kBAAkB;AAEtC,QAAM,SAAS,mCAAsB,UAAU,KAAK;AAEpD,MAAI,CAAC,OAAO,SAAS;AACnB,uBAAI,IAAI,0BAA0B;AAClC,uBAAI,IAAI,OAAO,KAAK;AACpB,YAAQ,KAAK,CAAC;AAAA,EAChB;AAEA,QAAM,EAAE,gBAAgB,kBAAkB,aAAa,IAAI,OAAO;AAElE,QAAM,eAAe;AAAA,IACnB,GAAI,MAAM,mBAAmB,OAAO,KAAK,YAAY;AAAA,IACrD,GAAG,OAAO,KAAK;AAAA,EACjB;AAEA,QAAM,gBAAgB,cAAc;AAEpC,QAAM,EAAE,YAAY,QAAQ,QAAQ,gBAAgB,KAAK,IACvD,MAAM,cAAc,cAAc,cAAc;AAElD,MAAI,CAAC,kBAAkB;AACrB,QAAI,QAAQ;AACV,yBAAI,IAAI,kBAAkB,mBAAI,KAAK,YAAY,GAAG,oBAAoB;AACtE,cAAQ,KAAK,CAAC;AAAA,IAChB;AAEA,WAAO;AAAA,MACL,GAAG,OAAO;AAAA,MACV;AAAA,MACA;AAAA,MACA,cAAc;AAAA,QACZ,GAAG;AAAA,QACH,GAAG,qBAAqB,MAAM;AAAA,MAChC;AAAA,MACA;AAAA,IACF;AAAA,EACF;AAEA,QAAM,WAAW,OAAO,IAAI,CAAC,EAAE,KAAK,MAAM,IAAI;AAE9C,QAAM,WAAW,IAAI,IAAI,OAAO,KAAK,YAAY,CAAC;AAElD,QAAM,UAAU,SAAS,OAAO,CAAC,SAAS,CAAC,SAAS,IAAI,IAAI,CAAC;AAE7D,MAAI,QAAQ,SAAS,GAAG;AACtB,uBAAI,IAAI,+CAA+C;AACvD,uBAAI,QAAQ;AACZ,YAAQ,QAAQ,CAAC,SAAS,mBAAI,IAAI,KAAK,IAAI,EAAE,CAAC;AAC9C,YAAQ,KAAK,CAAC;AAAA,EAChB;AAEA,SAAO;AAAA,IACL,GAAG,OAAO;AAAA,IACV;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF;AACF;AAEO,MAAM,YAAY,MACvB,QAAQ,MAAM,QAAQ,oBAAoB,IAAI,kBAAkB;",
"names": ["fs", "path", "chalk", "customAnswers"]
}
package/lib/cli/lint/internal.js
CHANGED
@@ -68,7 +68,7 @@ const lintConcurrently = async (mode, logger, additionalFlags) => {
results.push(
...await Promise.all(
lintGroup.map(
- ({ name, lint }) => lint(
+ async ({ name, lint }) => lint(
mode,
(0, import_logging.childLogger)(logger, { suffixes: [import_chalk.default.dim(name)] }),
additionalFlags
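The hunk above wraps the `lintGroup.map` callback in `async`, so every element handed to `Promise.all` is a promise even when a lint function throws before returning one. A minimal sketch of the behavioural difference, using illustrative task names rather than skuba's code:

```ts
// A task that throws before ever returning a promise, and one that logs when it starts.
type Task = () => Promise<string>;

const throwsSynchronously: Task = () => {
  throw new Error('boom'); // thrown while the iterable is still being built
};

const logsWhenStarted: Task = async () => {
  console.log('second task started');
  return 'ok';
};

const tasks: Task[] = [throwsSynchronously, logsWhenStarted];

// Without `async`, the throw escapes from `.map()` itself: the second task is
// never started and Promise.all never receives a rejection to aggregate.
const runWithoutAsync = async () => Promise.all(tasks.map((task) => task()));

// With `async`, every element of the iterable is a promise. All tasks start,
// and the error surfaces as a rejection of the Promise.all result.
const runWithAsync = async () => Promise.all(tasks.map(async (task) => task()));

const main = async () => {
  await runWithoutAsync().catch((err: Error) =>
    console.error('without async:', err.message),
  );
  await runWithAsync().catch((err: Error) =>
    console.error('with async:', err.message),
  );
};

void main();
```

Only the `async` variant prints `second task started`: the plain callback aborts the mapping at the first synchronous throw, so the remaining lints never run.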
@@ -1,7 +1,7 @@
{
"version": 3,
"sources": ["../../../src/cli/lint/internal.ts"],
-
"sourcesContent": ["import { inspect } from 'util';\n\nimport chalk from 'chalk';\n\nimport { type Logger, childLogger, createLogger } from '../../utils/logging.js';\n\nimport { tryDetectBadCodeowners } from './internalLints/detectBadCodeowners.js';\nimport { noSkubaTemplateJs } from './internalLints/noSkubaTemplateJs.js';\nimport { tryRefreshConfigFiles } from './internalLints/refreshConfigFiles.js';\nimport { upgradeSkuba } from './internalLints/upgrade/index.js';\nimport type { Input } from './types.js';\n\nexport type InternalLintResult = {\n ok: boolean;\n fixable: boolean;\n annotations?: Array<{\n start_line?: number;\n end_line?: number;\n path: string;\n message: string;\n }>;\n};\n\nconst lints: Array<\n Array<{\n name: string;\n lint: (\n mode: 'format' | 'lint',\n logger: Logger,\n additionalFlags: string[],\n ) => Promise<InternalLintResult>;\n }>\n> = [\n // Run upgradeSkuba first, in particular before refreshConfigFiles, for npmrc handling\n [{ name: 'upgrade-skuba', lint: upgradeSkuba }],\n [\n { name: 'no-skuba-template-js', lint: noSkubaTemplateJs },\n { name: 'refresh-config-files', lint: tryRefreshConfigFiles },\n { name: 'detect-bad-codeowners', lint: tryDetectBadCodeowners },\n ],\n];\n\nconst lintSerially = async (\n mode: 'format' | 'lint',\n logger: Logger,\n additionalFlags: string[],\n) => {\n const results: InternalLintResult[] = [];\n for (const lintGroup of lints) {\n for (const { lint, name } of lintGroup) {\n results.push(\n await lint(\n mode,\n childLogger(logger, { suffixes: [chalk.dim(name)] }),\n additionalFlags,\n ),\n );\n }\n }\n return results;\n};\n\nconst lintConcurrently = async (\n mode: 'format' | 'lint',\n logger: Logger,\n additionalFlags: string[],\n) => {\n const results: InternalLintResult[] = [];\n\n for (const lintGroup of lints) {\n results.push(\n ...(await Promise.all(\n lintGroup.map(({ name, lint }) =>\n lint(\n mode,\n childLogger(logger, { suffixes: [chalk.dim(name)] }),\n additionalFlags,\n ),\n ),\n )),\n );\n }\n\n return results;\n};\n\nconst selectLintFunction = (input?: Input) => {\n const isSerial = input?.debug || input?.serial;\n return isSerial ? lintSerially : lintConcurrently;\n};\n\nexport const internalLint = async (\n mode: 'format' | 'lint',\n input?: Input,\n): Promise<InternalLintResult> => {\n const start = process.hrtime.bigint();\n const logger = createLogger({\n debug: input?.debug ?? false,\n prefixes: [...(mode === 'lint' ? [chalk.blueBright('skuba \u2502')] : [])],\n });\n\n try {\n const lint = selectLintFunction(input);\n const results = await lint(mode, logger, input?.additionalFlags ?? []);\n const result = combineResults(results);\n const end = process.hrtime.bigint();\n logger.plain(`Processed skuba lints in ${logger.timing(start, end)}.`);\n return result;\n } catch (err) {\n logger.err(logger.bold('Failed to run skuba lints.'));\n logger.subtle(inspect(err));\n\n process.exitCode = 1;\n\n return { ok: false, fixable: false, annotations: [] };\n }\n};\n\nconst combineResults = (results: InternalLintResult[]): InternalLintResult =>\n results.reduce(\n (cur, next) => ({\n ok: cur.ok && next.ok,\n fixable: cur.fixable || next.fixable,\n annotations: [...(cur.annotations ?? []), ...(next.annotations ?? [])],\n }),\n { ok: true, fixable: false },\n );\n"],
-
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAwB;AAExB,mBAAkB;AAElB,qBAAuD;AAEvD,iCAAuC;AACvC,+BAAkC;AAClC,gCAAsC;AACtC,qBAA6B;AAc7B,MAAM,QASF;AAAA;AAAA,EAEF,CAAC,EAAE,MAAM,iBAAiB,MAAM,4BAAa,CAAC;AAAA,EAC9C;AAAA,IACE,EAAE,MAAM,wBAAwB,MAAM,2CAAkB;AAAA,IACxD,EAAE,MAAM,wBAAwB,MAAM,gDAAsB;AAAA,IAC5D,EAAE,MAAM,yBAAyB,MAAM,kDAAuB;AAAA,EAChE;AACF;AAEA,MAAM,eAAe,OACnB,MACA,QACA,oBACG;AACH,QAAM,UAAgC,CAAC;AACvC,aAAW,aAAa,OAAO;AAC7B,eAAW,EAAE,MAAM,KAAK,KAAK,WAAW;AACtC,cAAQ;AAAA,QACN,MAAM;AAAA,UACJ;AAAA,cACA,4BAAY,QAAQ,EAAE,UAAU,CAAC,aAAAA,QAAM,IAAI,IAAI,CAAC,EAAE,CAAC;AAAA,UACnD;AAAA,QACF;AAAA,MACF;AAAA,IACF;AAAA,EACF;AACA,SAAO;AACT;AAEA,MAAM,mBAAmB,OACvB,MACA,QACA,oBACG;AACH,QAAM,UAAgC,CAAC;AAEvC,aAAW,aAAa,OAAO;AAC7B,YAAQ;AAAA,MACN,GAAI,MAAM,QAAQ;AAAA,QAChB,UAAU;AAAA,UAAI,
+
"sourcesContent": ["import { inspect } from 'util';\n\nimport chalk from 'chalk';\n\nimport { type Logger, childLogger, createLogger } from '../../utils/logging.js';\n\nimport { tryDetectBadCodeowners } from './internalLints/detectBadCodeowners.js';\nimport { noSkubaTemplateJs } from './internalLints/noSkubaTemplateJs.js';\nimport { tryRefreshConfigFiles } from './internalLints/refreshConfigFiles.js';\nimport { upgradeSkuba } from './internalLints/upgrade/index.js';\nimport type { Input } from './types.js';\n\nexport type InternalLintResult = {\n ok: boolean;\n fixable: boolean;\n annotations?: Array<{\n start_line?: number;\n end_line?: number;\n path: string;\n message: string;\n }>;\n};\n\nconst lints: Array<\n Array<{\n name: string;\n lint: (\n mode: 'format' | 'lint',\n logger: Logger,\n additionalFlags: string[],\n ) => Promise<InternalLintResult>;\n }>\n> = [\n // Run upgradeSkuba first, in particular before refreshConfigFiles, for npmrc handling\n [{ name: 'upgrade-skuba', lint: upgradeSkuba }],\n [\n { name: 'no-skuba-template-js', lint: noSkubaTemplateJs },\n { name: 'refresh-config-files', lint: tryRefreshConfigFiles },\n { name: 'detect-bad-codeowners', lint: tryDetectBadCodeowners },\n ],\n];\n\nconst lintSerially = async (\n mode: 'format' | 'lint',\n logger: Logger,\n additionalFlags: string[],\n) => {\n const results: InternalLintResult[] = [];\n for (const lintGroup of lints) {\n for (const { lint, name } of lintGroup) {\n results.push(\n await lint(\n mode,\n childLogger(logger, { suffixes: [chalk.dim(name)] }),\n additionalFlags,\n ),\n );\n }\n }\n return results;\n};\n\nconst lintConcurrently = async (\n mode: 'format' | 'lint',\n logger: Logger,\n additionalFlags: string[],\n) => {\n const results: InternalLintResult[] = [];\n\n for (const lintGroup of lints) {\n results.push(\n ...(await Promise.all(\n lintGroup.map(async ({ name, lint }) =>\n lint(\n mode,\n childLogger(logger, { suffixes: [chalk.dim(name)] }),\n additionalFlags,\n ),\n ),\n )),\n );\n }\n\n return results;\n};\n\nconst selectLintFunction = (input?: Input) => {\n const isSerial = input?.debug || input?.serial;\n return isSerial ? lintSerially : lintConcurrently;\n};\n\nexport const internalLint = async (\n mode: 'format' | 'lint',\n input?: Input,\n): Promise<InternalLintResult> => {\n const start = process.hrtime.bigint();\n const logger = createLogger({\n debug: input?.debug ?? false,\n prefixes: [...(mode === 'lint' ? [chalk.blueBright('skuba \u2502')] : [])],\n });\n\n try {\n const lint = selectLintFunction(input);\n const results = await lint(mode, logger, input?.additionalFlags ?? []);\n const result = combineResults(results);\n const end = process.hrtime.bigint();\n logger.plain(`Processed skuba lints in ${logger.timing(start, end)}.`);\n return result;\n } catch (err) {\n logger.err(logger.bold('Failed to run skuba lints.'));\n logger.subtle(inspect(err));\n\n process.exitCode = 1;\n\n return { ok: false, fixable: false, annotations: [] };\n }\n};\n\nconst combineResults = (results: InternalLintResult[]): InternalLintResult =>\n results.reduce(\n (cur, next) => ({\n ok: cur.ok && next.ok,\n fixable: cur.fixable || next.fixable,\n annotations: [...(cur.annotations ?? []), ...(next.annotations ?? [])],\n }),\n { ok: true, fixable: false },\n );\n"],
+
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAwB;AAExB,mBAAkB;AAElB,qBAAuD;AAEvD,iCAAuC;AACvC,+BAAkC;AAClC,gCAAsC;AACtC,qBAA6B;AAc7B,MAAM,QASF;AAAA;AAAA,EAEF,CAAC,EAAE,MAAM,iBAAiB,MAAM,4BAAa,CAAC;AAAA,EAC9C;AAAA,IACE,EAAE,MAAM,wBAAwB,MAAM,2CAAkB;AAAA,IACxD,EAAE,MAAM,wBAAwB,MAAM,gDAAsB;AAAA,IAC5D,EAAE,MAAM,yBAAyB,MAAM,kDAAuB;AAAA,EAChE;AACF;AAEA,MAAM,eAAe,OACnB,MACA,QACA,oBACG;AACH,QAAM,UAAgC,CAAC;AACvC,aAAW,aAAa,OAAO;AAC7B,eAAW,EAAE,MAAM,KAAK,KAAK,WAAW;AACtC,cAAQ;AAAA,QACN,MAAM;AAAA,UACJ;AAAA,cACA,4BAAY,QAAQ,EAAE,UAAU,CAAC,aAAAA,QAAM,IAAI,IAAI,CAAC,EAAE,CAAC;AAAA,UACnD;AAAA,QACF;AAAA,MACF;AAAA,IACF;AAAA,EACF;AACA,SAAO;AACT;AAEA,MAAM,mBAAmB,OACvB,MACA,QACA,oBACG;AACH,QAAM,UAAgC,CAAC;AAEvC,aAAW,aAAa,OAAO;AAC7B,YAAQ;AAAA,MACN,GAAI,MAAM,QAAQ;AAAA,QAChB,UAAU;AAAA,UAAI,OAAO,EAAE,MAAM,KAAK,MAChC;AAAA,YACE;AAAA,gBACA,4BAAY,QAAQ,EAAE,UAAU,CAAC,aAAAA,QAAM,IAAI,IAAI,CAAC,EAAE,CAAC;AAAA,YACnD;AAAA,UACF;AAAA,QACF;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAEA,SAAO;AACT;AAEA,MAAM,qBAAqB,CAAC,UAAkB;AAC5C,QAAM,WAAW,OAAO,SAAS,OAAO;AACxC,SAAO,WAAW,eAAe;AACnC;AAEO,MAAM,eAAe,OAC1B,MACA,UACgC;AAChC,QAAM,QAAQ,QAAQ,OAAO,OAAO;AACpC,QAAM,aAAS,6BAAa;AAAA,IAC1B,OAAO,OAAO,SAAS;AAAA,IACvB,UAAU,CAAC,GAAI,SAAS,SAAS,CAAC,aAAAA,QAAM,WAAW,iBAAY,CAAC,IAAI,CAAC,CAAE;AAAA,EACzE,CAAC;AAED,MAAI;AACF,UAAM,OAAO,mBAAmB,KAAK;AACrC,UAAM,UAAU,MAAM,KAAK,MAAM,QAAQ,OAAO,mBAAmB,CAAC,CAAC;AACrE,UAAM,SAAS,eAAe,OAAO;AACrC,UAAM,MAAM,QAAQ,OAAO,OAAO;AAClC,WAAO,MAAM,4BAA4B,OAAO,OAAO,OAAO,GAAG,CAAC,GAAG;AACrE,WAAO;AAAA,EACT,SAAS,KAAK;AACZ,WAAO,IAAI,OAAO,KAAK,4BAA4B,CAAC;AACpD,WAAO,WAAO,qBAAQ,GAAG,CAAC;AAE1B,YAAQ,WAAW;AAEnB,WAAO,EAAE,IAAI,OAAO,SAAS,OAAO,aAAa,CAAC,EAAE;AAAA,EACtD;AACF;AAEA,MAAM,iBAAiB,CAAC,YACtB,QAAQ;AAAA,EACN,CAAC,KAAK,UAAU;AAAA,IACd,IAAI,IAAI,MAAM,KAAK;AAAA,IACnB,SAAS,IAAI,WAAW,KAAK;AAAA,IAC7B,aAAa,CAAC,GAAI,IAAI,eAAe,CAAC,GAAI,GAAI,KAAK,eAAe,CAAC,CAAE;AAAA,EACvE;AAAA,EACA,EAAE,IAAI,MAAM,SAAS,MAAM;AAC7B;",
"names": ["chalk"]
}
@@ -1,4 +1,4 @@
- import type
+ import type { ReadResult } from 'read-pkg-up';
import type { Logger } from '../../../../utils/logging.js';
import { type PackageManagerConfig } from '../../../../utils/packageManager.js';
import type { InternalLintResult } from '../../internal.js';
@@ -15,7 +15,7 @@ export type PatchReturnType = {
};
export type PatchConfig = {
mode: 'format' | 'lint';
- manifest:
+ manifest: ReadResult;
packageManager: PackageManagerConfig;
dir?: string;
};
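`PatchConfig.manifest` is now explicitly typed as `ReadResult` from `read-pkg-up`, i.e. the parsed `packageJson` together with the path of the manifest that was located. A rough sketch of what a patch can rely on from that shape (the helper name and field usage are illustrative, not taken from a specific skuba patch):

```ts
import type { ReadResult } from 'read-pkg-up';

// ReadResult is approximately { packageJson: NormalizedPackageJson; path: string }.
// A hypothetical helper a patch could use now that the manifest is strongly typed.
const describeManifest = (manifest: ReadResult): string =>
  `${manifest.packageJson.name}@${manifest.packageJson.version} (${manifest.path})`;
```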
@@ -1,7 +1,7 @@
{
"version": 3,
"sources": ["../../../../../src/cli/lint/internalLints/upgrade/index.ts"],
-
"sourcesContent": ["import path from 'path';\n\nimport fs from 'fs-extra';\nimport type
+
"sourcesContent": ["import path from 'path';\n\nimport fs from 'fs-extra';\nimport type { ReadResult } from 'read-pkg-up';\nimport { gte, sort } from 'semver';\n\nimport type { Logger } from '../../../../utils/logging.js';\nimport { getConsumerManifest } from '../../../../utils/manifest.js';\nimport {\n type PackageManagerConfig,\n detectPackageManager,\n} from '../../../../utils/packageManager.js';\nimport { getSkubaVersion } from '../../../../utils/version.js';\nimport { formatPackage } from '../../../configure/processing/package.js';\nimport type { SkubaPackageJson } from '../../../init/writePackageJson.js';\nimport type { InternalLintResult } from '../../internal.js';\n\nexport type Patches = Patch[];\nexport type Patch = {\n apply: PatchFunction;\n description: string;\n};\nexport type PatchReturnType =\n | { result: 'apply' }\n | { result: 'skip'; reason?: string };\n\nexport type PatchConfig = {\n mode: 'format' | 'lint';\n manifest: ReadResult;\n packageManager: PackageManagerConfig;\n dir?: string;\n};\n\nexport type PatchFunction = (config: PatchConfig) => Promise<PatchReturnType>;\n\nconst getPatches = async (manifestVersion: string): Promise<Patches> => {\n const patches = await fs.readdir(path.join(__dirname, 'patches'), {\n withFileTypes: true,\n });\n\n // The patches are sorted by the version they were added from.\n // Only return patches that are newer or equal to the current version.\n const patchesForVersion = sort(\n patches.flatMap((patch) =>\n // Is a directory rather than a JavaScript source file\n patch.isDirectory() &&\n // Has been added since the last patch run on the project\n gte(patch.name, manifestVersion)\n ? patch.name\n : [],\n ),\n );\n\n return (await Promise.all(patchesForVersion.map(resolvePatches))).flat();\n};\n\nconst fileExtensions = ['js', 'ts'];\n\n// Hack to allow our Jest environment/transform to resolve the patches\n// In normal scenarios this will resolve immediately after the .js import\nconst resolvePatches = async (version: string): Promise<Patches> => {\n for (const extension of fileExtensions) {\n try {\n // eslint-disable-next-line @typescript-eslint/no-unsafe-return, @typescript-eslint/no-unsafe-member-access\n return (await import(`./patches/${version}/index.${extension}`)).patches;\n } catch {\n // Ignore\n }\n }\n throw new Error(`Could not resolve patches for ${version}`);\n};\n\nexport const upgradeSkuba = async (\n mode: 'lint' | 'format',\n logger: Logger,\n additionalFlags: string[] = [],\n): Promise<InternalLintResult> => {\n const [currentVersion, manifest, packageManager] = await Promise.all([\n getSkubaVersion(),\n getConsumerManifest(),\n detectPackageManager(),\n ]);\n\n if (!manifest) {\n throw new Error('Could not find a package json for this project');\n }\n\n manifest.packageJson.skuba ??= { version: '1.0.0' };\n\n const manifestVersion = additionalFlags.includes('--force-apply-all-patches')\n ? '1.0.0'\n : (manifest.packageJson.skuba as SkubaPackageJson).version;\n\n // We are up to date, skip patches\n if (gte(manifestVersion, currentVersion)) {\n return { ok: true, fixable: false };\n }\n\n const patches = await getPatches(manifestVersion);\n // No patches to apply even if version out of date. Early exit to avoid unnecessary commits.\n if (patches.length === 0) {\n return { ok: true, fixable: false };\n }\n\n if (mode === 'lint') {\n const results = await Promise.all(\n patches.map(\n async ({ apply }) =>\n await apply({\n mode,\n manifest,\n packageManager,\n }),\n ),\n );\n\n // No patches are applicable. 
Early exit to avoid unnecessary commits.\n if (results.every(({ result }) => result === 'skip')) {\n return { ok: true, fixable: false };\n }\n\n logger.warn(\n `skuba has patches to apply. Run ${logger.bold(\n packageManager.print.exec,\n 'skuba',\n 'format',\n )} to run them.`,\n );\n\n return {\n ok: false,\n fixable: true,\n annotations: [\n {\n // package.json as likely skuba version has changed\n // TODO: locate the \"skuba\": {} config in the package.json and annotate on the version property\n path: manifest.path,\n message: `skuba has patches to apply. Run ${packageManager.print.exec} skuba format to run them.`,\n },\n ],\n };\n }\n\n logger.plain('Updating skuba...');\n\n // Run these in series in case a subsequent patch relies on a previous patch\n for (const { apply, description } of patches) {\n const result = await apply({\n mode,\n manifest,\n packageManager,\n });\n logger.newline();\n if (result.result === 'skip') {\n logger.plain(\n `Patch skipped: ${description}${\n result.reason ? ` - ${result.reason}` : ''\n }`,\n );\n } else {\n logger.plain(`Patch applied: ${description}`);\n }\n }\n\n (manifest.packageJson.skuba as SkubaPackageJson).version = currentVersion;\n\n const updatedPackageJson = await formatPackage(manifest.packageJson);\n\n await fs.writeFile(manifest.path, updatedPackageJson);\n logger.newline();\n logger.plain('skuba update complete.');\n logger.newline();\n\n return {\n ok: true,\n fixable: false,\n };\n};\n"],
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAiB;AAEjB,sBAAe;AAEf,oBAA0B;AAG1B,sBAAoC;AACpC,4BAGO;AACP,qBAAgC;AAChC,qBAA8B;AAsB9B,MAAM,aAAa,OAAO,oBAA8C;AACtE,QAAM,UAAU,MAAM,gBAAAA,QAAG,QAAQ,YAAAC,QAAK,KAAK,WAAW,SAAS,GAAG;AAAA,IAChE,eAAe;AAAA,EACjB,CAAC;AAID,QAAM,wBAAoB;AAAA,IACxB,QAAQ;AAAA,MAAQ,CAAC;AAAA;AAAA,QAEf,MAAM,YAAY;AAAA,YAElB,mBAAI,MAAM,MAAM,eAAe,IAC3B,MAAM,OACN,CAAC;AAAA;AAAA,IACP;AAAA,EACF;AAEA,UAAQ,MAAM,QAAQ,IAAI,kBAAkB,IAAI,cAAc,CAAC,GAAG,KAAK;AACzE;AAEA,MAAM,iBAAiB,CAAC,MAAM,IAAI;AAIlC,MAAM,iBAAiB,OAAO,YAAsC;AAClE,aAAW,aAAa,gBAAgB;AACtC,QAAI;AAEF,cAAQ,MAAM,OAAO,aAAa,OAAO,UAAU,SAAS,KAAK;AAAA,IACnE,QAAQ;AAAA,IAER;AAAA,EACF;AACA,QAAM,IAAI,MAAM,iCAAiC,OAAO,EAAE;AAC5D;AAEO,MAAM,eAAe,OAC1B,MACA,QACA,kBAA4B,CAAC,MACG;AAChC,QAAM,CAAC,gBAAgB,UAAU,cAAc,IAAI,MAAM,QAAQ,IAAI;AAAA,QACnE,gCAAgB;AAAA,QAChB,qCAAoB;AAAA,QACpB,4CAAqB;AAAA,EACvB,CAAC;AAED,MAAI,CAAC,UAAU;AACb,UAAM,IAAI,MAAM,gDAAgD;AAAA,EAClE;AAEA,WAAS,YAAY,UAAU,EAAE,SAAS,QAAQ;AAElD,QAAM,kBAAkB,gBAAgB,SAAS,2BAA2B,IACxE,UACC,SAAS,YAAY,MAA2B;AAGrD,UAAI,mBAAI,iBAAiB,cAAc,GAAG;AACxC,WAAO,EAAE,IAAI,MAAM,SAAS,MAAM;AAAA,EACpC;AAEA,QAAM,UAAU,MAAM,WAAW,eAAe;AAEhD,MAAI,QAAQ,WAAW,GAAG;AACxB,WAAO,EAAE,IAAI,MAAM,SAAS,MAAM;AAAA,EACpC;AAEA,MAAI,SAAS,QAAQ;AACnB,UAAM,UAAU,MAAM,QAAQ;AAAA,MAC5B,QAAQ;AAAA,QACN,OAAO,EAAE,MAAM,MACb,MAAM,MAAM;AAAA,UACV;AAAA,UACA;AAAA,UACA;AAAA,QACF,CAAC;AAAA,MACL;AAAA,IACF;AAGA,QAAI,QAAQ,MAAM,CAAC,EAAE,OAAO,MAAM,WAAW,MAAM,GAAG;AACpD,aAAO,EAAE,IAAI,MAAM,SAAS,MAAM;AAAA,IACpC;AAEA,WAAO;AAAA,MACL,mCAAmC,OAAO;AAAA,QACxC,eAAe,MAAM;AAAA,QACrB;AAAA,QACA;AAAA,MACF,CAAC;AAAA,IACH;AAEA,WAAO;AAAA,MACL,IAAI;AAAA,MACJ,SAAS;AAAA,MACT,aAAa;AAAA,QACX;AAAA;AAAA;AAAA,UAGE,MAAM,SAAS;AAAA,UACf,SAAS,mCAAmC,eAAe,MAAM,IAAI;AAAA,QACvE;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAEA,SAAO,MAAM,mBAAmB;AAGhC,aAAW,EAAE,OAAO,YAAY,KAAK,SAAS;AAC5C,UAAM,SAAS,MAAM,MAAM;AAAA,MACzB;AAAA,MACA;AAAA,MACA;AAAA,IACF,CAAC;AACD,WAAO,QAAQ;AACf,QAAI,OAAO,WAAW,QAAQ;AAC5B,aAAO;AAAA,QACL,kBAAkB,WAAW,GAC3B,OAAO,SAAS,MAAM,OAAO,MAAM,KAAK,EAC1C;AAAA,MACF;AAAA,IACF,OAAO;AACL,aAAO,MAAM,kBAAkB,WAAW,EAAE;AAAA,IAC9C;AAAA,EACF;AAEA,EAAC,SAAS,YAAY,MAA2B,UAAU;AAE3D,QAAM,qBAAqB,UAAM,8BAAc,SAAS,WAAW;AAEnE,QAAM,gBAAAD,QAAG,UAAU,SAAS,MAAM,kBAAkB;AACpD,SAAO,QAAQ;AACf,SAAO,MAAM,wBAAwB;AACrC,SAAO,QAAQ;AAEf,SAAO;AAAA,IACL,IAAI;AAAA,IACJ,SAAS;AAAA,EACX;AACF;",
"names": ["fs", "path"]
}
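The `upgradeSkuba` source embedded in the map above gates patches by semver: it keeps only patch directories at or above the version recorded in the consumer's `package.json` `skuba` block and applies them oldest-first. A condensed sketch of that selection (the directory names below are illustrative, not the package's actual patch list):

```ts
import { gte, sort } from 'semver';

const patchDirectories = ['7.3.1', '8.2.1', '12.0.2'];

// Keep only patches introduced at or after the version recorded in the
// consumer's package.json "skuba" block, ordered oldest-first.
const selectPatches = (manifestVersion: string): string[] =>
  sort(patchDirectories.filter((name) => gte(name, manifestVersion)));

console.log(selectPatches('8.0.0')); // expected: [ '8.2.1', '12.0.2' ]
```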
@@ -61,7 +61,7 @@ const tryReadFilesSequentially = async (filepaths) => {
}
return;
};
- const IMPORT_REGEX = /import\s+(?:\{\s*(\w*[Ll]ogger)(?:\s+as\s+(\w*[Ll]ogger))?\s*\}|(\w*[Ll]ogger))\s+from\s+['"][^'"]+\/(?:logger|logging)(?:\.
+ const IMPORT_REGEX = /import\s+(?:\{\s*(\w*[Ll]ogger)(?:\s+as\s+(\w*[Ll]ogger))?\s*\}|(\w*[Ll]ogger))\s+from\s+['"][^'"]+\/(?:logger|logging)(?:\/index)?(?:\.[jt]s)?['"]/u;
const NAMED_EXPORT_REGEX = /export\s+(?:const\s+|\{[^{}]*)\b(\w*[Ll]ogger)\b/u;
const findLogger = async ({
contents,
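The updated `IMPORT_REGEX` now accepts an optional `/index` segment (and an optional `.js`/`.ts` extension) on the logger module path. A quick spot-check of what the new pattern matches; the sample import lines are assumptions for illustration, not skuba test fixtures:

```ts
const IMPORT_REGEX =
  /import\s+(?:\{\s*(\w*[Ll]ogger)(?:\s+as\s+(\w*[Ll]ogger))?\s*\}|(\w*[Ll]ogger))\s+from\s+['"][^'"]+\/(?:logger|logging)(?:\/index)?(?:\.[jt]s)?['"]/u;

const samples = [
  `import { logger } from 'src/framework/logging';`,
  `import { logger } from 'src/framework/logging/index';`, // optional /index segment
  `import rootLogger from '../framework/logger.js';`, // optional extension
  `import { createLogger as logger } from './logging/index.ts';`,
];

// Each sample line matches the updated pattern.
for (const sample of samples) {
  console.log(IMPORT_REGEX.test(sample), sample);
}
```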