skuba 8.0.1-dive-migration-20240326010259 → 8.1.0-configure-stdin-20240509055640
This diff represents the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between these versions as they appear in their respective public registries.
- package/README.md +2 -2
- package/jest/transform.js +2 -2
- package/lib/api/git/index.d.ts +1 -0
- package/lib/api/git/index.js +3 -0
- package/lib/api/git/index.js.map +2 -2
- package/lib/api/git/isFileGitIgnored.d.ts +4 -0
- package/lib/api/git/isFileGitIgnored.js +49 -0
- package/lib/api/git/isFileGitIgnored.js.map +7 -0
- package/lib/api/net/waitFor.d.ts +3 -3
- package/lib/cli/configure/ensureTemplateCompletion.js +18 -2
- package/lib/cli/configure/ensureTemplateCompletion.js.map +2 -2
- package/lib/cli/init/getConfig.d.ts +1 -0
- package/lib/cli/init/getConfig.js +7 -1
- package/lib/cli/init/getConfig.js.map +2 -2
- package/lib/cli/lint/internalLints/refreshConfigFiles.js +14 -3
- package/lib/cli/lint/internalLints/refreshConfigFiles.js.map +2 -2
- package/lib/utils/packageManager.js +13 -8
- package/lib/utils/packageManager.js.map +3 -3
- package/package.json +8 -9
- package/template/base/.vscode/extensions.json +3 -0
- package/template/base/_.gitignore +1 -0
- package/template/base/_.prettierignore +1 -0
- package/template/express-rest-api/.buildkite/pipeline.yml +3 -2
- package/template/express-rest-api/Dockerfile.dev-deps +3 -1
- package/template/express-rest-api/package.json +3 -2
- package/template/greeter/.buildkite/pipeline.yml +4 -1
- package/template/greeter/Dockerfile +3 -1
- package/template/greeter/package.json +1 -1
- package/template/koa-rest-api/.buildkite/pipeline.yml +3 -2
- package/template/koa-rest-api/Dockerfile.dev-deps +3 -1
- package/template/koa-rest-api/package.json +7 -6
- package/template/lambda-sqs-worker/.buildkite/pipeline.yml +3 -2
- package/template/lambda-sqs-worker/Dockerfile +3 -1
- package/template/lambda-sqs-worker/package.json +3 -3
- package/template/lambda-sqs-worker-cdk/.buildkite/pipeline.yml +3 -2
- package/template/lambda-sqs-worker-cdk/Dockerfile +3 -1
- package/template/lambda-sqs-worker-cdk/cdk.json +0 -23
- package/template/lambda-sqs-worker-cdk/infra/__snapshots__/appStack.test.ts.snap +6 -6
- package/template/lambda-sqs-worker-cdk/infra/appStack.test.ts +18 -20
- package/template/lambda-sqs-worker-cdk/infra/appStack.ts +15 -14
- package/template/lambda-sqs-worker-cdk/infra/config.ts +41 -0
- package/template/lambda-sqs-worker-cdk/infra/index.ts +2 -5
- package/template/lambda-sqs-worker-cdk/package.json +2 -2
- package/template/oss-npm-package/.github/workflows/release.yml +1 -1
- package/template/oss-npm-package/.github/workflows/validate.yml +1 -1
- package/template/lambda-sqs-worker-cdk/shared/context-types.ts +0 -22
package/README.md
CHANGED
@@ -2,8 +2,8 @@
 
 ---
 
-[](https://github.com/seek-oss/skuba/actions?query=workflow%3ARelease)
+[](https://github.com/seek-oss/skuba/actions?query=workflow%3AValidate)
 [](https://nodejs.org/en/)
 [](https://www.npmjs.com/package/skuba)
 
package/jest/transform.js
CHANGED
@@ -18,15 +18,15 @@ const maybeTsConfig = tryParseTsConfig();
 const isolatedModules = maybeTsConfig?.options.isolatedModules ?? true;
 
 const BROKEN_MODULE_RESOLUTIONS = new Set([
+  ModuleResolutionKind.Bundler,
   ModuleResolutionKind.Node16,
   ModuleResolutionKind.NodeNext,
 ]);
 
 /**
- * Passing through
+ * Passing through these module resolutions seems to break `ts-jest`.
  *
  * ```
- * error TS5110: Option 'module' must be set to 'Node16' when option 'moduleResolution' is set to 'Node16'.
  * error TS5110: Option 'module' must be set to 'NodeNext' when option 'moduleResolution' is set to 'NodeNext'.
  * ```
  *
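
The new `ModuleResolutionKind.Bundler` entry covers TypeScript 5 projects that set `"moduleResolution": "bundler"` in their tsconfig. The sketch below illustrates the kind of guard this hunk extends; the rest of transform.js is not part of this diff, so the helper name and the exact handling are assumptions rather than skuba's verbatim code.

```typescript
import { ModuleResolutionKind } from 'typescript';

// Module resolutions that ts-jest cannot be handed directly (per the comment above).
const BROKEN_MODULE_RESOLUTIONS = new Set([
  ModuleResolutionKind.Bundler,
  ModuleResolutionKind.Node16,
  ModuleResolutionKind.NodeNext,
]);

// Hypothetical helper: strip a problematic moduleResolution from parsed
// tsconfig compiler options before they reach the Jest transformer.
const sanitiseModuleResolution = <
  T extends { moduleResolution?: ModuleResolutionKind },
>(
  options: T,
): T =>
  options.moduleResolution !== undefined &&
  BROKEN_MODULE_RESOLUTIONS.has(options.moduleResolution)
    ? { ...options, moduleResolution: undefined }
    : options;
```
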
package/lib/api/git/index.d.ts
CHANGED
package/lib/api/git/index.js
CHANGED
@@ -27,6 +27,7 @@ __export(git_exports, {
   getHeadCommitId: () => import_log.getHeadCommitId,
   getHeadCommitMessage: () => import_log.getHeadCommitMessage,
   getOwnerAndRepo: () => import_remote.getOwnerAndRepo,
+  isFileGitIgnored: () => import_isFileGitIgnored.isFileGitIgnored,
   push: () => import_push.push,
   reset: () => import_reset.reset
 });
@@ -41,6 +42,7 @@ var import_remote = require("./remote");
 var import_push = require("./push");
 var import_pull = require("./pull");
 var import_reset = require("./reset");
+var import_isFileGitIgnored = require("./isFileGitIgnored");
 // Annotate the CommonJS export names for ESM import in node:
 0 && (module.exports = {
   commit,
@@ -52,6 +54,7 @@ var import_reset = require("./reset");
   getHeadCommitId,
   getHeadCommitMessage,
   getOwnerAndRepo,
+  isFileGitIgnored,
   push,
   reset
 });
package/lib/api/git/index.js.map
CHANGED
@@ -1,7 +1,7 @@
 {
   "version": 3,
   "sources": ["../../../src/api/git/index.ts"],
-
"sourcesContent": ["export { commit } from './commit';\nexport { commitAllChanges } from './commitAllChanges';\nexport { currentBranch } from './currentBranch';\nexport { findRoot } from './findRoot';\nexport type { ChangedFile } from './getChangedFiles';\nexport { getChangedFiles } from './getChangedFiles';\nexport { getHeadCommitId, getHeadCommitMessage } from './log';\nexport { getOwnerAndRepo } from './remote';\nexport { push } from './push';\nexport { fastForwardBranch } from './pull';\nexport { reset } from './reset';\n"],
-
"mappings": ";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,oBAAuB;AACvB,8BAAiC;AACjC,2BAA8B;AAC9B,sBAAyB;AAEzB,6BAAgC;AAChC,iBAAsD;AACtD,oBAAgC;AAChC,kBAAqB;AACrB,kBAAkC;AAClC,mBAAsB;",
+
"sourcesContent": ["export { commit } from './commit';\nexport { commitAllChanges } from './commitAllChanges';\nexport { currentBranch } from './currentBranch';\nexport { findRoot } from './findRoot';\nexport type { ChangedFile } from './getChangedFiles';\nexport { getChangedFiles } from './getChangedFiles';\nexport { getHeadCommitId, getHeadCommitMessage } from './log';\nexport { getOwnerAndRepo } from './remote';\nexport { push } from './push';\nexport { fastForwardBranch } from './pull';\nexport { reset } from './reset';\nexport { isFileGitIgnored } from './isFileGitIgnored';\n"],
+
"mappings": ";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,oBAAuB;AACvB,8BAAiC;AACjC,2BAA8B;AAC9B,sBAAyB;AAEzB,6BAAgC;AAChC,iBAAsD;AACtD,oBAAgC;AAChC,kBAAqB;AACrB,kBAAkC;AAClC,mBAAsB;AACtB,8BAAiC;",
   "names": []
 }

package/lib/api/git/isFileGitIgnored.js
ADDED
@@ -0,0 +1,49 @@
+"use strict";
+var __create = Object.create;
+var __defProp = Object.defineProperty;
+var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
+var __getOwnPropNames = Object.getOwnPropertyNames;
+var __getProtoOf = Object.getPrototypeOf;
+var __hasOwnProp = Object.prototype.hasOwnProperty;
+var __export = (target, all) => {
+  for (var name in all)
+    __defProp(target, name, { get: all[name], enumerable: true });
+};
+var __copyProps = (to, from, except, desc) => {
+  if (from && typeof from === "object" || typeof from === "function") {
+    for (let key of __getOwnPropNames(from))
+      if (!__hasOwnProp.call(to, key) && key !== except)
+        __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
+  }
+  return to;
+};
+var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(
+  // If the importer is in node compatibility mode or this is not an ESM
+  // file that has been converted to a CommonJS file using a Babel-
+  // compatible transform (i.e. "__esModule" has not been set), then set
+  // "default" to the CommonJS "module.exports" for node compatibility.
+  isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target,
+  mod
+));
+var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
+var isFileGitIgnored_exports = {};
+__export(isFileGitIgnored_exports, {
+  isFileGitIgnored: () => isFileGitIgnored
+});
+module.exports = __toCommonJS(isFileGitIgnored_exports);
+var import_path = __toESM(require("path"));
+var import_fs_extra = __toESM(require("fs-extra"));
+var import_isomorphic_git = __toESM(require("isomorphic-git"));
+const isFileGitIgnored = ({
+  absolutePath,
+  gitRoot
+}) => import_isomorphic_git.default.isIgnored({
+  dir: gitRoot,
+  filepath: import_path.default.relative(gitRoot, absolutePath),
+  fs: import_fs_extra.default
+});
+// Annotate the CommonJS export names for ESM import in node:
+0 && (module.exports = {
+  isFileGitIgnored
+});
+//# sourceMappingURL=isFileGitIgnored.js.map

package/lib/api/git/isFileGitIgnored.js.map
ADDED
@@ -0,0 +1,7 @@
+{
+  "version": 3,
+  "sources": ["../../../src/api/git/isFileGitIgnored.ts"],
+
"sourcesContent": ["import path from 'path';\n\nimport fs from 'fs-extra';\nimport git from 'isomorphic-git';\n\nexport const isFileGitIgnored = ({\n absolutePath,\n gitRoot,\n}: {\n absolutePath: string;\n gitRoot: string;\n}): Promise<boolean> =>\n git.isIgnored({\n dir: gitRoot,\n filepath: path.relative(gitRoot, absolutePath),\n fs,\n });\n"],
+
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAiB;AAEjB,sBAAe;AACf,4BAAgB;AAET,MAAM,mBAAmB,CAAC;AAAA,EAC/B;AAAA,EACA;AACF,MAIE,sBAAAA,QAAI,UAAU;AAAA,EACZ,KAAK;AAAA,EACL,UAAU,YAAAC,QAAK,SAAS,SAAS,YAAY;AAAA,EAC7C,oBAAAC;AACF,CAAC;",
+  "names": ["git", "path", "fs"]
+}
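
With these files in place, `isFileGitIgnored` is exposed through skuba's `Git` API alongside the existing helpers. A minimal usage sketch, assuming `Git.findRoot` resolves to the repository root (or a nullish value outside a Git work tree), as in the refreshConfigFiles lint further down this diff:

```typescript
import path from 'path';

import { Git } from 'skuba';

const main = async () => {
  // Locate the repository root for the current working directory.
  const gitRoot = await Git.findRoot({ dir: process.cwd() });

  if (!gitRoot) {
    throw new Error('Not inside a Git repository');
  }

  // isomorphic-git evaluates .gitignore rules against the relative path.
  const ignored = await Git.isFileGitIgnored({
    gitRoot,
    absolutePath: path.join(gitRoot, '.npmrc'),
  });

  console.log(`.npmrc is ${ignored ? '' : 'not '}gitignored`);
};

main().catch((err) => {
  console.error(err);
  process.exit(1);
});
```
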
package/lib/api/net/waitFor.d.ts
CHANGED
@@ -6,7 +6,7 @@ import { type SocketAddress } from './socket';
  * `timeout` is reached.
  */
 export declare const waitFor: ({ host, port, resolveCompose, timeout, }: {
-    host?: string
+    host?: string;
     port: number;
     /**
      * Whether to treat the `host` and `port` arguments as a private Docker
@@ -17,6 +17,6 @@ export declare const waitFor: ({ host, port, resolveCompose, timeout, }: {
      * - Enabled locally, when the application is running directly on the machine
      * - Disabled in CI, when running in a container on the Docker Compose network
      */
-    resolveCompose?: boolean
-    timeout?: number
+    resolveCompose?: boolean;
+    timeout?: number;
 }) => Promise<SocketAddress>;

package/lib/cli/configure/ensureTemplateCompletion.js
CHANGED
@@ -34,12 +34,28 @@ module.exports = __toCommonJS(ensureTemplateCompletion_exports);
 var import_path = __toESM(require("path"));
 var import_chalk = __toESM(require("chalk"));
 var import_fs_extra = __toESM(require("fs-extra"));
+var import_zod = require("zod");
 var import_copy = require("../../utils/copy");
 var import_logging = require("../../utils/logging");
 var import_template = require("../../utils/template");
 var import_validation = require("../../utils/validation");
 var import_getConfig = require("../init/getConfig");
 var import_package = require("./processing/package");
+const recordSchema = import_zod.z.object({ templateData: import_zod.z.record(import_zod.z.string()) });
+const getTemplateDataFromStdIn = async (templateConfig) => {
+  const config = await (0, import_getConfig.readJSONFromStdIn)();
+  const data = recordSchema.parse(config);
+  templateConfig.fields.forEach((field) => {
+    const value = data.templateData[field.name];
+    if (value === void 0) {
+      throw new Error(`Missing field: ${field.name}`);
+    }
+    if (field.validate && !field.validate(value)) {
+      throw new Error(`Invalid value for field: ${field.name}`);
+    }
+  });
+  return data.templateData;
+};
 const ensureTemplateCompletion = async ({
   destinationRoot,
   include,
@@ -51,11 +67,11 @@ const ensureTemplateCompletion = async ({
   }
   const templateName = (0, import_validation.hasStringProp)(manifest.packageJson.skuba, "template") ? manifest.packageJson.skuba.template : "template";
   import_logging.log.newline();
-  const templateData = await (0, import_getConfig.runForm)({
+  const templateData = process.stdin.isTTY ? await (0, import_getConfig.runForm)({
     choices: templateConfig.fields,
     message: import_chalk.default.bold(`Complete ${import_chalk.default.cyan(templateName)}:`),
     name: "customAnswers"
-  });
+  }) : await getTemplateDataFromStdIn(templateConfig);
   const updatedPackageJson = await (0, import_package.formatPackage)(manifest.packageJson);
   const packageJsonFilepath = import_path.default.join(destinationRoot, "package.json");
   await import_fs_extra.default.promises.writeFile(packageJsonFilepath, updatedPackageJson);
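
In practice this means `skuba configure` can now be driven non-interactively: when stdin is not a TTY, it expects a JSON document of the form `{ "templateData": { ... } }` covering every field the template declares, and each value must also pass any `validate` function the field defines. A hedged sketch of invoking it that way; the field names below are hypothetical and depend entirely on the template's own configuration:

```typescript
import { spawn } from 'child_process';

// Hypothetical template fields; a real template declares its own.
const payload = JSON.stringify({
  templateData: {
    serviceName: 'my-service',
    description: 'An example service',
  },
});

// Without a TTY, `skuba configure` reads the JSON from stdin instead of prompting.
const child = spawn('pnpm', ['exec', 'skuba', 'configure'], {
  stdio: ['pipe', 'inherit', 'inherit'],
});

child.stdin?.write(payload);
child.stdin?.end();
```
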

package/lib/cli/configure/ensureTemplateCompletion.js.map
CHANGED
@@ -1,7 +1,7 @@
 {
   "version": 3,
   "sources": ["../../../src/cli/configure/ensureTemplateCompletion.ts"],
-
"sourcesContent": ["import path from 'path';\n\nimport chalk from 'chalk';\nimport fs from 'fs-extra';\nimport type { NormalizedReadResult } from 'read-pkg-up';\n\nimport { copyFiles, createEjsRenderer } from '../../utils/copy';\nimport { log } from '../../utils/logging';\nimport {\n type TemplateConfig,\n ensureTemplateConfigDeletion,\n} from '../../utils/template';\nimport { hasStringProp } from '../../utils/validation';\nimport {
-
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAiB;AAEjB,mBAAkB;AAClB,sBAAe;
+
"sourcesContent": ["import path from 'path';\n\nimport chalk from 'chalk';\nimport fs from 'fs-extra';\nimport type { NormalizedReadResult } from 'read-pkg-up';\nimport { z } from 'zod';\n\nimport { copyFiles, createEjsRenderer } from '../../utils/copy';\nimport { log } from '../../utils/logging';\nimport {\n type TemplateConfig,\n ensureTemplateConfigDeletion,\n} from '../../utils/template';\nimport { hasStringProp } from '../../utils/validation';\nimport {\n getTemplateConfig,\n readJSONFromStdIn,\n runForm,\n} from '../init/getConfig';\n\nimport { formatPackage } from './processing/package';\n\ninterface Props {\n destinationRoot: string;\n include: (pathname: string) => boolean;\n manifest: NormalizedReadResult;\n}\n\nconst recordSchema = z.object({ templateData: z.record(z.string()) });\n\nconst getTemplateDataFromStdIn = async (\n templateConfig: TemplateConfig,\n): Promise<Record<string, string>> => {\n const config = await readJSONFromStdIn();\n const data = recordSchema.parse(config);\n\n templateConfig.fields.forEach((field) => {\n const value = data.templateData[field.name];\n if (value === undefined) {\n throw new Error(`Missing field: ${field.name}`);\n }\n\n if (field.validate && !field.validate(value)) {\n throw new Error(`Invalid value for field: ${field.name}`);\n }\n });\n\n return data.templateData;\n};\n\nexport const ensureTemplateCompletion = async ({\n destinationRoot,\n include,\n manifest,\n}: Props): Promise<TemplateConfig> => {\n const templateConfig = getTemplateConfig(destinationRoot);\n\n if (templateConfig.fields.length === 0) {\n return templateConfig;\n }\n\n const templateName = hasStringProp(manifest.packageJson.skuba, 'template')\n ? manifest.packageJson.skuba.template\n : 'template';\n\n log.newline();\n const templateData = process.stdin.isTTY\n ? await runForm({\n choices: templateConfig.fields,\n message: chalk.bold(`Complete ${chalk.cyan(templateName)}:`),\n name: 'customAnswers',\n })\n : await getTemplateDataFromStdIn(templateConfig);\n\n const updatedPackageJson = await formatPackage(manifest.packageJson);\n const packageJsonFilepath = path.join(destinationRoot, 'package.json');\n await fs.promises.writeFile(packageJsonFilepath, updatedPackageJson);\n\n await copyFiles({\n sourceRoot: destinationRoot,\n destinationRoot,\n include,\n processors: [createEjsRenderer(templateData)],\n });\n\n await ensureTemplateConfigDeletion(destinationRoot);\n\n log.newline();\n log.ok('Templating complete!');\n\n return templateConfig;\n};\n"],
+
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAiB;AAEjB,mBAAkB;AAClB,sBAAe;AAEf,iBAAkB;AAElB,kBAA6C;AAC7C,qBAAoB;AACpB,sBAGO;AACP,wBAA8B;AAC9B,uBAIO;AAEP,qBAA8B;AAQ9B,MAAM,eAAe,aAAE,OAAO,EAAE,cAAc,aAAE,OAAO,aAAE,OAAO,CAAC,EAAE,CAAC;AAEpE,MAAM,2BAA2B,OAC/B,mBACoC;AACpC,QAAM,SAAS,UAAM,oCAAkB;AACvC,QAAM,OAAO,aAAa,MAAM,MAAM;AAEtC,iBAAe,OAAO,QAAQ,CAAC,UAAU;AACvC,UAAM,QAAQ,KAAK,aAAa,MAAM,IAAI;AAC1C,QAAI,UAAU,QAAW;AACvB,YAAM,IAAI,MAAM,kBAAkB,MAAM,IAAI,EAAE;AAAA,IAChD;AAEA,QAAI,MAAM,YAAY,CAAC,MAAM,SAAS,KAAK,GAAG;AAC5C,YAAM,IAAI,MAAM,4BAA4B,MAAM,IAAI,EAAE;AAAA,IAC1D;AAAA,EACF,CAAC;AAED,SAAO,KAAK;AACd;AAEO,MAAM,2BAA2B,OAAO;AAAA,EAC7C;AAAA,EACA;AAAA,EACA;AACF,MAAsC;AACpC,QAAM,qBAAiB,oCAAkB,eAAe;AAExD,MAAI,eAAe,OAAO,WAAW,GAAG;AACtC,WAAO;AAAA,EACT;AAEA,QAAM,mBAAe,iCAAc,SAAS,YAAY,OAAO,UAAU,IACrE,SAAS,YAAY,MAAM,WAC3B;AAEJ,qBAAI,QAAQ;AACZ,QAAM,eAAe,QAAQ,MAAM,QAC/B,UAAM,0BAAQ;AAAA,IACZ,SAAS,eAAe;AAAA,IACxB,SAAS,aAAAA,QAAM,KAAK,YAAY,aAAAA,QAAM,KAAK,YAAY,CAAC,GAAG;AAAA,IAC3D,MAAM;AAAA,EACR,CAAC,IACD,MAAM,yBAAyB,cAAc;AAEjD,QAAM,qBAAqB,UAAM,8BAAc,SAAS,WAAW;AACnE,QAAM,sBAAsB,YAAAC,QAAK,KAAK,iBAAiB,cAAc;AACrE,QAAM,gBAAAC,QAAG,SAAS,UAAU,qBAAqB,kBAAkB;AAEnE,YAAM,uBAAU;AAAA,IACd,YAAY;AAAA,IACZ;AAAA,IACA;AAAA,IACA,YAAY,KAAC,+BAAkB,YAAY,CAAC;AAAA,EAC9C,CAAC;AAED,YAAM,8CAA6B,eAAe;AAElD,qBAAI,QAAQ;AACZ,qBAAI,GAAG,sBAAsB;AAE7B,SAAO;AACT;",
   "names": ["chalk", "path", "fs"]
 }

package/lib/cli/init/getConfig.d.ts
CHANGED
@@ -8,6 +8,7 @@ export declare const runForm: <T = Record<string, string>>(props: {
 }) => Promise<T>;
 export declare const getTemplateConfig: (dir: string) => TemplateConfig;
 export declare const configureFromPrompt: () => Promise<InitConfig>;
+export declare const readJSONFromStdIn: () => Promise<unknown>;
 export declare const getConfig: () => Promise<{
     packageManager: "yarn" | "pnpm";
     templateName: string;

package/lib/cli/init/getConfig.js
CHANGED
@@ -31,6 +31,7 @@ __export(getConfig_exports, {
   configureFromPrompt: () => configureFromPrompt,
   getConfig: () => getConfig,
   getTemplateConfig: () => getTemplateConfig,
+  readJSONFromStdIn: () => readJSONFromStdIn,
   runForm: () => runForm
 });
 module.exports = __toCommonJS(getConfig_exports);
@@ -239,7 +240,7 @@ const configureFromPrompt = async () => {
     type
   };
 };
-const
+const readJSONFromStdIn = async () => {
   let text = "";
   await new Promise(
     (resolve) => process.stdin.on("data", (chunk) => text += chunk.toString()).once("end", resolve)
@@ -256,6 +257,10 @@ const configureFromPipe = async () => {
     import_logging.log.err("Invalid JSON from stdin.");
     process.exit(1);
   }
+  return value;
+};
+const configureFromPipe = async () => {
+  const value = await readJSONFromStdIn();
   const result = import_types.initConfigInputSchema.safeParse(value);
   if (!result.success) {
     import_logging.log.err("Invalid data from stdin:");
@@ -308,6 +313,7 @@ const getConfig = () => process.stdin.isTTY ? configureFromPrompt() : configureF
   configureFromPrompt,
   getConfig,
   getTemplateConfig,
+  readJSONFromStdIn,
   runForm
 });
 //# sourceMappingURL=getConfig.js.map

package/lib/cli/init/getConfig.js.map
CHANGED
@@ -1,7 +1,7 @@
 {
   "version": 3,
   "sources": ["../../../src/cli/init/getConfig.ts"],
-
"sourcesContent": ["import path from 'path';\n\nimport chalk from 'chalk';\nimport { Form, type FormChoice } from 'enquirer';\nimport fs from 'fs-extra';\n\nimport { copyFiles } from '../../utils/copy';\nimport { isErrorWithCode } from '../../utils/error';\nimport { log } from '../../utils/logging';\nimport {\n DEFAULT_PACKAGE_MANAGER,\n configForPackageManager,\n} from '../../utils/packageManager';\nimport { getRandomPort } from '../../utils/port';\nimport {\n TEMPLATE_CONFIG_FILENAME,\n TEMPLATE_DIR,\n type TemplateConfig,\n templateConfigSchema,\n} from '../../utils/template';\n\nimport { downloadGitHubTemplate } from './git';\nimport {\n BASE_PROMPT_PROPS,\n type BaseFields,\n type Choice,\n GIT_PATH_PROMPT,\n SHOULD_CONTINUE_PROMPT,\n TEMPLATE_PROMPT,\n} from './prompts';\nimport { type InitConfig, initConfigInputSchema } from './types';\n\nexport const runForm = <T = Record<string, string>>(props: {\n choices: Readonly<Choice[]>;\n message: string;\n name: string;\n}) => {\n const { message, name } = props;\n\n const choices = props.choices.map((choice) => ({\n ...choice,\n validate: (value: string | undefined) => {\n if (\n !value ||\n value === '' ||\n (value === choice.initial && !choice.allowInitial)\n ) {\n return 'Form is not complete';\n }\n\n return choice.validate?.(value) ?? true;\n },\n }));\n\n const form = new Form<T>({\n choices,\n message,\n name,\n validate: async (values) => {\n const results = await Promise.all(\n choices.map((choice) => choice.validate(values[choice.name])),\n );\n\n return (\n results.find((result) => typeof result === 'string') ??\n results.every((result) => result === true)\n );\n },\n });\n\n return form.run();\n};\n\nconst confirmShouldContinue = async (choices: Readonly<FormChoice[]>) => {\n const fieldsList = choices.map((choice) => choice.message);\n\n log.newline();\n log.plain('This template uses the following information:');\n log.newline();\n fieldsList.forEach((message) => log.subtle(`- ${message}`));\n\n log.newline();\n const result = await SHOULD_CONTINUE_PROMPT.run();\n\n return result === 'yes';\n};\n\nconst createDirectory = async (dir: string) => {\n try {\n await fs.promises.mkdir(dir);\n } catch (err) {\n if (isErrorWithCode(err, 'EEXIST')) {\n log.err(`The directory '${dir}' already exists.`);\n process.exit(1);\n }\n\n throw err;\n }\n};\n\nconst cloneTemplate = async (\n templateName: string,\n destinationDir: string,\n): Promise<TemplateConfig> => {\n const isCustomTemplate = templateName.startsWith('github:');\n\n if (isCustomTemplate) {\n const gitHubPath = templateName.slice('github:'.length);\n\n await downloadGitHubTemplate(gitHubPath, destinationDir);\n } else {\n const templateDir = path.join(TEMPLATE_DIR, templateName);\n\n await copyFiles({\n // assume built-in templates have no extraneous files\n include: () => true,\n sourceRoot: templateDir,\n destinationRoot: destinationDir,\n processors: [],\n // built-in templates have files like _package.json\n stripUnderscorePrefix: true,\n });\n }\n\n const templateConfig = getTemplateConfig(\n path.join(process.cwd(), destinationDir),\n );\n\n if (isCustomTemplate) {\n log.newline();\n log.warn(\n 'You may need to run',\n log.bold(\n configForPackageManager(templateConfig.packageManager).exec,\n 'skuba',\n 'configure',\n ),\n 'once this is done.',\n );\n }\n\n return templateConfig;\n};\n\nconst getTemplateName = async () => {\n const templateSelection = await TEMPLATE_PROMPT.run();\n\n if (templateSelection === 'github \u2192') {\n const gitHubPath = await 
GIT_PATH_PROMPT.run();\n return `github:${gitHubPath}`;\n }\n\n return templateSelection;\n};\n\nconst generatePlaceholders = (choices: FormChoice[]) =>\n Object.fromEntries(\n choices.map(({ name }) => [name, `<%- ${name} %>`] as const),\n );\n\nexport const getTemplateConfig = (dir: string): TemplateConfig => {\n const templateConfigPath = path.join(dir, TEMPLATE_CONFIG_FILENAME);\n\n try {\n /* eslint-disable-next-line @typescript-eslint/no-var-requires */\n const templateConfig = require(templateConfigPath) as unknown;\n\n return templateConfigSchema.parse(templateConfig);\n } catch (err) {\n if (isErrorWithCode(err, 'MODULE_NOT_FOUND')) {\n return {\n entryPoint: undefined,\n fields: [],\n packageManager: DEFAULT_PACKAGE_MANAGER,\n type: undefined,\n };\n }\n\n throw err;\n }\n};\n\nconst baseToTemplateData = async ({\n ownerName,\n platformName,\n repoName,\n defaultBranch,\n}: BaseFields) => {\n const [orgName, teamName] = ownerName.split('/');\n\n const port = String(await getRandomPort());\n\n if (!orgName) {\n throw new Error(`Invalid format for owner name: ${ownerName}`);\n }\n\n return {\n orgName,\n ownerName,\n repoName,\n defaultBranch,\n // Use standalone username in `teamName` contexts\n teamName: teamName ?? orgName,\n\n port,\n\n platformName,\n lambdaCdkArchitecture: platformName === 'amd64' ? 'X86_64' : 'ARM_64',\n lambdaServerlessArchitecture:\n platformName === 'amd64' ? 'x86_64' : platformName,\n };\n};\n\nexport const configureFromPrompt = async (): Promise<InitConfig> => {\n const { ownerName, platformName, repoName, defaultBranch } =\n await runForm<BaseFields>(BASE_PROMPT_PROPS);\n log.plain(chalk.cyan(repoName), 'by', chalk.cyan(ownerName));\n\n const templateData = await baseToTemplateData({\n ownerName,\n platformName,\n repoName,\n defaultBranch,\n });\n\n const destinationDir = repoName;\n\n await createDirectory(destinationDir);\n\n log.newline();\n const templateName = await getTemplateName();\n\n const { entryPoint, fields, noSkip, packageManager, type } =\n await cloneTemplate(templateName, destinationDir);\n\n if (fields.length === 0) {\n return {\n destinationDir,\n entryPoint,\n packageManager,\n templateComplete: true,\n templateData,\n templateName,\n type,\n };\n }\n\n const shouldContinue = noSkip ? true : await confirmShouldContinue(fields);\n\n if (shouldContinue) {\n log.newline();\n const customAnswers = await runForm({\n choices: fields,\n message: chalk.bold(`Complete ${chalk.cyan(templateName)}:`),\n name: 'customAnswers',\n });\n\n return {\n destinationDir,\n entryPoint,\n packageManager,\n templateComplete: true,\n templateData: { ...templateData, ...customAnswers },\n templateName,\n type,\n };\n }\n\n log.newline();\n log.warn(\n `Resume this later with ${chalk.bold(\n configForPackageManager(packageManager).exec,\n 'skuba configure',\n )}.`,\n );\n\n const customAnswers = generatePlaceholders(fields);\n\n return {\n destinationDir,\n entryPoint,\n packageManager,\n templateComplete: false,\n templateData: { ...templateData, ...customAnswers },\n templateName,\n type,\n };\n};\n\
-
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAiB;AAEjB,mBAAkB;AAClB,sBAAsC;AACtC,sBAAe;AAEf,kBAA0B;AAC1B,mBAAgC;AAChC,qBAAoB;AACpB,4BAGO;AACP,kBAA8B;AAC9B,sBAKO;AAEP,iBAAuC;AACvC,qBAOO;AACP,mBAAuD;AAEhD,MAAM,UAAU,CAA6B,UAI9C;AACJ,QAAM,EAAE,SAAS,KAAK,IAAI;AAE1B,QAAM,UAAU,MAAM,QAAQ,IAAI,CAAC,YAAY;AAAA,IAC7C,GAAG;AAAA,IACH,UAAU,CAAC,UAA8B;AACvC,UACE,CAAC,SACD,UAAU,MACT,UAAU,OAAO,WAAW,CAAC,OAAO,cACrC;AACA,eAAO;AAAA,MACT;AAEA,aAAO,OAAO,WAAW,KAAK,KAAK;AAAA,IACrC;AAAA,EACF,EAAE;AAEF,QAAM,OAAO,IAAI,qBAAQ;AAAA,IACvB;AAAA,IACA;AAAA,IACA;AAAA,IACA,UAAU,OAAO,WAAW;AAC1B,YAAM,UAAU,MAAM,QAAQ;AAAA,QAC5B,QAAQ,IAAI,CAAC,WAAW,OAAO,SAAS,OAAO,OAAO,IAAI,CAAC,CAAC;AAAA,MAC9D;AAEA,aACE,QAAQ,KAAK,CAAC,WAAW,OAAO,WAAW,QAAQ,KACnD,QAAQ,MAAM,CAAC,WAAW,WAAW,IAAI;AAAA,IAE7C;AAAA,EACF,CAAC;AAED,SAAO,KAAK,IAAI;AAClB;AAEA,MAAM,wBAAwB,OAAO,YAAoC;AACvE,QAAM,aAAa,QAAQ,IAAI,CAAC,WAAW,OAAO,OAAO;AAEzD,qBAAI,QAAQ;AACZ,qBAAI,MAAM,+CAA+C;AACzD,qBAAI,QAAQ;AACZ,aAAW,QAAQ,CAAC,YAAY,mBAAI,OAAO,KAAK,OAAO,EAAE,CAAC;AAE1D,qBAAI,QAAQ;AACZ,QAAM,SAAS,MAAM,sCAAuB,IAAI;AAEhD,SAAO,WAAW;AACpB;AAEA,MAAM,kBAAkB,OAAO,QAAgB;AAC7C,MAAI;AACF,UAAM,gBAAAA,QAAG,SAAS,MAAM,GAAG;AAAA,EAC7B,SAAS,KAAK;AACZ,YAAI,8BAAgB,KAAK,QAAQ,GAAG;AAClC,yBAAI,IAAI,kBAAkB,GAAG,mBAAmB;AAChD,cAAQ,KAAK,CAAC;AAAA,IAChB;AAEA,UAAM;AAAA,EACR;AACF;AAEA,MAAM,gBAAgB,OACpB,cACA,mBAC4B;AAC5B,QAAM,mBAAmB,aAAa,WAAW,SAAS;AAE1D,MAAI,kBAAkB;AACpB,UAAM,aAAa,aAAa,MAAM,UAAU,MAAM;AAEtD,cAAM,mCAAuB,YAAY,cAAc;AAAA,EACzD,OAAO;AACL,UAAM,cAAc,YAAAC,QAAK,KAAK,8BAAc,YAAY;AAExD,cAAM,uBAAU;AAAA;AAAA,MAEd,SAAS,MAAM;AAAA,MACf,YAAY;AAAA,MACZ,iBAAiB;AAAA,MACjB,YAAY,CAAC;AAAA;AAAA,MAEb,uBAAuB;AAAA,IACzB,CAAC;AAAA,EACH;AAEA,QAAM,iBAAiB;AAAA,IACrB,YAAAA,QAAK,KAAK,QAAQ,IAAI,GAAG,cAAc;AAAA,EACzC;AAEA,MAAI,kBAAkB;AACpB,uBAAI,QAAQ;AACZ,uBAAI;AAAA,MACF;AAAA,MACA,mBAAI;AAAA,YACF,+CAAwB,eAAe,cAAc,EAAE;AAAA,QACvD;AAAA,QACA;AAAA,MACF;AAAA,MACA;AAAA,IACF;AAAA,EACF;AAEA,SAAO;AACT;AAEA,MAAM,kBAAkB,YAAY;AAClC,QAAM,oBAAoB,MAAM,+BAAgB,IAAI;AAEpD,MAAI,sBAAsB,iBAAY;AACpC,UAAM,aAAa,MAAM,+BAAgB,IAAI;AAC7C,WAAO,UAAU,UAAU;AAAA,EAC7B;AAEA,SAAO;AACT;AAEA,MAAM,uBAAuB,CAAC,YAC5B,OAAO;AAAA,EACL,QAAQ,IAAI,CAAC,EAAE,KAAK,MAAM,CAAC,MAAM,OAAO,IAAI,KAAK,CAAU;AAC7D;AAEK,MAAM,oBAAoB,CAAC,QAAgC;AAChE,QAAM,qBAAqB,YAAAA,QAAK,KAAK,KAAK,wCAAwB;AAElE,MAAI;AAEF,UAAM,iBAAiB,QAAQ,kBAAkB;AAEjD,WAAO,qCAAqB,MAAM,cAAc;AAAA,EAClD,SAAS,KAAK;AACZ,YAAI,8BAAgB,KAAK,kBAAkB,GAAG;AAC5C,aAAO;AAAA,QACL,YAAY;AAAA,QACZ,QAAQ,CAAC;AAAA,QACT,gBAAgB;AAAA,QAChB,MAAM;AAAA,MACR;AAAA,IACF;AAEA,UAAM;AAAA,EACR;AACF;AAEA,MAAM,qBAAqB,OAAO;AAAA,EAChC;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF,MAAkB;AAChB,QAAM,CAAC,SAAS,QAAQ,IAAI,UAAU,MAAM,GAAG;AAE/C,QAAM,OAAO,OAAO,UAAM,2BAAc,CAAC;AAEzC,MAAI,CAAC,SAAS;AACZ,UAAM,IAAI,MAAM,kCAAkC,SAAS,EAAE;AAAA,EAC/D;AAEA,SAAO;AAAA,IACL;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA;AAAA,IAEA,UAAU,YAAY;AAAA,IAEtB;AAAA,IAEA;AAAA,IACA,uBAAuB,iBAAiB,UAAU,WAAW;AAAA,IAC7D,8BACE,iBAAiB,UAAU,WAAW;AAAA,EAC1C;AACF;AAEO,MAAM,sBAAsB,YAAiC;AAClE,QAAM,EAAE,WAAW,cAAc,UAAU,cAAc,IACvD,MAAM,QAAoB,gCAAiB;AAC7C,qBAAI,MAAM,aAAAC,QAAM,KAAK,QAAQ,GAAG,MAAM,aAAAA,QAAM,KAAK,SAAS,CAAC;AAE3D,QAAM,eAAe,MAAM,mBAAmB;AAAA,IAC5C;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,CAAC;AAED,QAAM,iBAAiB;AAEvB,QAAM,gBAAgB,cAAc;AAEpC,qBAAI,QAAQ;AACZ,QAAM,eAAe,MAAM,gBAAgB;AAE3C,QAAM,EAAE,YAAY,QAAQ,QAAQ,gBAAgB,KAAK,IACvD,MAAM,cAAc,cAAc,cAAc;AAElD,MAAI,OAAO,WAAW,GAAG;AACvB,WAAO;AAAA,MACL;AAAA,MACA;AAAA,MACA;AAAA,MACA,kBAAkB;AAAA,MAClB;AAAA,MACA;AAAA,MACA;AAAA,IACF;AAAA,EACF;AAEA,QAAM,iBAAiB,SAAS,OAAO,MAAM,sBAAsB,MAAM;AAEzE,MAAI,gBAAgB;AAClB,uBAAI,QAAQ;AACZ,UAAMC,iBAAgB,MAAM,QAAQ
;AAAA,MAClC,SAAS;AAAA,MACT,SAAS,aAAAD,QAAM,KAAK,YAAY,aAAAA,QAAM,KAAK,YAAY,CAAC,GAAG;AAAA,MAC3D,MAAM;AAAA,IACR,CAAC;AAED,WAAO;AAAA,MACL;AAAA,MACA;AAAA,MACA;AAAA,MACA,kBAAkB;AAAA,MAClB,cAAc,EAAE,GAAG,cAAc,GAAGC,eAAc;AAAA,MAClD;AAAA,MACA;AAAA,IACF;AAAA,EACF;AAEA,qBAAI,QAAQ;AACZ,qBAAI;AAAA,IACF,0BAA0B,aAAAD,QAAM;AAAA,UAC9B,+CAAwB,cAAc,EAAE;AAAA,MACxC;AAAA,IACF,CAAC;AAAA,EACH;AAEA,QAAM,gBAAgB,qBAAqB,MAAM;AAEjD,SAAO;AAAA,IACL;AAAA,IACA;AAAA,IACA;AAAA,IACA,kBAAkB;AAAA,IAClB,cAAc,EAAE,GAAG,cAAc,GAAG,cAAc;AAAA,IAClD;AAAA,IACA;AAAA,EACF;AACF;
+
"sourcesContent": ["import path from 'path';\n\nimport chalk from 'chalk';\nimport { Form, type FormChoice } from 'enquirer';\nimport fs from 'fs-extra';\n\nimport { copyFiles } from '../../utils/copy';\nimport { isErrorWithCode } from '../../utils/error';\nimport { log } from '../../utils/logging';\nimport {\n DEFAULT_PACKAGE_MANAGER,\n configForPackageManager,\n} from '../../utils/packageManager';\nimport { getRandomPort } from '../../utils/port';\nimport {\n TEMPLATE_CONFIG_FILENAME,\n TEMPLATE_DIR,\n type TemplateConfig,\n templateConfigSchema,\n} from '../../utils/template';\n\nimport { downloadGitHubTemplate } from './git';\nimport {\n BASE_PROMPT_PROPS,\n type BaseFields,\n type Choice,\n GIT_PATH_PROMPT,\n SHOULD_CONTINUE_PROMPT,\n TEMPLATE_PROMPT,\n} from './prompts';\nimport { type InitConfig, initConfigInputSchema } from './types';\n\nexport const runForm = <T = Record<string, string>>(props: {\n choices: Readonly<Choice[]>;\n message: string;\n name: string;\n}) => {\n const { message, name } = props;\n\n const choices = props.choices.map((choice) => ({\n ...choice,\n validate: (value: string | undefined) => {\n if (\n !value ||\n value === '' ||\n (value === choice.initial && !choice.allowInitial)\n ) {\n return 'Form is not complete';\n }\n\n return choice.validate?.(value) ?? true;\n },\n }));\n\n const form = new Form<T>({\n choices,\n message,\n name,\n validate: async (values) => {\n const results = await Promise.all(\n choices.map((choice) => choice.validate(values[choice.name])),\n );\n\n return (\n results.find((result) => typeof result === 'string') ??\n results.every((result) => result === true)\n );\n },\n });\n\n return form.run();\n};\n\nconst confirmShouldContinue = async (choices: Readonly<FormChoice[]>) => {\n const fieldsList = choices.map((choice) => choice.message);\n\n log.newline();\n log.plain('This template uses the following information:');\n log.newline();\n fieldsList.forEach((message) => log.subtle(`- ${message}`));\n\n log.newline();\n const result = await SHOULD_CONTINUE_PROMPT.run();\n\n return result === 'yes';\n};\n\nconst createDirectory = async (dir: string) => {\n try {\n await fs.promises.mkdir(dir);\n } catch (err) {\n if (isErrorWithCode(err, 'EEXIST')) {\n log.err(`The directory '${dir}' already exists.`);\n process.exit(1);\n }\n\n throw err;\n }\n};\n\nconst cloneTemplate = async (\n templateName: string,\n destinationDir: string,\n): Promise<TemplateConfig> => {\n const isCustomTemplate = templateName.startsWith('github:');\n\n if (isCustomTemplate) {\n const gitHubPath = templateName.slice('github:'.length);\n\n await downloadGitHubTemplate(gitHubPath, destinationDir);\n } else {\n const templateDir = path.join(TEMPLATE_DIR, templateName);\n\n await copyFiles({\n // assume built-in templates have no extraneous files\n include: () => true,\n sourceRoot: templateDir,\n destinationRoot: destinationDir,\n processors: [],\n // built-in templates have files like _package.json\n stripUnderscorePrefix: true,\n });\n }\n\n const templateConfig = getTemplateConfig(\n path.join(process.cwd(), destinationDir),\n );\n\n if (isCustomTemplate) {\n log.newline();\n log.warn(\n 'You may need to run',\n log.bold(\n configForPackageManager(templateConfig.packageManager).exec,\n 'skuba',\n 'configure',\n ),\n 'once this is done.',\n );\n }\n\n return templateConfig;\n};\n\nconst getTemplateName = async () => {\n const templateSelection = await TEMPLATE_PROMPT.run();\n\n if (templateSelection === 'github \u2192') {\n const gitHubPath = await 
GIT_PATH_PROMPT.run();\n return `github:${gitHubPath}`;\n }\n\n return templateSelection;\n};\n\nconst generatePlaceholders = (choices: FormChoice[]) =>\n Object.fromEntries(\n choices.map(({ name }) => [name, `<%- ${name} %>`] as const),\n );\n\nexport const getTemplateConfig = (dir: string): TemplateConfig => {\n const templateConfigPath = path.join(dir, TEMPLATE_CONFIG_FILENAME);\n\n try {\n /* eslint-disable-next-line @typescript-eslint/no-var-requires */\n const templateConfig = require(templateConfigPath) as unknown;\n\n return templateConfigSchema.parse(templateConfig);\n } catch (err) {\n if (isErrorWithCode(err, 'MODULE_NOT_FOUND')) {\n return {\n entryPoint: undefined,\n fields: [],\n packageManager: DEFAULT_PACKAGE_MANAGER,\n type: undefined,\n };\n }\n\n throw err;\n }\n};\n\nconst baseToTemplateData = async ({\n ownerName,\n platformName,\n repoName,\n defaultBranch,\n}: BaseFields) => {\n const [orgName, teamName] = ownerName.split('/');\n\n const port = String(await getRandomPort());\n\n if (!orgName) {\n throw new Error(`Invalid format for owner name: ${ownerName}`);\n }\n\n return {\n orgName,\n ownerName,\n repoName,\n defaultBranch,\n // Use standalone username in `teamName` contexts\n teamName: teamName ?? orgName,\n\n port,\n\n platformName,\n lambdaCdkArchitecture: platformName === 'amd64' ? 'X86_64' : 'ARM_64',\n lambdaServerlessArchitecture:\n platformName === 'amd64' ? 'x86_64' : platformName,\n };\n};\n\nexport const configureFromPrompt = async (): Promise<InitConfig> => {\n const { ownerName, platformName, repoName, defaultBranch } =\n await runForm<BaseFields>(BASE_PROMPT_PROPS);\n log.plain(chalk.cyan(repoName), 'by', chalk.cyan(ownerName));\n\n const templateData = await baseToTemplateData({\n ownerName,\n platformName,\n repoName,\n defaultBranch,\n });\n\n const destinationDir = repoName;\n\n await createDirectory(destinationDir);\n\n log.newline();\n const templateName = await getTemplateName();\n\n const { entryPoint, fields, noSkip, packageManager, type } =\n await cloneTemplate(templateName, destinationDir);\n\n if (fields.length === 0) {\n return {\n destinationDir,\n entryPoint,\n packageManager,\n templateComplete: true,\n templateData,\n templateName,\n type,\n };\n }\n\n const shouldContinue = noSkip ? 
true : await confirmShouldContinue(fields);\n\n if (shouldContinue) {\n log.newline();\n const customAnswers = await runForm({\n choices: fields,\n message: chalk.bold(`Complete ${chalk.cyan(templateName)}:`),\n name: 'customAnswers',\n });\n\n return {\n destinationDir,\n entryPoint,\n packageManager,\n templateComplete: true,\n templateData: { ...templateData, ...customAnswers },\n templateName,\n type,\n };\n }\n\n log.newline();\n log.warn(\n `Resume this later with ${chalk.bold(\n configForPackageManager(packageManager).exec,\n 'skuba configure',\n )}.`,\n );\n\n const customAnswers = generatePlaceholders(fields);\n\n return {\n destinationDir,\n entryPoint,\n packageManager,\n templateComplete: false,\n templateData: { ...templateData, ...customAnswers },\n templateName,\n type,\n };\n};\n\nexport const readJSONFromStdIn = async () => {\n let text = '';\n\n await new Promise((resolve) =>\n process.stdin\n .on('data', (chunk) => (text += chunk.toString()))\n .once('end', resolve),\n );\n\n text = text.trim();\n\n if (text === '') {\n log.err('No data from stdin.');\n process.exit(1);\n }\n\n let value: unknown;\n\n try {\n value = JSON.parse(text) as unknown;\n } catch {\n log.err('Invalid JSON from stdin.');\n process.exit(1);\n }\n\n return value;\n};\n\nconst configureFromPipe = async (): Promise<InitConfig> => {\n const value = await readJSONFromStdIn();\n\n const result = initConfigInputSchema.safeParse(value);\n\n if (!result.success) {\n log.err('Invalid data from stdin:');\n log.err(result.error);\n process.exit(1);\n }\n\n const { destinationDir, templateComplete, templateName } = result.data;\n\n const templateData = {\n ...(await baseToTemplateData(result.data.templateData)),\n ...result.data.templateData,\n };\n\n await createDirectory(destinationDir);\n\n const { entryPoint, fields, noSkip, packageManager, type } =\n await cloneTemplate(templateName, destinationDir);\n\n if (!templateComplete) {\n if (noSkip) {\n log.err('Templating for', log.bold(templateName), 'cannot be skipped.');\n process.exit(1);\n }\n\n return {\n ...result.data,\n entryPoint,\n packageManager,\n templateData: {\n ...templateData,\n ...generatePlaceholders(fields),\n },\n type,\n };\n }\n\n const required = fields.map(({ name }) => name);\n\n const provided = new Set(Object.keys(templateData));\n\n const missing = required.filter((name) => !provided.has(name));\n\n if (missing.length > 0) {\n log.err('This template uses the following information:');\n log.newline();\n missing.forEach((name) => log.err(`- ${name}`));\n process.exit(1);\n }\n\n return {\n ...result.data,\n entryPoint,\n packageManager,\n templateData,\n type,\n };\n};\n\nexport const getConfig = () =>\n process.stdin.isTTY ? configureFromPrompt() : configureFromPipe();\n"],
+
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAiB;AAEjB,mBAAkB;AAClB,sBAAsC;AACtC,sBAAe;AAEf,kBAA0B;AAC1B,mBAAgC;AAChC,qBAAoB;AACpB,4BAGO;AACP,kBAA8B;AAC9B,sBAKO;AAEP,iBAAuC;AACvC,qBAOO;AACP,mBAAuD;AAEhD,MAAM,UAAU,CAA6B,UAI9C;AACJ,QAAM,EAAE,SAAS,KAAK,IAAI;AAE1B,QAAM,UAAU,MAAM,QAAQ,IAAI,CAAC,YAAY;AAAA,IAC7C,GAAG;AAAA,IACH,UAAU,CAAC,UAA8B;AACvC,UACE,CAAC,SACD,UAAU,MACT,UAAU,OAAO,WAAW,CAAC,OAAO,cACrC;AACA,eAAO;AAAA,MACT;AAEA,aAAO,OAAO,WAAW,KAAK,KAAK;AAAA,IACrC;AAAA,EACF,EAAE;AAEF,QAAM,OAAO,IAAI,qBAAQ;AAAA,IACvB;AAAA,IACA;AAAA,IACA;AAAA,IACA,UAAU,OAAO,WAAW;AAC1B,YAAM,UAAU,MAAM,QAAQ;AAAA,QAC5B,QAAQ,IAAI,CAAC,WAAW,OAAO,SAAS,OAAO,OAAO,IAAI,CAAC,CAAC;AAAA,MAC9D;AAEA,aACE,QAAQ,KAAK,CAAC,WAAW,OAAO,WAAW,QAAQ,KACnD,QAAQ,MAAM,CAAC,WAAW,WAAW,IAAI;AAAA,IAE7C;AAAA,EACF,CAAC;AAED,SAAO,KAAK,IAAI;AAClB;AAEA,MAAM,wBAAwB,OAAO,YAAoC;AACvE,QAAM,aAAa,QAAQ,IAAI,CAAC,WAAW,OAAO,OAAO;AAEzD,qBAAI,QAAQ;AACZ,qBAAI,MAAM,+CAA+C;AACzD,qBAAI,QAAQ;AACZ,aAAW,QAAQ,CAAC,YAAY,mBAAI,OAAO,KAAK,OAAO,EAAE,CAAC;AAE1D,qBAAI,QAAQ;AACZ,QAAM,SAAS,MAAM,sCAAuB,IAAI;AAEhD,SAAO,WAAW;AACpB;AAEA,MAAM,kBAAkB,OAAO,QAAgB;AAC7C,MAAI;AACF,UAAM,gBAAAA,QAAG,SAAS,MAAM,GAAG;AAAA,EAC7B,SAAS,KAAK;AACZ,YAAI,8BAAgB,KAAK,QAAQ,GAAG;AAClC,yBAAI,IAAI,kBAAkB,GAAG,mBAAmB;AAChD,cAAQ,KAAK,CAAC;AAAA,IAChB;AAEA,UAAM;AAAA,EACR;AACF;AAEA,MAAM,gBAAgB,OACpB,cACA,mBAC4B;AAC5B,QAAM,mBAAmB,aAAa,WAAW,SAAS;AAE1D,MAAI,kBAAkB;AACpB,UAAM,aAAa,aAAa,MAAM,UAAU,MAAM;AAEtD,cAAM,mCAAuB,YAAY,cAAc;AAAA,EACzD,OAAO;AACL,UAAM,cAAc,YAAAC,QAAK,KAAK,8BAAc,YAAY;AAExD,cAAM,uBAAU;AAAA;AAAA,MAEd,SAAS,MAAM;AAAA,MACf,YAAY;AAAA,MACZ,iBAAiB;AAAA,MACjB,YAAY,CAAC;AAAA;AAAA,MAEb,uBAAuB;AAAA,IACzB,CAAC;AAAA,EACH;AAEA,QAAM,iBAAiB;AAAA,IACrB,YAAAA,QAAK,KAAK,QAAQ,IAAI,GAAG,cAAc;AAAA,EACzC;AAEA,MAAI,kBAAkB;AACpB,uBAAI,QAAQ;AACZ,uBAAI;AAAA,MACF;AAAA,MACA,mBAAI;AAAA,YACF,+CAAwB,eAAe,cAAc,EAAE;AAAA,QACvD;AAAA,QACA;AAAA,MACF;AAAA,MACA;AAAA,IACF;AAAA,EACF;AAEA,SAAO;AACT;AAEA,MAAM,kBAAkB,YAAY;AAClC,QAAM,oBAAoB,MAAM,+BAAgB,IAAI;AAEpD,MAAI,sBAAsB,iBAAY;AACpC,UAAM,aAAa,MAAM,+BAAgB,IAAI;AAC7C,WAAO,UAAU,UAAU;AAAA,EAC7B;AAEA,SAAO;AACT;AAEA,MAAM,uBAAuB,CAAC,YAC5B,OAAO;AAAA,EACL,QAAQ,IAAI,CAAC,EAAE,KAAK,MAAM,CAAC,MAAM,OAAO,IAAI,KAAK,CAAU;AAC7D;AAEK,MAAM,oBAAoB,CAAC,QAAgC;AAChE,QAAM,qBAAqB,YAAAA,QAAK,KAAK,KAAK,wCAAwB;AAElE,MAAI;AAEF,UAAM,iBAAiB,QAAQ,kBAAkB;AAEjD,WAAO,qCAAqB,MAAM,cAAc;AAAA,EAClD,SAAS,KAAK;AACZ,YAAI,8BAAgB,KAAK,kBAAkB,GAAG;AAC5C,aAAO;AAAA,QACL,YAAY;AAAA,QACZ,QAAQ,CAAC;AAAA,QACT,gBAAgB;AAAA,QAChB,MAAM;AAAA,MACR;AAAA,IACF;AAEA,UAAM;AAAA,EACR;AACF;AAEA,MAAM,qBAAqB,OAAO;AAAA,EAChC;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF,MAAkB;AAChB,QAAM,CAAC,SAAS,QAAQ,IAAI,UAAU,MAAM,GAAG;AAE/C,QAAM,OAAO,OAAO,UAAM,2BAAc,CAAC;AAEzC,MAAI,CAAC,SAAS;AACZ,UAAM,IAAI,MAAM,kCAAkC,SAAS,EAAE;AAAA,EAC/D;AAEA,SAAO;AAAA,IACL;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA;AAAA,IAEA,UAAU,YAAY;AAAA,IAEtB;AAAA,IAEA;AAAA,IACA,uBAAuB,iBAAiB,UAAU,WAAW;AAAA,IAC7D,8BACE,iBAAiB,UAAU,WAAW;AAAA,EAC1C;AACF;AAEO,MAAM,sBAAsB,YAAiC;AAClE,QAAM,EAAE,WAAW,cAAc,UAAU,cAAc,IACvD,MAAM,QAAoB,gCAAiB;AAC7C,qBAAI,MAAM,aAAAC,QAAM,KAAK,QAAQ,GAAG,MAAM,aAAAA,QAAM,KAAK,SAAS,CAAC;AAE3D,QAAM,eAAe,MAAM,mBAAmB;AAAA,IAC5C;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,CAAC;AAED,QAAM,iBAAiB;AAEvB,QAAM,gBAAgB,cAAc;AAEpC,qBAAI,QAAQ;AACZ,QAAM,eAAe,MAAM,gBAAgB;AAE3C,QAAM,EAAE,YAAY,QAAQ,QAAQ,gBAAgB,KAAK,IACvD,MAAM,cAAc,cAAc,cAAc;AAElD,MAAI,OAAO,WAAW,GAAG;AACvB,WAAO;AAAA,MACL;AAAA,MACA;AAAA,MACA;AAAA,MACA,kBAAkB;AAAA,MAClB;AAAA,MACA;AAAA,MACA;AAAA,IACF;AAAA,EACF;AAEA,QAAM,iBAAiB,SAAS,OAAO,MAAM,sBAAsB,MAAM;AAEzE,MAAI,gBAAgB;AAClB,uBAAI,QAAQ;AACZ,UAAMC,iBAAgB,MAAM
,QAAQ;AAAA,MAClC,SAAS;AAAA,MACT,SAAS,aAAAD,QAAM,KAAK,YAAY,aAAAA,QAAM,KAAK,YAAY,CAAC,GAAG;AAAA,MAC3D,MAAM;AAAA,IACR,CAAC;AAED,WAAO;AAAA,MACL;AAAA,MACA;AAAA,MACA;AAAA,MACA,kBAAkB;AAAA,MAClB,cAAc,EAAE,GAAG,cAAc,GAAGC,eAAc;AAAA,MAClD;AAAA,MACA;AAAA,IACF;AAAA,EACF;AAEA,qBAAI,QAAQ;AACZ,qBAAI;AAAA,IACF,0BAA0B,aAAAD,QAAM;AAAA,UAC9B,+CAAwB,cAAc,EAAE;AAAA,MACxC;AAAA,IACF,CAAC;AAAA,EACH;AAEA,QAAM,gBAAgB,qBAAqB,MAAM;AAEjD,SAAO;AAAA,IACL;AAAA,IACA;AAAA,IACA;AAAA,IACA,kBAAkB;AAAA,IAClB,cAAc,EAAE,GAAG,cAAc,GAAG,cAAc;AAAA,IAClD;AAAA,IACA;AAAA,EACF;AACF;AAEO,MAAM,oBAAoB,YAAY;AAC3C,MAAI,OAAO;AAEX,QAAM,IAAI;AAAA,IAAQ,CAAC,YACjB,QAAQ,MACL,GAAG,QAAQ,CAAC,UAAW,QAAQ,MAAM,SAAS,CAAE,EAChD,KAAK,OAAO,OAAO;AAAA,EACxB;AAEA,SAAO,KAAK,KAAK;AAEjB,MAAI,SAAS,IAAI;AACf,uBAAI,IAAI,qBAAqB;AAC7B,YAAQ,KAAK,CAAC;AAAA,EAChB;AAEA,MAAI;AAEJ,MAAI;AACF,YAAQ,KAAK,MAAM,IAAI;AAAA,EACzB,QAAQ;AACN,uBAAI,IAAI,0BAA0B;AAClC,YAAQ,KAAK,CAAC;AAAA,EAChB;AAEA,SAAO;AACT;AAEA,MAAM,oBAAoB,YAAiC;AACzD,QAAM,QAAQ,MAAM,kBAAkB;AAEtC,QAAM,SAAS,mCAAsB,UAAU,KAAK;AAEpD,MAAI,CAAC,OAAO,SAAS;AACnB,uBAAI,IAAI,0BAA0B;AAClC,uBAAI,IAAI,OAAO,KAAK;AACpB,YAAQ,KAAK,CAAC;AAAA,EAChB;AAEA,QAAM,EAAE,gBAAgB,kBAAkB,aAAa,IAAI,OAAO;AAElE,QAAM,eAAe;AAAA,IACnB,GAAI,MAAM,mBAAmB,OAAO,KAAK,YAAY;AAAA,IACrD,GAAG,OAAO,KAAK;AAAA,EACjB;AAEA,QAAM,gBAAgB,cAAc;AAEpC,QAAM,EAAE,YAAY,QAAQ,QAAQ,gBAAgB,KAAK,IACvD,MAAM,cAAc,cAAc,cAAc;AAElD,MAAI,CAAC,kBAAkB;AACrB,QAAI,QAAQ;AACV,yBAAI,IAAI,kBAAkB,mBAAI,KAAK,YAAY,GAAG,oBAAoB;AACtE,cAAQ,KAAK,CAAC;AAAA,IAChB;AAEA,WAAO;AAAA,MACL,GAAG,OAAO;AAAA,MACV;AAAA,MACA;AAAA,MACA,cAAc;AAAA,QACZ,GAAG;AAAA,QACH,GAAG,qBAAqB,MAAM;AAAA,MAChC;AAAA,MACA;AAAA,IACF;AAAA,EACF;AAEA,QAAM,WAAW,OAAO,IAAI,CAAC,EAAE,KAAK,MAAM,IAAI;AAE9C,QAAM,WAAW,IAAI,IAAI,OAAO,KAAK,YAAY,CAAC;AAElD,QAAM,UAAU,SAAS,OAAO,CAAC,SAAS,CAAC,SAAS,IAAI,IAAI,CAAC;AAE7D,MAAI,QAAQ,SAAS,GAAG;AACtB,uBAAI,IAAI,+CAA+C;AACvD,uBAAI,QAAQ;AACZ,YAAQ,QAAQ,CAAC,SAAS,mBAAI,IAAI,KAAK,IAAI,EAAE,CAAC;AAC9C,YAAQ,KAAK,CAAC;AAAA,EAChB;AAEA,SAAO;AAAA,IACL,GAAG,OAAO;AAAA,IACV;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF;AACF;AAEO,MAAM,YAAY,MACvB,QAAQ,MAAM,QAAQ,oBAAoB,IAAI,kBAAkB;",
   "names": ["fs", "path", "chalk", "customAnswers"]
 }

package/lib/cli/lint/internalLints/refreshConfigFiles.js
CHANGED
@@ -37,6 +37,7 @@ var import_path = __toESM(require("path"));
 var import_util = require("util");
 var import_fs_extra = require("fs-extra");
 var import_strip_ansi = __toESM(require("strip-ansi"));
+var import__ = require("../../..");
 var import_npmrc = require("../../../utils/npmrc");
 var import_packageManager = require("../../../utils/packageManager");
 var import_template = require("../../../utils/template");
@@ -72,7 +73,10 @@ const REFRESHABLE_CONFIG_FILES = [
   }
 ];
 const refreshConfigFiles = async (mode, logger) => {
-  const manifest = await
+  const [manifest, gitRoot] = await Promise.all([
+    (0, import_package.getDestinationManifest)(),
+    import__.Git.findRoot({ dir: process.cwd() })
+  ]);
   const destinationRoot = import_path.default.dirname(manifest.path);
   const readDestinationFile = (0, import_project.createDestinationFileReader)(destinationRoot);
   const refreshConfigFile = async ({
@@ -84,10 +88,17 @@ const refreshConfigFiles = async (mode, logger) => {
     if (!condition(packageManager2)) {
       return { needsChange: false };
     }
-    const [inputFile, templateFile] = await Promise.all([
+    const [inputFile, templateFile, isGitIgnored] = await Promise.all([
       readDestinationFile(filename),
-      (0, import_template.readBaseTemplateFile)(`_${filename}`)
+      (0, import_template.readBaseTemplateFile)(`_${filename}`),
+      gitRoot ? import__.Git.isFileGitIgnored({
+        gitRoot,
+        absolutePath: import_path.default.join(destinationRoot, filename)
+      }) : false
     ]);
+    if (inputFile === void 0 && isGitIgnored) {
+      return { needsChange: false };
+    }
     const data = additionalMapping(
       inputFile ? (0, import_configFile.mergeWithConfigFile)(templateFile, fileType)(inputFile) : templateFile,
       packageManager2
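
The net effect for `skuba lint` and `skuba format`: a refreshable config file that is gitignored and absent is treated as intentionally unmanaged and is no longer recreated. A minimal sketch of that rule (names simplified; the real check sits inside `refreshConfigFile` above):

```typescript
// Skip regenerating a managed config file when it does not exist on disk
// and Git already ignores it at that path.
const shouldSkipRefresh = ({
  existingContents,
  isGitIgnored,
}: {
  existingContents: string | undefined;
  isGitIgnored: boolean;
}): boolean => existingContents === undefined && isGitIgnored;
```
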

package/lib/cli/lint/internalLints/refreshConfigFiles.js.map
CHANGED
@@ -1,7 +1,7 @@
 {
   "version": 3,
   "sources": ["../../../../src/cli/lint/internalLints/refreshConfigFiles.ts"],
-
"sourcesContent": ["import path from 'path';\nimport { inspect } from 'util';\n\nimport { writeFile } from 'fs-extra';\nimport stripAnsi from 'strip-ansi';\n\nimport type { Logger } from '../../../utils/logging';\nimport { NPMRC_LINES, hasNpmrcSecret } from '../../../utils/npmrc';\nimport {\n type PackageManagerConfig,\n detectPackageManager,\n} from '../../../utils/packageManager';\nimport { readBaseTemplateFile } from '../../../utils/template';\nimport { getDestinationManifest } from '../../configure/analysis/package';\nimport { createDestinationFileReader } from '../../configure/analysis/project';\nimport { mergeWithConfigFile } from '../../configure/processing/configFile';\nimport type { InternalLintResult } from '../internal';\n\nconst ensureNoAuthToken = (fileContents: string) =>\n fileContents\n .split('\\n')\n .filter((line) => !hasNpmrcSecret(line))\n .join('\\n');\n\ntype RefreshableConfigFile = {\n name: string;\n type: 'ignore' | 'npmrc';\n additionalMapping?: (\n s: string,\n packageManager: PackageManagerConfig,\n ) => string;\n if?: (packageManager: PackageManagerConfig) => boolean;\n};\n\nconst removeRedundantNpmrc = (contents: string) => {\n const npmrcLines = contents\n .split('\\n')\n .filter((line) => NPMRC_LINES.includes(line.trim()));\n\n // If we're only left with !.npmrc line we can remove it\n // TODO: Consider if we should generalise this\n if (npmrcLines.length > 0 && npmrcLines.every((line) => line.includes('!'))) {\n return contents\n .split('\\n')\n .filter((line) => !NPMRC_LINES.includes(line.trim()))\n .join('\\n');\n }\n return contents;\n};\n\nexport const REFRESHABLE_CONFIG_FILES: RefreshableConfigFile[] = [\n { name: '.eslintignore', type: 'ignore' },\n {\n name: '.gitignore',\n type: 'ignore',\n additionalMapping: removeRedundantNpmrc,\n },\n { name: '.prettierignore', type: 'ignore' },\n {\n name: '.npmrc',\n type: 'npmrc',\n additionalMapping: ensureNoAuthToken,\n if: (packageManager: PackageManagerConfig) =>\n packageManager.command === 'pnpm',\n },\n {\n name: '.dockerignore',\n type: 'ignore',\n additionalMapping: removeRedundantNpmrc,\n },\n];\n\nexport const refreshConfigFiles = async (\n mode: 'format' | 'lint',\n logger: Logger,\n) => {\n const manifest = await getDestinationManifest();\n\n const destinationRoot = path.dirname(manifest.path);\n\n const readDestinationFile = createDestinationFileReader(destinationRoot);\n\n const refreshConfigFile = async (\n {\n name: filename,\n type: fileType,\n additionalMapping = (s) => s,\n if: condition = () => true,\n }: RefreshableConfigFile,\n packageManager: PackageManagerConfig,\n ) => {\n if (!condition(packageManager)) {\n return { needsChange: false };\n }\n\n const [inputFile, templateFile] = await Promise.all([\n readDestinationFile(filename),\n readBaseTemplateFile(`_${filename}`),\n ]);\n\n const data = additionalMapping(\n inputFile\n ? mergeWithConfigFile(templateFile, fileType)(inputFile)\n : templateFile,\n packageManager,\n );\n\n const filepath = path.join(destinationRoot, filename);\n\n if (mode === 'format') {\n if (data === inputFile) {\n return { needsChange: false };\n }\n\n await writeFile(filepath, data);\n return {\n needsChange: false,\n msg: `Refreshed ${logger.bold(filename)}.`,\n filename,\n };\n }\n\n if (data !== inputFile) {\n return {\n needsChange: true,\n msg: `The ${logger.bold(\n filename,\n )} file is out of date. 
Run \\`${logger.bold(\n packageManager.exec,\n 'skuba',\n 'format',\n )}\\` to update it.`,\n filename,\n };\n }\n\n return { needsChange: false };\n };\n\n const packageManager = await detectPackageManager(destinationRoot);\n\n const results = await Promise.all(\n REFRESHABLE_CONFIG_FILES.map((conf) =>\n refreshConfigFile(conf, packageManager),\n ),\n );\n\n // Log after for reproducible test output ordering\n results.forEach((result) => {\n if (result.msg) {\n logger.warn(result.msg, logger.dim('refresh-config-files'));\n }\n });\n\n const anyNeedChanging = results.some(({ needsChange }) => needsChange);\n\n return {\n ok: !anyNeedChanging,\n fixable: anyNeedChanging,\n annotations: results.flatMap(({ needsChange, filename, msg }) =>\n needsChange && msg\n ? [\n {\n path: filename,\n message: stripAnsi(msg),\n },\n ]\n : [],\n ),\n };\n};\n\nexport const tryRefreshConfigFiles = async (\n mode: 'format' | 'lint',\n logger: Logger,\n): Promise<InternalLintResult> => {\n try {\n return await refreshConfigFiles(mode, logger);\n } catch (err) {\n logger.warn('Failed to refresh config files.');\n logger.subtle(inspect(err));\n\n return {\n ok: false,\n fixable: false,\n annotations: [],\n };\n }\n};\n"],
-
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAiB;AACjB,kBAAwB;AAExB,sBAA0B;AAC1B,wBAAsB;
+
"sourcesContent": ["import path from 'path';\nimport { inspect } from 'util';\n\nimport { writeFile } from 'fs-extra';\nimport stripAnsi from 'strip-ansi';\n\nimport { Git } from '../../..';\nimport type { Logger } from '../../../utils/logging';\nimport { NPMRC_LINES, hasNpmrcSecret } from '../../../utils/npmrc';\nimport {\n type PackageManagerConfig,\n detectPackageManager,\n} from '../../../utils/packageManager';\nimport { readBaseTemplateFile } from '../../../utils/template';\nimport { getDestinationManifest } from '../../configure/analysis/package';\nimport { createDestinationFileReader } from '../../configure/analysis/project';\nimport { mergeWithConfigFile } from '../../configure/processing/configFile';\nimport type { InternalLintResult } from '../internal';\n\nconst ensureNoAuthToken = (fileContents: string) =>\n fileContents\n .split('\\n')\n .filter((line) => !hasNpmrcSecret(line))\n .join('\\n');\n\ntype RefreshableConfigFile = {\n name: string;\n type: 'ignore' | 'npmrc';\n additionalMapping?: (\n s: string,\n packageManager: PackageManagerConfig,\n ) => string;\n if?: (packageManager: PackageManagerConfig) => boolean;\n};\n\nconst removeRedundantNpmrc = (contents: string) => {\n const npmrcLines = contents\n .split('\\n')\n .filter((line) => NPMRC_LINES.includes(line.trim()));\n\n // If we're only left with !.npmrc line we can remove it\n // TODO: Consider if we should generalise this\n if (npmrcLines.length > 0 && npmrcLines.every((line) => line.includes('!'))) {\n return contents\n .split('\\n')\n .filter((line) => !NPMRC_LINES.includes(line.trim()))\n .join('\\n');\n }\n return contents;\n};\n\nexport const REFRESHABLE_CONFIG_FILES: RefreshableConfigFile[] = [\n { name: '.eslintignore', type: 'ignore' },\n {\n name: '.gitignore',\n type: 'ignore',\n additionalMapping: removeRedundantNpmrc,\n },\n { name: '.prettierignore', type: 'ignore' },\n {\n name: '.npmrc',\n type: 'npmrc',\n additionalMapping: ensureNoAuthToken,\n if: (packageManager: PackageManagerConfig) =>\n packageManager.command === 'pnpm',\n },\n {\n name: '.dockerignore',\n type: 'ignore',\n additionalMapping: removeRedundantNpmrc,\n },\n];\n\nexport const refreshConfigFiles = async (\n mode: 'format' | 'lint',\n logger: Logger,\n) => {\n const [manifest, gitRoot] = await Promise.all([\n getDestinationManifest(),\n Git.findRoot({ dir: process.cwd() }),\n ]);\n\n const destinationRoot = path.dirname(manifest.path);\n\n const readDestinationFile = createDestinationFileReader(destinationRoot);\n\n const refreshConfigFile = async (\n {\n name: filename,\n type: fileType,\n additionalMapping = (s) => s,\n if: condition = () => true,\n }: RefreshableConfigFile,\n packageManager: PackageManagerConfig,\n ) => {\n if (!condition(packageManager)) {\n return { needsChange: false };\n }\n\n const [inputFile, templateFile, isGitIgnored] = await Promise.all([\n readDestinationFile(filename),\n readBaseTemplateFile(`_${filename}`),\n gitRoot\n ? Git.isFileGitIgnored({\n gitRoot,\n absolutePath: path.join(destinationRoot, filename),\n })\n : false,\n ]);\n\n // If the file is gitignored and doesn't exist, don't make it\n if (inputFile === undefined && isGitIgnored) {\n return { needsChange: false };\n }\n\n const data = additionalMapping(\n inputFile\n ? 
mergeWithConfigFile(templateFile, fileType)(inputFile)\n : templateFile,\n packageManager,\n );\n\n const filepath = path.join(destinationRoot, filename);\n\n if (mode === 'format') {\n if (data === inputFile) {\n return { needsChange: false };\n }\n\n await writeFile(filepath, data);\n return {\n needsChange: false,\n msg: `Refreshed ${logger.bold(filename)}.`,\n filename,\n };\n }\n\n if (data !== inputFile) {\n return {\n needsChange: true,\n msg: `The ${logger.bold(\n filename,\n )} file is out of date. Run \\`${logger.bold(\n packageManager.exec,\n 'skuba',\n 'format',\n )}\\` to update it.`,\n filename,\n };\n }\n\n return { needsChange: false };\n };\n\n const packageManager = await detectPackageManager(destinationRoot);\n\n const results = await Promise.all(\n REFRESHABLE_CONFIG_FILES.map((conf) =>\n refreshConfigFile(conf, packageManager),\n ),\n );\n\n // Log after for reproducible test output ordering\n results.forEach((result) => {\n if (result.msg) {\n logger.warn(result.msg, logger.dim('refresh-config-files'));\n }\n });\n\n const anyNeedChanging = results.some(({ needsChange }) => needsChange);\n\n return {\n ok: !anyNeedChanging,\n fixable: anyNeedChanging,\n annotations: results.flatMap(({ needsChange, filename, msg }) =>\n needsChange && msg\n ? [\n {\n path: filename,\n message: stripAnsi(msg),\n },\n ]\n : [],\n ),\n };\n};\n\nexport const tryRefreshConfigFiles = async (\n mode: 'format' | 'lint',\n logger: Logger,\n): Promise<InternalLintResult> => {\n try {\n return await refreshConfigFiles(mode, logger);\n } catch (err) {\n logger.warn('Failed to refresh config files.');\n logger.subtle(inspect(err));\n\n return {\n ok: false,\n fixable: false,\n annotations: [],\n };\n }\n};\n"],
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAiB;AACjB,kBAAwB;AAExB,sBAA0B;AAC1B,wBAAsB;AAEtB,eAAoB;AAEpB,mBAA4C;AAC5C,4BAGO;AACP,sBAAqC;AACrC,qBAAuC;AACvC,qBAA4C;AAC5C,wBAAoC;AAGpC,MAAM,oBAAoB,CAAC,iBACzB,aACG,MAAM,IAAI,EACV,OAAO,CAAC,SAAS,KAAC,6BAAe,IAAI,CAAC,EACtC,KAAK,IAAI;AAYd,MAAM,uBAAuB,CAAC,aAAqB;AACjD,QAAM,aAAa,SAChB,MAAM,IAAI,EACV,OAAO,CAAC,SAAS,yBAAY,SAAS,KAAK,KAAK,CAAC,CAAC;AAIrD,MAAI,WAAW,SAAS,KAAK,WAAW,MAAM,CAAC,SAAS,KAAK,SAAS,GAAG,CAAC,GAAG;AAC3E,WAAO,SACJ,MAAM,IAAI,EACV,OAAO,CAAC,SAAS,CAAC,yBAAY,SAAS,KAAK,KAAK,CAAC,CAAC,EACnD,KAAK,IAAI;AAAA,EACd;AACA,SAAO;AACT;AAEO,MAAM,2BAAoD;AAAA,EAC/D,EAAE,MAAM,iBAAiB,MAAM,SAAS;AAAA,EACxC;AAAA,IACE,MAAM;AAAA,IACN,MAAM;AAAA,IACN,mBAAmB;AAAA,EACrB;AAAA,EACA,EAAE,MAAM,mBAAmB,MAAM,SAAS;AAAA,EAC1C;AAAA,IACE,MAAM;AAAA,IACN,MAAM;AAAA,IACN,mBAAmB;AAAA,IACnB,IAAI,CAAC,mBACH,eAAe,YAAY;AAAA,EAC/B;AAAA,EACA;AAAA,IACE,MAAM;AAAA,IACN,MAAM;AAAA,IACN,mBAAmB;AAAA,EACrB;AACF;AAEO,MAAM,qBAAqB,OAChC,MACA,WACG;AACH,QAAM,CAAC,UAAU,OAAO,IAAI,MAAM,QAAQ,IAAI;AAAA,QAC5C,uCAAuB;AAAA,IACvB,aAAI,SAAS,EAAE,KAAK,QAAQ,IAAI,EAAE,CAAC;AAAA,EACrC,CAAC;AAED,QAAM,kBAAkB,YAAAA,QAAK,QAAQ,SAAS,IAAI;AAElD,QAAM,0BAAsB,4CAA4B,eAAe;AAEvE,QAAM,oBAAoB,OACxB;AAAA,IACE,MAAM;AAAA,IACN,MAAM;AAAA,IACN,oBAAoB,CAAC,MAAM;AAAA,IAC3B,IAAI,YAAY,MAAM;AAAA,EACxB,GACAC,oBACG;AACH,QAAI,CAAC,UAAUA,eAAc,GAAG;AAC9B,aAAO,EAAE,aAAa,MAAM;AAAA,IAC9B;AAEA,UAAM,CAAC,WAAW,cAAc,YAAY,IAAI,MAAM,QAAQ,IAAI;AAAA,MAChE,oBAAoB,QAAQ;AAAA,UAC5B,sCAAqB,IAAI,QAAQ,EAAE;AAAA,MACnC,UACI,aAAI,iBAAiB;AAAA,QACnB;AAAA,QACA,cAAc,YAAAD,QAAK,KAAK,iBAAiB,QAAQ;AAAA,MACnD,CAAC,IACD;AAAA,IACN,CAAC;AAGD,QAAI,cAAc,UAAa,cAAc;AAC3C,aAAO,EAAE,aAAa,MAAM;AAAA,IAC9B;AAEA,UAAM,OAAO;AAAA,MACX,gBACI,uCAAoB,cAAc,QAAQ,EAAE,SAAS,IACrD;AAAA,MACJC;AAAA,IACF;AAEA,UAAM,WAAW,YAAAD,QAAK,KAAK,iBAAiB,QAAQ;AAEpD,QAAI,SAAS,UAAU;AACrB,UAAI,SAAS,WAAW;AACtB,eAAO,EAAE,aAAa,MAAM;AAAA,MAC9B;AAEA,gBAAM,2BAAU,UAAU,IAAI;AAC9B,aAAO;AAAA,QACL,aAAa;AAAA,QACb,KAAK,aAAa,OAAO,KAAK,QAAQ,CAAC;AAAA,QACvC;AAAA,MACF;AAAA,IACF;AAEA,QAAI,SAAS,WAAW;AACtB,aAAO;AAAA,QACL,aAAa;AAAA,QACb,KAAK,OAAO,OAAO;AAAA,UACjB;AAAA,QACF,CAAC,+BAA+B,OAAO;AAAA,UACrCC,gBAAe;AAAA,UACf;AAAA,UACA;AAAA,QACF,CAAC;AAAA,QACD;AAAA,MACF;AAAA,IACF;AAEA,WAAO,EAAE,aAAa,MAAM;AAAA,EAC9B;AAEA,QAAM,iBAAiB,UAAM,4CAAqB,eAAe;AAEjE,QAAM,UAAU,MAAM,QAAQ;AAAA,IAC5B,yBAAyB;AAAA,MAAI,CAAC,SAC5B,kBAAkB,MAAM,cAAc;AAAA,IACxC;AAAA,EACF;AAGA,UAAQ,QAAQ,CAAC,WAAW;AAC1B,QAAI,OAAO,KAAK;AACd,aAAO,KAAK,OAAO,KAAK,OAAO,IAAI,sBAAsB,CAAC;AAAA,IAC5D;AAAA,EACF,CAAC;AAED,QAAM,kBAAkB,QAAQ,KAAK,CAAC,EAAE,YAAY,MAAM,WAAW;AAErE,SAAO;AAAA,IACL,IAAI,CAAC;AAAA,IACL,SAAS;AAAA,IACT,aAAa,QAAQ;AAAA,MAAQ,CAAC,EAAE,aAAa,UAAU,IAAI,MACzD,eAAe,MACX;AAAA,QACE;AAAA,UACE,MAAM;AAAA,UACN,aAAS,kBAAAC,SAAU,GAAG;AAAA,QACxB;AAAA,MACF,IACA,CAAC;AAAA,IACP;AAAA,EACF;AACF;AAEO,MAAM,wBAAwB,OACnC,MACA,WACgC;AAChC,MAAI;AACF,WAAO,MAAM,mBAAmB,MAAM,MAAM;AAAA,EAC9C,SAAS,KAAK;AACZ,WAAO,KAAK,iCAAiC;AAC7C,WAAO,WAAO,qBAAQ,GAAG,CAAC;AAE1B,WAAO;AAAA,MACL,IAAI;AAAA,MACJ,SAAS;AAAA,MACT,aAAa,CAAC;AAAA,IAChB;AAAA,EACF;AACF;",
  "names": ["path", "packageManager", "stripAnsi"]
  }
@@ -34,7 +34,7 @@ __export(packageManager_exports, {
  packageManagerSchema: () => packageManagerSchema
  });
  module.exports = __toCommonJS(packageManager_exports);
- var
+ var import_find_up = __toESM(require("find-up"));
  var import_is_installed_globally = __toESM(require("is-installed-globally"));
  var import_zod = require("zod");
  var import_logging = require("./logging");
@@ -59,10 +59,15 @@ const configForPackageManager = (packageManager) => ({
  });
  const detectPackageManager = async (cwd) => {
  let packageManager = DEFAULT_PACKAGE_MANAGER;
- let raw;
  try {
-
-
+ const [yarnDepth, pnpmDepth] = await Promise.all([
+ findDepth("yarn.lock", cwd),
+ findDepth("pnpm-lock.yaml", cwd)
+ ]);
+ if (yarnDepth === void 0 && pnpmDepth === void 0) {
+ throw new Error("No package manager lockfile found.");
+ }
+ packageManager = (pnpmDepth ?? -1) > (yarnDepth ?? -1) ? "pnpm" : "yarn";
  } catch (err) {
  import_logging.log.warn(
  `Failed to detect package manager; defaulting to ${import_logging.log.bold(
@@ -72,10 +77,6 @@ const detectPackageManager = async (cwd) => {
  import_logging.log.subtle(
  (() => {
  switch (true) {
- case err instanceof import_zod.ZodError:
- return `Expected ${Object.keys(PACKAGE_MANAGERS).join(
- "|"
- )}, received ${raw}`;
  case err instanceof Error:
  return err.message;
  default:
@@ -86,6 +87,10 @@ const detectPackageManager = async (cwd) => {
  }
  return configForPackageManager(packageManager);
  };
+ const findDepth = async (filename, cwd) => {
+ const path = await (0, import_find_up.default)(filename, { cwd });
+ return path ? path.split("/").length : void 0;
+ };
  const packageManagerSchema = import_zod.z.enum(["pnpm", "yarn"]).default(DEFAULT_PACKAGE_MANAGER);
  // Annotate the CommonJS export names for ESM import in node:
  0 && (module.exports = {
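Note: the compiled hunks above swap the previous package manager detection for a lockfile lookup via find-up (the detect-package-manager dependency is dropped from package.json further below). A minimal TypeScript sketch of that detection logic, paraphrased from the source embedded in the updated source map below; the logging fallback and config wrapper are trimmed:

```ts
import findUp from 'find-up';

type PackageManager = 'pnpm' | 'yarn';

// Depth of the nearest matching lockfile above `cwd`, or undefined if none exists.
const findDepth = async (
  filename: string,
  cwd?: string,
): Promise<number | undefined> => {
  const lockfilePath = await findUp(filename, { cwd });
  return lockfilePath ? lockfilePath.split('/').length : undefined;
};

export const detectPackageManager = async (
  cwd?: string,
): Promise<PackageManager> => {
  const [yarnDepth, pnpmDepth] = await Promise.all([
    findDepth('yarn.lock', cwd),
    findDepth('pnpm-lock.yaml', cwd),
  ]);

  if (yarnDepth === undefined && pnpmDepth === undefined) {
    throw new Error('No package manager lockfile found.');
  }

  // The deeper (closer) lockfile wins; ties and yarn-only trees resolve to yarn.
  return (pnpmDepth ?? -1) > (yarnDepth ?? -1) ? 'pnpm' : 'yarn';
};
```

In practice a pnpm-lock.yaml nested closer to the working directory takes precedence over a yarn.lock higher up the tree.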
@@ -1,7 +1,7 @@
  {
  "version": 3,
  "sources": ["../../src/utils/packageManager.ts"],
- "sourcesContent": ["import
- "mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,
- "names": ["isInstalledGlobally"]
"sourcesContent": ["import findUp from 'find-up';\nimport isInstalledGlobally from 'is-installed-globally';\nimport { z } from 'zod';\n\nimport { log } from './logging';\n\n// TODO: consider changing to this to `pnpm` in a future major version.\nexport const DEFAULT_PACKAGE_MANAGER = 'yarn';\n\nexport type PackageManagerConfig =\n (typeof PACKAGE_MANAGERS)[keyof typeof PACKAGE_MANAGERS] & {\n command: PackageManager;\n };\n\nconst PACKAGE_MANAGERS = {\n pnpm: {\n exec: 'pnpm exec',\n install: 'pnpm install',\n runSilent: 'pnpm --silent run',\n update: isInstalledGlobally ? 'pnpm update --global' : 'pnpm update',\n },\n yarn: {\n exec: 'yarn',\n install: 'yarn install',\n runSilent: 'yarn -s',\n update: isInstalledGlobally ? 'yarn global upgrade' : 'yarn upgrade',\n },\n};\n\nexport const configForPackageManager = (\n packageManager: PackageManager,\n): PackageManagerConfig => ({\n ...PACKAGE_MANAGERS[packageManager],\n command: packageManager,\n});\n\nexport const detectPackageManager = async (\n cwd?: string,\n): Promise<PackageManagerConfig> => {\n let packageManager: PackageManager = DEFAULT_PACKAGE_MANAGER;\n\n try {\n const [yarnDepth, pnpmDepth] = await Promise.all([\n findDepth('yarn.lock', cwd),\n findDepth('pnpm-lock.yaml', cwd),\n ]);\n\n if (yarnDepth === undefined && pnpmDepth === undefined) {\n throw new Error('No package manager lockfile found.');\n }\n\n packageManager = (pnpmDepth ?? -1) > (yarnDepth ?? -1) ? 'pnpm' : 'yarn';\n } catch (err) {\n log.warn(\n `Failed to detect package manager; defaulting to ${log.bold(\n DEFAULT_PACKAGE_MANAGER,\n )}.`,\n );\n log.subtle(\n (() => {\n switch (true) {\n case err instanceof Error:\n return err.message;\n\n default:\n return String(err);\n }\n })(),\n );\n }\n\n return configForPackageManager(packageManager);\n};\n\nconst findDepth = async (filename: string, cwd?: string) => {\n const path = await findUp(filename, { cwd });\n return path ? path.split('/').length : undefined;\n};\n\nexport type PackageManager = z.infer<typeof packageManagerSchema>;\n\nexport const packageManagerSchema = z\n .enum(['pnpm', 'yarn'])\n .default(DEFAULT_PACKAGE_MANAGER);\n"],
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,qBAAmB;AACnB,mCAAgC;AAChC,iBAAkB;AAElB,qBAAoB;AAGb,MAAM,0BAA0B;AAOvC,MAAM,mBAAmB;AAAA,EACvB,MAAM;AAAA,IACJ,MAAM;AAAA,IACN,SAAS;AAAA,IACT,WAAW;AAAA,IACX,QAAQ,6BAAAA,UAAsB,yBAAyB;AAAA,EACzD;AAAA,EACA,MAAM;AAAA,IACJ,MAAM;AAAA,IACN,SAAS;AAAA,IACT,WAAW;AAAA,IACX,QAAQ,6BAAAA,UAAsB,wBAAwB;AAAA,EACxD;AACF;AAEO,MAAM,0BAA0B,CACrC,oBAC0B;AAAA,EAC1B,GAAG,iBAAiB,cAAc;AAAA,EAClC,SAAS;AACX;AAEO,MAAM,uBAAuB,OAClC,QACkC;AAClC,MAAI,iBAAiC;AAErC,MAAI;AACF,UAAM,CAAC,WAAW,SAAS,IAAI,MAAM,QAAQ,IAAI;AAAA,MAC/C,UAAU,aAAa,GAAG;AAAA,MAC1B,UAAU,kBAAkB,GAAG;AAAA,IACjC,CAAC;AAED,QAAI,cAAc,UAAa,cAAc,QAAW;AACtD,YAAM,IAAI,MAAM,oCAAoC;AAAA,IACtD;AAEA,sBAAkB,aAAa,OAAO,aAAa,MAAM,SAAS;AAAA,EACpE,SAAS,KAAK;AACZ,uBAAI;AAAA,MACF,mDAAmD,mBAAI;AAAA,QACrD;AAAA,MACF,CAAC;AAAA,IACH;AACA,uBAAI;AAAA,OACD,MAAM;AACL,gBAAQ,MAAM;AAAA,UACZ,KAAK,eAAe;AAClB,mBAAO,IAAI;AAAA,UAEb;AACE,mBAAO,OAAO,GAAG;AAAA,QACrB;AAAA,MACF,GAAG;AAAA,IACL;AAAA,EACF;AAEA,SAAO,wBAAwB,cAAc;AAC/C;AAEA,MAAM,YAAY,OAAO,UAAkB,QAAiB;AAC1D,QAAM,OAAO,UAAM,eAAAC,SAAO,UAAU,EAAE,IAAI,CAAC;AAC3C,SAAO,OAAO,KAAK,MAAM,GAAG,EAAE,SAAS;AACzC;AAIO,MAAM,uBAAuB,aACjC,KAAK,CAAC,QAAQ,MAAM,CAAC,EACrB,QAAQ,uBAAuB;",
+ "names": ["isInstalledGlobally", "findUp"]
  }
package/package.json
CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "skuba",
- "version": "8.0
+ "version": "8.1.0-configure-stdin-20240509055640",
  "private": false,
  "description": "SEEK development toolkit for backend applications and packages",
  "homepage": "https://github.com/seek-oss/skuba#readme",
@@ -55,19 +55,19 @@
  "@octokit/graphql": "^8.0.0",
  "@octokit/graphql-schema": "^15.3.0",
  "@octokit/rest": "^20.0.0",
- "@octokit/types": "^
+ "@octokit/types": "^13.0.0",
  "@types/jest": "^29.0.0",
  "@types/node": ">=18.12",
  "chalk": "^4.1.0",
  "concurrently": "^8.0.0",
- "detect-package-manager": "^3.0.1",
  "dotenv": "^16.0.0",
  "ejs": "^3.1.6",
  "enquirer": "^2.3.6",
  "esbuild": "~0.20.0",
- "eslint": "^8.
+ "eslint": "^8.56.0",
  "execa": "^5.0.0",
  "fast-glob": "^3.3.2",
+ "find-up": "^5.0.0",
  "fs-extra": "^11.0.0",
  "function-arguments": "^1.0.9",
  "get-port": "^5.1.1",
@@ -132,14 +132,13 @@
  "type-fest": "2.19.0"
  },
  "peerDependencies": {
- "skuba-dive": "2
+ "skuba-dive": "1 || 2"
  },
  "peerDependenciesMeta": {
  "skuba-dive": {
  "optional": true
  }
  },
- "packageManager": "pnpm@8.15.5",
  "engines": {
  "node": ">=18.18.0"
  },
@@ -162,11 +161,11 @@
  "lint:md": "remark --frail --quiet .",
  "release": "pnpm --silent build && changeset publish",
  "skuba": "pnpm --silent build && pnpm --silent skuba:exec",
- "skuba:exec": "node --
+ "skuba:exec": "node --experimental-vm-modules lib/skuba",
  "stage": "changeset version && node ./.changeset/inject.js && pnpm format",
- "test": "pnpm --silent skuba test --selectProjects unit",
+ "test": "pnpm --silent skuba test --selectProjects unit --",
  "test:ci": "pnpm --silent skuba test --runInBand",
- "test:int": "pnpm --silent skuba test --
+ "test:int": "pnpm --silent skuba test --selectProjects integration --runInBand",
  "test:template": "scripts/test-template.sh",
  "test:watch": "pnpm --silent skuba test --runInBand --watch"
  }
@@ -9,9 +9,10 @@ configs:
  NPM_READ_TOKEN: arn:aws:secretsmanager:ap-southeast-2:987872074697:secret:npm/npm-read-token

  - &docker-ecr-cache
- seek-oss/docker-ecr-cache#v2.
+ seek-oss/docker-ecr-cache#v2.2.0: &docker-ecr-cache-defaults
  cache-on:
  - .npmrc
+ - package.json#.packageManager
  - pnpm-lock.yaml
  dockerfile: Dockerfile.dev-deps
  secrets: id=npm,src=tmp/.npmrc
@@ -37,7 +38,7 @@ steps:
  plugins:
  - *aws-sm
  - *private-npm
- - seek-oss/docker-ecr-cache#v2.
+ - seek-oss/docker-ecr-cache#v2.2.0:
  <<: *docker-ecr-cache-defaults
  skip-pull-from-cache: true

@@ -2,7 +2,9 @@

  FROM --platform=<%- platformName %> node:20-alpine AS dev-deps

- RUN
+ RUN --mount=type=bind,source=package.json,target=package.json \
+ corepack enable pnpm && corepack install
+
  RUN pnpm config set store-dir /root/.pnpm-store

  WORKDIR /workdir
@@ -20,11 +20,12 @@
  "@types/express": "^4.17.13",
  "@types/node": "^20.9.0",
  "@types/supertest": "^6.0.0",
- "
+ "mime": "^4.0.1",
+ "pino-pretty": "^11.0.0",
  "skuba": "*",
  "supertest": "^6.1.6"
  },
- "packageManager": "pnpm@
+ "packageManager": "pnpm@9.0.2",
  "engines": {
  "node": ">=20"
  }
@@ -1,6 +1,8 @@
  agents:
  queue: <%- prodBuildkiteQueueName %>

+ # Allow conditional unblock + dodgy CI stuffs
+
  configs:
  plugins:
  - &aws-sm
@@ -9,9 +11,10 @@ configs:
  NPM_READ_TOKEN: arn:aws:secretsmanager:ap-southeast-2:987872074697:secret:npm/npm-read-token

  - &docker-ecr-cache
- seek-oss/docker-ecr-cache#v2.
+ seek-oss/docker-ecr-cache#v2.2.0:
  cache-on:
  - .npmrc
+ - package.json#.packageManager
  - pnpm-lock.yaml
  secrets: id=npm,src=tmp/.npmrc

@@ -2,7 +2,9 @@

  FROM --platform=<%- platformName %> node:20-alpine AS dev-deps

- RUN
+ RUN --mount=type=bind,source=package.json,target=package.json \
+ corepack enable pnpm && corepack install
+
  RUN pnpm config set store-dir /root/.pnpm-store

  WORKDIR /workdir
@@ -9,9 +9,10 @@ configs:
  NPM_READ_TOKEN: arn:aws:secretsmanager:ap-southeast-2:987872074697:secret:npm/npm-read-token

  - &docker-ecr-cache
- seek-oss/docker-ecr-cache#v2.
+ seek-oss/docker-ecr-cache#v2.2.0: &docker-ecr-cache-defaults
  cache-on:
  - .npmrc
+ - package.json#.packageManager
  - pnpm-lock.yaml
  dockerfile: Dockerfile.dev-deps
  secrets: id=npm,src=tmp/.npmrc
@@ -37,7 +38,7 @@ steps:
  plugins:
  - *aws-sm
  - *private-npm
- - seek-oss/docker-ecr-cache#v2.
+ - seek-oss/docker-ecr-cache#v2.2.0:
  <<: *docker-ecr-cache-defaults
  skip-pull-from-cache: true

@@ -2,7 +2,9 @@

  FROM --platform=<%- platformName %> node:20-alpine AS dev-deps

- RUN
+ RUN --mount=type=bind,source=package.json,target=package.json \
+ corepack enable pnpm && corepack install
+
  RUN pnpm config set store-dir /root/.pnpm-store

  WORKDIR /workdir
@@ -15,11 +15,11 @@
  "@koa/router": "^12.0.0",
  "@opentelemetry/api": "^1.1.0",
  "@opentelemetry/core": "^1.18.1",
- "@opentelemetry/exporter-trace-otlp-grpc": "^0.
- "@opentelemetry/instrumentation-aws-sdk": "^0.
- "@opentelemetry/instrumentation-http": "^0.
+ "@opentelemetry/exporter-trace-otlp-grpc": "^0.50.0",
+ "@opentelemetry/instrumentation-aws-sdk": "^0.40.0",
+ "@opentelemetry/instrumentation-http": "^0.50.0",
  "@opentelemetry/propagator-b3": "^1.18.1",
- "@opentelemetry/sdk-node": "^0.
+ "@opentelemetry/sdk-node": "^0.50.0",
  "@seek/logger": "^6.0.0",
  "aws-sdk": "^2.1039.0",
  "hot-shots": "^10.0.0",
@@ -39,11 +39,12 @@
  "@types/node": "^20.9.0",
  "@types/supertest": "^6.0.0",
  "chance": "^1.1.8",
- "
+ "mime": "^4.0.1",
+ "pino-pretty": "^11.0.0",
  "skuba": "*",
  "supertest": "^6.1.6"
  },
- "packageManager": "pnpm@
+ "packageManager": "pnpm@9.0.2",
  "engines": {
  "node": ">=20"
  }
@@ -9,9 +9,10 @@ configs:
  NPM_READ_TOKEN: arn:aws:secretsmanager:ap-southeast-2:987872074697:secret:npm/npm-read-token

  - &docker-ecr-cache
- seek-oss/docker-ecr-cache#v2.
+ seek-oss/docker-ecr-cache#v2.2.0: &docker-ecr-cache-defaults
  cache-on:
  - .npmrc
+ - package.json#.packageManager
  - pnpm-lock.yaml
  secrets: id=npm,src=tmp/.npmrc

@@ -81,7 +82,7 @@ steps:
  plugins:
  - *aws-sm
  - *private-npm
- - seek-oss/docker-ecr-cache#v2.
+ - seek-oss/docker-ecr-cache#v2.2.0:
  <<: *docker-ecr-cache-defaults
  skip-pull-from-cache: true

@@ -2,7 +2,9 @@

  FROM --platform=<%- platformName %> node:20-alpine AS dev-deps

- RUN
+ RUN --mount=type=bind,source=package.json,target=package.json \
+ corepack enable pnpm && corepack install
+
  RUN pnpm config set store-dir /root/.pnpm-store

  WORKDIR /workdir
@@ -28,16 +28,16 @@
  "@types/chance": "^1.1.3",
  "@types/node": "^20.9.0",
  "aws-sdk-client-mock": "^4.0.0",
- "aws-sdk-client-mock-jest": "^
+ "aws-sdk-client-mock-jest": "^4.0.0",
  "chance": "^1.1.8",
- "pino-pretty": "^
+ "pino-pretty": "^11.0.0",
  "serverless": "^3.37.0",
  "serverless-plugin-canary-deployments": "^0.8.0",
  "serverless-plugin-datadog": "^5.12.0",
  "serverless-prune-plugin": "^2.0.0",
  "skuba": "*"
  },
- "packageManager": "pnpm@
+ "packageManager": "pnpm@9.0.2",
  "engines": {
  "node": ">=20"
  }
@@ -9,9 +9,10 @@ configs:
  NPM_READ_TOKEN: arn:aws:secretsmanager:ap-southeast-2:987872074697:secret:npm/npm-read-token

  - &docker-ecr-cache
- seek-oss/docker-ecr-cache#v2.
+ seek-oss/docker-ecr-cache#v2.2.0: &docker-ecr-cache-defaults
  cache-on:
  - .npmrc
+ - package.json#.packageManager
  - pnpm-lock.yaml
  secrets: id=npm,src=tmp/.npmrc

@@ -77,7 +78,7 @@ steps:
  plugins:
  - *aws-sm
  - *private-npm
- - seek-oss/docker-ecr-cache#v2.
+ - seek-oss/docker-ecr-cache#v2.2.0:
  <<: *docker-ecr-cache-defaults
  skip-pull-from-cache: true

@@ -5,7 +5,9 @@ FROM --platform=<%- platformName %> node:20-alpine AS dev-deps
  # Needed for cdk
  RUN apk add --no-cache bash

- RUN
+ RUN --mount=type=bind,source=package.json,target=package.json \
+ corepack enable pnpm && corepack install
+
  RUN pnpm config set store-dir /root/.pnpm-store

  WORKDIR /workdir
@@ -1,28 +1,5 @@
  {
  "app": "pnpm exec skuba node infra/index.ts",
- "context": {
- "global": {
- "appName": "<%- serviceName %>"
- },
- "dev": {
- "workerLambda": {
- "reservedConcurrency": 2,
- "environment": {
- "ENVIRONMENT": "dev"
- }
- },
- "sourceSnsTopicArn": "TODO: sourceSnsTopicArn"
- },
- "prod": {
- "workerLambda": {
- "reservedConcurrency": 20,
- "environment": {
- "ENVIRONMENT": "prod"
- }
- },
- "sourceSnsTopicArn": "TODO: sourceSnsTopicArn"
- }
- },
  "progress": "events",
  "watch": {
  "include": "src/**/*.ts",
@@ -934,7 +934,7 @@ exports[`returns expected CloudFormation stack for dev 1`] = `
  "RedrivePolicy": {
  "deadLetterTargetArn": {
  "Fn::GetAtt": [
- "
+ "workerqueuedeadletters83F3505C",
  "Arn",
  ],
  },
@@ -997,7 +997,7 @@ exports[`returns expected CloudFormation stack for dev 1`] = `
  },
  "Type": "AWS::SNS::Subscription",
  },
- "
+ "workerqueuedeadletters83F3505C": {
  "DeletionPolicy": "Delete",
  "Properties": {
  "KmsMasterKeyId": {
@@ -1006,7 +1006,7 @@ exports[`returns expected CloudFormation stack for dev 1`] = `
  "Arn",
  ],
  },
- "QueueName": "serviceName-
+ "QueueName": "serviceName-dead-letters",
  },
  "Type": "AWS::SQS::Queue",
  "UpdateReplacePolicy": "Delete",
@@ -1976,7 +1976,7 @@ exports[`returns expected CloudFormation stack for prod 1`] = `
  "RedrivePolicy": {
  "deadLetterTargetArn": {
  "Fn::GetAtt": [
- "
+ "workerqueuedeadletters83F3505C",
  "Arn",
  ],
  },
@@ -2039,7 +2039,7 @@ exports[`returns expected CloudFormation stack for prod 1`] = `
  },
  "Type": "AWS::SNS::Subscription",
  },
- "
+ "workerqueuedeadletters83F3505C": {
  "DeletionPolicy": "Delete",
  "Properties": {
  "KmsMasterKeyId": {
@@ -2048,7 +2048,7 @@ exports[`returns expected CloudFormation stack for prod 1`] = `
  "Arn",
  ],
  },
- "QueueName": "serviceName-
+ "QueueName": "serviceName-dead-letters",
  },
  "Type": "AWS::SQS::Queue",
  "UpdateReplacePolicy": "Delete",
@@ -1,22 +1,6 @@
  import { App, aws_sns } from 'aws-cdk-lib';
  import { Template } from 'aws-cdk-lib/assertions';

- import cdkJson from '../cdk.json';
-
- import { AppStack } from './appStack';
-
- const contexts = [
- {
- stage: 'dev',
- ...cdkJson.context,
- },
-
- {
- stage: 'prod',
- ...cdkJson.context,
- },
- ];
-
  const currentDate = '1212-12-12T12:12:12.121Z';

  jest.useFakeTimers({
@@ -31,14 +15,28 @@ jest.useFakeTimers({
  now: new Date(currentDate),
  });

-
-
-
+ const originalEnv = process.env.ENVIRONMENT;
+
+ afterAll(() => {
+ process.env.ENVIRONMENT = originalEnv;
+ });
+
+ afterEach(() => {
+ jest.resetModules();
+ });
+
+ it.each(['dev', 'prod'])(
+ 'returns expected CloudFormation stack for %s',
+ async (env) => {
+ process.env.ENVIRONMENT = env;
+
+ const { AppStack } = await import('./appStack');
+
  jest
  .spyOn(aws_sns.Topic, 'fromTopicArn')
  .mockImplementation((scope, id) => new aws_sns.Topic(scope, id));

- const app = new App(
+ const app = new App();

  const stack = new AppStack(app, 'appStack');

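Note: the reworked test above selects the environment via process.env.ENVIRONMENT rather than CDK context. A minimal sketch of the pattern, assuming Jest globals and the new ./config module: config.ts parses ENVIRONMENT at import time, so each case sets the variable first, imports the stack lazily, and resets the module registry between cases.

```ts
afterEach(() => {
  // Clear the module registry so ./config re-reads ENVIRONMENT next time.
  jest.resetModules();
});

it.each(['dev', 'prod'])('synthesises the %s stack', async (env) => {
  process.env.ENVIRONMENT = env;

  // Dynamic import defers evaluation of ./config until ENVIRONMENT is set.
  const { AppStack } = await import('./appStack');

  // ...instantiate the stack and assert on the synthesised template here.
});
```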
@@ -15,15 +15,12 @@ import {
  } from 'aws-cdk-lib';
  import type { Construct } from 'constructs';

- import {
+ import { config } from './config';

  export class AppStack extends Stack {
  constructor(scope: Construct, id: string, props?: StackProps) {
  super(scope, id, props);

- const stage = StageContextSchema.parse(this.node.tryGetContext('stage'));
- const context = EnvContextSchema.parse(this.node.tryGetContext(stage));
-
  const accountPrincipal = new aws_iam.AccountPrincipal(this.account);

  const kmsKey = new aws_kms.Key(this, 'kms-key', {
@@ -35,10 +32,14 @@ export class AppStack extends Stack {

  kmsKey.grantEncrypt(accountPrincipal);

- const deadLetterQueue = new aws_sqs.Queue(
-
-
-
+ const deadLetterQueue = new aws_sqs.Queue(
+ this,
+ 'worker-queue-dead-letters',
+ {
+ queueName: '<%- serviceName %>-dead-letters',
+ encryptionMasterKey: kmsKey,
+ },
+ );

  const queue = new aws_sqs.Queue(this, 'worker-queue', {
  queueName: '<%- serviceName %>',
@@ -52,7 +53,7 @@ export class AppStack extends Stack {
  const topic = aws_sns.Topic.fromTopicArn(
  this,
  'source-topic',
-
+ config.sourceSnsTopicArn,
  );

  topic.addSubscription(new aws_sns_subscriptions.SqsSubscription(queue));
@@ -89,13 +90,13 @@ export class AppStack extends Stack {
  functionName: '<%- serviceName %>',
  environment: {
  ...defaultWorkerEnvironment,
- ...
+ ...config.workerLambda.environment,
  },
  // https://github.com/aws/aws-cdk/issues/28237
  // This forces the lambda to be updated on every deployment
  // If you do not wish to use hotswap, you can remove the new Date().toISOString() from the description
  description: `Updated at ${new Date().toISOString()}`,
- reservedConcurrentExecutions:
+ reservedConcurrentExecutions: config.workerLambda.reservedConcurrency,
  });

  const alias = worker.addAlias('live', {
@@ -104,7 +105,7 @@ export class AppStack extends Stack {

  alias.addEventSource(
  new aws_lambda_event_sources.SqsEventSource(queue, {
- maxConcurrency:
+ maxConcurrency: config.workerLambda.reservedConcurrency,
  }),
  );

@@ -119,7 +120,7 @@ export class AppStack extends Stack {
  functionName: '<%- serviceName %>-pre-hook',
  environment: {
  ...defaultWorkerEnvironment,
- ...
+ ...config.workerLambda.environment,
  FUNCTION_NAME_TO_INVOKE: worker.functionName,
  },
  },
@@ -138,7 +139,7 @@ export class AppStack extends Stack {
  functionName: '<%- serviceName %>-post-hook',
  environment: {
  ...defaultWorkerEnvironment,
- ...
+ ...config.workerLambda.environment,
  FUNCTION_NAME_TO_PRUNE: worker.functionName,
  },
  },
@@ -0,0 +1,41 @@
+ import { z } from 'zod';
+
+ const environment = z.enum(['dev', 'prod']).parse(process.env.ENVIRONMENT);
+
+ type Environment = typeof environment;
+
+ export interface Config {
+ appName: string;
+ workerLambda: {
+ reservedConcurrency: number;
+ environment: {
+ ENVIRONMENT: Environment;
+ };
+ };
+ sourceSnsTopicArn: string;
+ }
+
+ export const configs: Record<Environment, Config> = {
+ dev: {
+ appName: '<%- serviceName %>',
+ workerLambda: {
+ reservedConcurrency: 2,
+ environment: {
+ ENVIRONMENT: 'dev',
+ },
+ },
+ sourceSnsTopicArn: 'TODO: sourceSnsTopicArn',
+ },
+ prod: {
+ appName: '<%- serviceName %>',
+ workerLambda: {
+ reservedConcurrency: 20,
+ environment: {
+ ENVIRONMENT: 'prod',
+ },
+ },
+ sourceSnsTopicArn: 'TODO: sourceSnsTopicArn',
+ },
+ };
+
+ export const config = configs[environment];
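Note: this new infra/config.ts replaces the zod-validated cdk.json context removed above. A minimal consumption sketch, assuming ENVIRONMENT is supplied by the deployment environment (the log call is illustrative only):

```ts
import { config } from './config';

// With ENVIRONMENT=dev this resolves the dev entry of `configs`
// (reservedConcurrency 2); with ENVIRONMENT=prod it resolves prod (20).
console.log(config.appName, config.workerLambda.reservedConcurrency);
```

Because the enum parse happens at module load, an unset or misspelt ENVIRONMENT fails fast with a zod error instead of deploying with a silent default.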
@@ -1,14 +1,11 @@
  import { App } from 'aws-cdk-lib';

- import { GlobalContextSchema } from '../shared/context-types';
-
  import { AppStack } from './appStack';
+ import { config } from './config';

  const app = new App();

- const context = GlobalContextSchema.parse(app.node.tryGetContext('global'));
-
  // eslint-disable-next-line no-new
  new AppStack(app, 'appStack', {
- stackName:
+ stackName: config.appName,
  });
@@ -2,7 +2,7 @@
  "private": true,
  "license": "UNLICENSED",
  "scripts": {
- "deploy": "cdk deploy appStack --require-approval never
+ "deploy": "cdk deploy appStack --require-approval never",
  "deploy:hotswap": "pnpm --silent deploy --hotswap",
  "deploy:watch": "pnpm --silent deploy:hotswap --watch",
  "format": "skuba format",
@@ -26,7 +26,7 @@
  "constructs": "^10.0.17",
  "skuba": "*"
  },
- "packageManager": "pnpm@
+ "packageManager": "pnpm@9.0.2",
  "engines": {
  "node": ">=20"
  }
@@ -1,22 +0,0 @@
- import { z } from 'zod';
-
- export const StageContextSchema = z.enum(['dev', 'prod']);
- export type StageContext = z.infer<typeof StageContextSchema>;
-
- export const EnvContextSchema = z.object({
- workerLambda: z.object({
- reservedConcurrency: z.number(),
- environment: z.object({
- ENVIRONMENT: z.string(),
- }),
- }),
- sourceSnsTopicArn: z.string(),
- });
-
- export type EnvContext = z.infer<typeof EnvContextSchema>;
-
- export const GlobalContextSchema = z.object({
- appName: z.string(),
- });
-
- export type GlobalContext = z.infer<typeof GlobalContextSchema>;