skuba 12.1.0-hoist-less-20250722131939 → 12.1.0-no-sync-in-promise-iterable-20250801105434
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/lib/cli/lint/internalLints/upgrade/patches/12.0.2/index.d.ts +2 -0
- package/lib/cli/lint/internalLints/upgrade/patches/12.0.2/index.js +35 -0
- package/lib/cli/lint/internalLints/upgrade/patches/12.0.2/index.js.map +7 -0
- package/lib/cli/lint/internalLints/upgrade/patches/12.0.2/unhandledRejections.d.ts +4 -0
- package/lib/cli/lint/internalLints/upgrade/patches/12.0.2/unhandledRejections.js +162 -0
- package/lib/cli/lint/internalLints/upgrade/patches/12.0.2/unhandledRejections.js.map +7 -0
- package/lib/cli/node/index.js +2 -2
- package/lib/cli/node/index.js.map +2 -2
- package/lib/cli/start/index.js +2 -2
- package/lib/cli/start/index.js.map +1 -1
- package/package.json +6 -7
- package/template/express-rest-api/package.json +1 -1
- package/template/express-rest-api/src/listen.ts +6 -0
- package/template/greeter/package.json +2 -2
- package/template/koa-rest-api/package.json +5 -5
- package/template/koa-rest-api/src/listen.ts +6 -0
- package/template/lambda-sqs-worker-cdk/infra/__snapshots__/appStack.test.ts.snap +15 -1
- package/template/lambda-sqs-worker-cdk/infra/appStack.ts +5 -1
- package/template/lambda-sqs-worker-cdk/infra/config.ts +3 -0
- package/template/lambda-sqs-worker-cdk/package.json +3 -3
- package/template/lambda-sqs-worker-cdk/src/app.test.ts +88 -48
- package/template/lambda-sqs-worker-cdk/src/app.ts +7 -9
- package/template/lambda-sqs-worker-cdk/src/framework/handler.test.ts +8 -3
- package/template/lambda-sqs-worker-cdk/src/framework/handler.ts +38 -5
- package/template/lambda-sqs-worker-cdk/src/framework/logging.ts +11 -3
- package/template/lambda-sqs-worker-cdk/src/testing/handler.ts +4 -1
package/lib/cli/lint/internalLints/upgrade/patches/12.0.2/index.js
ADDED
@@ -0,0 +1,35 @@
"use strict";
var __defProp = Object.defineProperty;
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
var __getOwnPropNames = Object.getOwnPropertyNames;
var __hasOwnProp = Object.prototype.hasOwnProperty;
var __export = (target, all) => {
  for (var name in all)
    __defProp(target, name, { get: all[name], enumerable: true });
};
var __copyProps = (to, from, except, desc) => {
  if (from && typeof from === "object" || typeof from === "function") {
    for (let key of __getOwnPropNames(from))
      if (!__hasOwnProp.call(to, key) && key !== except)
        __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
  }
  return to;
};
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
var __exports = {};
__export(__exports, {
  patches: () => patches
});
module.exports = __toCommonJS(__exports);
var import_unhandledRejections = require("./unhandledRejections.js");
const patches = [
  {
    apply: import_unhandledRejections.tryPatchUnhandledRejections,
    description: "Add event handler to log unhandled promise rejections instead of crashing the process"
  }
];
// Annotate the CommonJS export names for ESM import in node:
0 && (module.exports = {
  patches
});
//# sourceMappingURL=index.js.map
package/lib/cli/lint/internalLints/upgrade/patches/12.0.2/index.js.map
ADDED
@@ -0,0 +1,7 @@
{
"version": 3,
"sources": ["../../../../../../../src/cli/lint/internalLints/upgrade/patches/12.0.2/index.ts"],
"sourcesContent": ["import type { Patches } from '../../index.js';\n\nimport { tryPatchUnhandledRejections } from './unhandledRejections.js';\n\nexport const patches: Patches = [\n {\n apply: tryPatchUnhandledRejections,\n description:\n 'Add event handler to log unhandled promise rejections instead of crashing the process',\n },\n];\n"],
"mappings": ";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAEA,iCAA4C;AAErC,MAAM,UAAmB;AAAA,EAC9B;AAAA,IACE,OAAO;AAAA,IACP,aACE;AAAA,EACJ;AACF;",
"names": []
}
package/lib/cli/lint/internalLints/upgrade/patches/12.0.2/unhandledRejections.js
ADDED
@@ -0,0 +1,162 @@
"use strict";
var __create = Object.create;
var __defProp = Object.defineProperty;
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
var __getOwnPropNames = Object.getOwnPropertyNames;
var __getProtoOf = Object.getPrototypeOf;
var __hasOwnProp = Object.prototype.hasOwnProperty;
var __export = (target, all) => {
  for (var name in all)
    __defProp(target, name, { get: all[name], enumerable: true });
};
var __copyProps = (to, from, except, desc) => {
  if (from && typeof from === "object" || typeof from === "function") {
    for (let key of __getOwnPropNames(from))
      if (!__hasOwnProp.call(to, key) && key !== except)
        __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
  }
  return to;
};
var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(
  // If the importer is in node compatibility mode or this is not an ESM
  // file that has been converted to a CommonJS file using a Babel-
  // compatible transform (i.e. "__esModule" has not been set), then set
  // "default" to the CommonJS "module.exports" for node compatibility.
  isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target,
  mod
));
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
var unhandledRejections_exports = {};
__export(unhandledRejections_exports, {
  IMPORT_REGEX: () => IMPORT_REGEX,
  NAMED_EXPORT_REGEX: () => NAMED_EXPORT_REGEX,
  tryPatchUnhandledRejections: () => tryPatchUnhandledRejections
});
module.exports = __toCommonJS(unhandledRejections_exports);
var import_path = __toESM(require("path"));
var import_util = require("util");
var import_fast_glob = require("fast-glob");
var import_fs_extra = __toESM(require("fs-extra"));
var import_error = require("../../../../../../utils/error.js");
var import_logging = require("../../../../../../utils/logging.js");
var import_prettier = require("../../../../../configure/processing/prettier.js");
const addListener = (identifier) => `
// Report unhandled rejections instead of crashing the process
// Make sure to monitor these reports and alert as appropriate
process.on('unhandledRejection', (err) =>
  ${identifier}.error(err, 'Unhandled promise rejection'),
);
`.trim();
const tryReadFilesSequentially = async (filepaths) => {
  for (const filepath of filepaths) {
    try {
      const contents = await import_fs_extra.default.promises.readFile(filepath, "utf8");
      return { contents, filepath };
    } catch (err) {
      if ((0, import_error.isErrorWithCode)(err, "ENOENT")) {
        continue;
      }
      throw err;
    }
  }
  return;
};
const IMPORT_REGEX = /import\s+(?:\{\s*(\w*[Ll]ogger)(?:\s+as\s+(\w*[Ll]ogger))?\s*\}|(\w*[Ll]ogger))\s+from\s+['"][^'"]+\/(?:logger|logging)(?:\.js)?['"]/u;
const NAMED_EXPORT_REGEX = /export\s+(?:const\s+|\{[^{}]*)\b(\w*[Ll]ogger)\b/u;
const findLogger = async ({
  contents,
  root
}) => {
  const importResult = IMPORT_REGEX.exec(contents);
  {
    const identifier = importResult?.[3] ?? importResult?.[2] ?? importResult?.[1];
    if (identifier) {
      return { identifier };
    }
  }
  const loggerPaths = await (0, import_fast_glob.glob)("**/{logger,logging}.ts", {
    cwd: root,
    ignore: ["**/.git", "**/node_modules"]
  });
  const loggingModule = await tryReadFilesSequentially(loggerPaths);
  if (!loggingModule) {
    return { identifier: "console" };
  }
  const parsedPath = import_path.default.parse(import_path.default.relative(root, loggingModule.filepath));
  const importPath = import_path.default.join(parsedPath.dir, parsedPath.name);
  const namedExportResult = NAMED_EXPORT_REGEX.exec(loggingModule.contents);
  if (namedExportResult?.[1]) {
    const identifier = namedExportResult[1];
    return {
      identifier: namedExportResult[1],
      import: `import { ${identifier} } from '${importPath}';`
    };
  }
  if (loggingModule.contents.includes("export default")) {
    return {
      identifier: "logger",
      import: `import logger from '${importPath}';`
    };
  }
  return { identifier: "console" };
};
const patchUnhandledRejections = async (mode) => {
  const filepaths = await (0, import_fast_glob.glob)("**/src/listen.ts", {
    ignore: ["**/.git", "**/node_modules"]
  });
  let hasPatched = false;
  for (const filepath of filepaths) {
    const contents = await import_fs_extra.default.promises.readFile(filepath, "utf8");
    if (contents.includes("unhandledRejection")) {
      import_logging.log.subtle(
        "Skipping entry point that appears to have an unhandled rejection listener:",
        filepath
      );
      continue;
    }
    const root = import_path.default.dirname(import_path.default.dirname(filepath));
    const logger = await findLogger({ contents, root });
    import_logging.log.subtle(
      "Logging unhandled rejections to",
      logger.identifier,
      "in file:",
      filepath
    );
    const patched = [
      contents,
      ...[logger.import ? [logger.import] : []],
      addListener(logger.identifier)
    ].join("\n\n");
    const newContents = await (0, import_prettier.formatPrettier)(patched, { parser: "typescript" });
    if (mode === "lint") {
      return { result: "apply" };
    }
    await import_fs_extra.default.promises.writeFile(filepath, newContents);
    hasPatched = true;
  }
  if (hasPatched) {
    return { result: "apply" };
  }
  return {
    result: "skip",
    reason: "no applicable src/listen.ts entry points found"
  };
};
const tryPatchUnhandledRejections = async ({
  mode
}) => {
  try {
    return await patchUnhandledRejections(mode);
  } catch (err) {
    import_logging.log.warn("Failed to patch listeners for unhandled promise rejections");
    import_logging.log.subtle((0, import_util.inspect)(err));
    return { result: "skip", reason: "due to an error" };
  }
};
// Annotate the CommonJS export names for ESM import in node:
0 && (module.exports = {
  IMPORT_REGEX,
  NAMED_EXPORT_REGEX,
  tryPatchUnhandledRejections
});
//# sourceMappingURL=unhandledRejections.js.map
package/lib/cli/lint/internalLints/upgrade/patches/12.0.2/unhandledRejections.js.map
ADDED
@@ -0,0 +1,7 @@
{
"version": 3,
"sources": ["../../../../../../../src/cli/lint/internalLints/upgrade/patches/12.0.2/unhandledRejections.ts"],
"sourcesContent": ["import path from 'path';\nimport { inspect } from 'util';\n\nimport { glob } from 'fast-glob';\nimport fs from 'fs-extra';\n\nimport { isErrorWithCode } from '../../../../../../utils/error.js';\nimport { log } from '../../../../../../utils/logging.js';\nimport { formatPrettier } from '../../../../../configure/processing/prettier.js';\nimport type { PatchFunction, PatchReturnType } from '../../index.js';\n\nconst addListener = (identifier: string) =>\n  `\n// Report unhandled rejections instead of crashing the process\n// Make sure to monitor these reports and alert as appropriate\nprocess.on('unhandledRejection', (err) =>\n  ${identifier}.error(err, 'Unhandled promise rejection'),\n);\n`.trim();\n\nconst tryReadFilesSequentially = async (\n  filepaths: string[],\n): Promise<{ contents: string; filepath: string } | undefined> => {\n  for (const filepath of filepaths) {\n    try {\n      const contents = await fs.promises.readFile(filepath, 'utf8');\n\n      return { contents, filepath };\n    } catch (err) {\n      if (isErrorWithCode(err, 'ENOENT')) {\n        continue;\n      }\n\n      throw err;\n    }\n  }\n\n  return;\n};\n\nexport const IMPORT_REGEX =\n  /import\\s+(?:\\{\\s*(\\w*[Ll]ogger)(?:\\s+as\\s+(\\w*[Ll]ogger))?\\s*\\}|(\\w*[Ll]ogger))\\s+from\\s+['\"][^'\"]+\\/(?:logger|logging)(?:\\.js)?['\"]/u;\n\nexport const NAMED_EXPORT_REGEX =\n  /export\\s+(?:const\\s+|\\{[^{}]*)\\b(\\w*[Ll]ogger)\\b/u;\n\nconst findLogger = async ({\n  contents,\n  root,\n}: {\n  contents: string;\n  root: string;\n}): Promise<{ identifier: string; import?: string }> => {\n  const importResult = IMPORT_REGEX.exec(contents);\n\n  {\n    const identifier =\n      importResult?.[3] ?? importResult?.[2] ?? importResult?.[1];\n\n    if (identifier) {\n      return { identifier };\n    }\n  }\n\n  const loggerPaths = await glob('**/{logger,logging}.ts', {\n    cwd: root,\n    ignore: ['**/.git', '**/node_modules'],\n  });\n\n  const loggingModule = await tryReadFilesSequentially(loggerPaths);\n\n  if (!loggingModule) {\n    return { identifier: 'console' };\n  }\n\n  const parsedPath = path.parse(path.relative(root, loggingModule.filepath));\n\n  const importPath = path.join(parsedPath.dir, parsedPath.name);\n\n  const namedExportResult = NAMED_EXPORT_REGEX.exec(loggingModule.contents);\n\n  if (namedExportResult?.[1]) {\n    const identifier = namedExportResult[1];\n\n    return {\n      identifier: namedExportResult[1],\n      import: `import { ${identifier} } from '${importPath}';`,\n    };\n  }\n\n  if (loggingModule.contents.includes('export default')) {\n    return {\n      identifier: 'logger',\n      import: `import logger from '${importPath}';`,\n    };\n  }\n\n  return { identifier: 'console' };\n};\n\nconst patchUnhandledRejections = async (\n  mode: 'format' | 'lint',\n): Promise<PatchReturnType> => {\n  const filepaths = await glob('**/src/listen.ts', {\n    ignore: ['**/.git', '**/node_modules'],\n  });\n\n  let hasPatched = false;\n\n  for (const filepath of filepaths) {\n    const contents = await fs.promises.readFile(filepath, 'utf8');\n\n    if (contents.includes('unhandledRejection')) {\n      log.subtle(\n        'Skipping entry point that appears to have an unhandled rejection listener:',\n        filepath,\n      );\n      continue;\n    }\n\n    const root = path.dirname(path.dirname(filepath));\n\n    const logger = await findLogger({ contents, root });\n\n    log.subtle(\n      'Logging unhandled rejections to',\n      logger.identifier,\n      'in file:',\n      filepath,\n    );\n\n    const patched = [\n      contents,\n\n      ...[logger.import ? [logger.import] : []],\n\n      addListener(logger.identifier),\n    ].join('\\n\\n');\n\n    const newContents = await formatPrettier(patched, { parser: 'typescript' });\n\n    if (mode === 'lint') {\n      return { result: 'apply' };\n    }\n\n    await fs.promises.writeFile(filepath, newContents);\n\n    hasPatched = true;\n  }\n\n  if (hasPatched) {\n    return { result: 'apply' };\n  }\n\n  return {\n    result: 'skip',\n    reason: 'no applicable src/listen.ts entry points found',\n  };\n};\n\nexport const tryPatchUnhandledRejections: PatchFunction = async ({\n  mode,\n}): Promise<PatchReturnType> => {\n  try {\n    return await patchUnhandledRejections(mode);\n  } catch (err) {\n    log.warn('Failed to patch listeners for unhandled promise rejections');\n    log.subtle(inspect(err));\n    return { result: 'skip', reason: 'due to an error' };\n  }\n};\n"],
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAiB;AACjB,kBAAwB;AAExB,uBAAqB;AACrB,sBAAe;AAEf,mBAAgC;AAChC,qBAAoB;AACpB,sBAA+B;AAG/B,MAAM,cAAc,CAAC,eACnB;AAAA;AAAA;AAAA;AAAA,IAIE,UAAU;AAAA;AAAA,EAEZ,KAAK;AAEP,MAAM,2BAA2B,OAC/B,cACgE;AAChE,aAAW,YAAY,WAAW;AAChC,QAAI;AACF,YAAM,WAAW,MAAM,gBAAAA,QAAG,SAAS,SAAS,UAAU,MAAM;AAE5D,aAAO,EAAE,UAAU,SAAS;AAAA,IAC9B,SAAS,KAAK;AACZ,cAAI,8BAAgB,KAAK,QAAQ,GAAG;AAClC;AAAA,MACF;AAEA,YAAM;AAAA,IACR;AAAA,EACF;AAEA;AACF;AAEO,MAAM,eACX;AAEK,MAAM,qBACX;AAEF,MAAM,aAAa,OAAO;AAAA,EACxB;AAAA,EACA;AACF,MAGwD;AACtD,QAAM,eAAe,aAAa,KAAK,QAAQ;AAE/C;AACE,UAAM,aACJ,eAAe,CAAC,KAAK,eAAe,CAAC,KAAK,eAAe,CAAC;AAE5D,QAAI,YAAY;AACd,aAAO,EAAE,WAAW;AAAA,IACtB;AAAA,EACF;AAEA,QAAM,cAAc,UAAM,uBAAK,0BAA0B;AAAA,IACvD,KAAK;AAAA,IACL,QAAQ,CAAC,WAAW,iBAAiB;AAAA,EACvC,CAAC;AAED,QAAM,gBAAgB,MAAM,yBAAyB,WAAW;AAEhE,MAAI,CAAC,eAAe;AAClB,WAAO,EAAE,YAAY,UAAU;AAAA,EACjC;AAEA,QAAM,aAAa,YAAAC,QAAK,MAAM,YAAAA,QAAK,SAAS,MAAM,cAAc,QAAQ,CAAC;AAEzE,QAAM,aAAa,YAAAA,QAAK,KAAK,WAAW,KAAK,WAAW,IAAI;AAE5D,QAAM,oBAAoB,mBAAmB,KAAK,cAAc,QAAQ;AAExE,MAAI,oBAAoB,CAAC,GAAG;AAC1B,UAAM,aAAa,kBAAkB,CAAC;AAEtC,WAAO;AAAA,MACL,YAAY,kBAAkB,CAAC;AAAA,MAC/B,QAAQ,YAAY,UAAU,YAAY,UAAU;AAAA,IACtD;AAAA,EACF;AAEA,MAAI,cAAc,SAAS,SAAS,gBAAgB,GAAG;AACrD,WAAO;AAAA,MACL,YAAY;AAAA,MACZ,QAAQ,uBAAuB,UAAU;AAAA,IAC3C;AAAA,EACF;AAEA,SAAO,EAAE,YAAY,UAAU;AACjC;AAEA,MAAM,2BAA2B,OAC/B,SAC6B;AAC7B,QAAM,YAAY,UAAM,uBAAK,oBAAoB;AAAA,IAC/C,QAAQ,CAAC,WAAW,iBAAiB;AAAA,EACvC,CAAC;AAED,MAAI,aAAa;AAEjB,aAAW,YAAY,WAAW;AAChC,UAAM,WAAW,MAAM,gBAAAD,QAAG,SAAS,SAAS,UAAU,MAAM;AAE5D,QAAI,SAAS,SAAS,oBAAoB,GAAG;AAC3C,yBAAI;AAAA,QACF;AAAA,QACA;AAAA,MACF;AACA;AAAA,IACF;AAEA,UAAM,OAAO,YAAAC,QAAK,QAAQ,YAAAA,QAAK,QAAQ,QAAQ,CAAC;AAEhD,UAAM,SAAS,MAAM,WAAW,EAAE,UAAU,KAAK,CAAC;AAElD,uBAAI;AAAA,MACF;AAAA,MACA,OAAO;AAAA,MACP;AAAA,MACA;AAAA,IACF;AAEA,UAAM,UAAU;AAAA,MACd;AAAA,MAEA,GAAG,CAAC,OAAO,SAAS,CAAC,OAAO,MAAM,IAAI,CAAC,CAAC;AAAA,MAExC,YAAY,OAAO,UAAU;AAAA,IAC/B,EAAE,KAAK,MAAM;AAEb,UAAM,cAAc,UAAM,gCAAe,SAAS,EAAE,QAAQ,aAAa,CAAC;AAE1E,QAAI,SAAS,QAAQ;AACnB,aAAO,EAAE,QAAQ,QAAQ;AAAA,IAC3B;AAEA,UAAM,gBAAAD,QAAG,SAAS,UAAU,UAAU,WAAW;AAEjD,iBAAa;AAAA,EACf;AAEA,MAAI,YAAY;AACd,WAAO,EAAE,QAAQ,QAAQ;AAAA,EAC3B;AAEA,SAAO;AAAA,IACL,QAAQ;AAAA,IACR,QAAQ;AAAA,EACV;AACF;AAEO,MAAM,8BAA6C,OAAO;AAAA,EAC/D;AACF,MAAgC;AAC9B,MAAI;AACF,WAAO,MAAM,yBAAyB,IAAI;AAAA,EAC5C,SAAS,KAAK;AACZ,uBAAI,KAAK,4DAA4D;AACrE,uBAAI,WAAO,qBAAQ,GAAG,CAAC;AACvB,WAAO,EAAE,QAAQ,QAAQ,QAAQ,kBAAkB;AAAA,EACrD;AACF;",
"names": ["fs", "path"]
}
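
Note on the unhandledRejections patch above: IMPORT_REGEX is what locates an existing logger binding in a project's src/listen.ts, and findLogger prefers capture 3, then 2, then 1. A brief TypeScript illustration with assumed example inputs (not shipped in the package):

// Illustrative import statements that each capture-group branch matches:
const samples = [
  "import { logger } from 'src/framework/logging';", // capture 1: named import
  "import { logger as rootLogger } from '../logging.js';", // capture 2: local alias wins
  "import rootLogger from './logger';", // capture 3: default import
];
// Each sample matches IMPORT_REGEX; findLogger resolves capture 3 ?? 2 ?? 1,
// and falls back to the identifier 'console' when nothing matches.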
package/lib/cli/node/index.js
CHANGED
@@ -44,8 +44,8 @@ const node = async () => {
  const availablePort = await (0, import_get_port.default)();
  const commonArgs = [
    ...args.node,
- "--
-
+ "--env-file-if-exists",
+ ".env",
    "--require",
    require.resolve("tsconfig-paths/register")
  ];
package/lib/cli/node/index.js.map
CHANGED
@@ -1,7 +1,7 @@
  {
  "version": 3,
  "sources": ["../../../src/cli/node/index.ts"],
- "sourcesContent": ["import path from 'path';\n\nimport execa from 'execa';\nimport getPort from 'get-port';\n\nimport { parseRunArgs } from '../../utils/args.js';\nimport { createExec } from '../../utils/exec.js';\nimport { isIpPort } from '../../utils/validation.js';\n\nexport const longRunning = true;\n\nexport const node = async () => {\n  const args = parseRunArgs(process.argv.slice(2));\n\n  const availablePort = await getPort();\n\n  const commonArgs = [\n    ...args.node,\n    '--
- "mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAiB;AAEjB,mBAAkB;AAClB,sBAAoB;AAEpB,kBAA6B;AAC7B,kBAA2B;AAC3B,wBAAyB;AAElB,MAAM,cAAc;AAEpB,MAAM,OAAO,YAAY;AAC9B,QAAM,WAAO,0BAAa,QAAQ,KAAK,MAAM,CAAC,CAAC;AAE/C,QAAM,gBAAgB,UAAM,gBAAAA,SAAQ;AAEpC,QAAM,aAAa;AAAA,IACjB,GAAG,KAAK;AAAA,IACR;AAAA,IACA
+ "sourcesContent": ["import path from 'path';\n\nimport execa from 'execa';\nimport getPort from 'get-port';\n\nimport { parseRunArgs } from '../../utils/args.js';\nimport { createExec } from '../../utils/exec.js';\nimport { isIpPort } from '../../utils/validation.js';\n\nexport const longRunning = true;\n\nexport const node = async () => {\n  const args = parseRunArgs(process.argv.slice(2));\n\n  const availablePort = await getPort();\n\n  const commonArgs = [\n    ...args.node,\n    '--env-file-if-exists',\n    '.env',\n    '--require',\n    require.resolve('tsconfig-paths/register'),\n  ];\n\n  if (args.entryPoint) {\n    const exec = createExec({\n      env: {\n        __SKUBA_ENTRY_POINT: args.entryPoint,\n        __SKUBA_PORT: String(isIpPort(args.port) ? args.port : availablePort),\n      },\n    });\n\n    return exec(\n      'tsx',\n      ...commonArgs,\n      path.join(__dirname, '..', '..', 'wrapper', 'index.js'),\n      ...args.script,\n    );\n  }\n\n  return execa(\n    require.resolve('tsx/cli'),\n    [\n      ...commonArgs,\n      '--require',\n      // Unsure if bug or feature that this is needed, but tsx appears to not do anything typescript in the REPL without this!\n      // Doesn't occur when just running the tsx binary directly \uD83E\uDDD0\n      require.resolve('tsx/patch-repl'),\n    ],\n    {\n      stdio: 'inherit',\n    },\n  );\n};\n"],
+ "mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAiB;AAEjB,mBAAkB;AAClB,sBAAoB;AAEpB,kBAA6B;AAC7B,kBAA2B;AAC3B,wBAAyB;AAElB,MAAM,cAAc;AAEpB,MAAM,OAAO,YAAY;AAC9B,QAAM,WAAO,0BAAa,QAAQ,KAAK,MAAM,CAAC,CAAC;AAE/C,QAAM,gBAAgB,UAAM,gBAAAA,SAAQ;AAEpC,QAAM,aAAa;AAAA,IACjB,GAAG,KAAK;AAAA,IACR;AAAA,IACA;AAAA,IACA;AAAA,IACA,gBAAgB,yBAAyB;AAAA,EAC3C;AAEA,MAAI,KAAK,YAAY;AACnB,UAAM,WAAO,wBAAW;AAAA,MACtB,KAAK;AAAA,QACH,qBAAqB,KAAK;AAAA,QAC1B,cAAc,WAAO,4BAAS,KAAK,IAAI,IAAI,KAAK,OAAO,aAAa;AAAA,MACtE;AAAA,IACF,CAAC;AAED,WAAO;AAAA,MACL;AAAA,MACA,GAAG;AAAA,MACH,YAAAC,QAAK,KAAK,WAAW,MAAM,MAAM,WAAW,UAAU;AAAA,MACtD,GAAG,KAAK;AAAA,IACV;AAAA,EACF;AAEA,aAAO,aAAAC;AAAA,IACL,gBAAgB,SAAS;AAAA,IACzB;AAAA,MACE,GAAG;AAAA,MACH;AAAA;AAAA;AAAA,MAGA,gBAAgB,gBAAgB;AAAA,IAClC;AAAA,IACA;AAAA,MACE,OAAO;AAAA,IACT;AAAA,EACF;AACF;",
  "names": ["getPort", "path", "execa"]
  }
package/lib/cli/start/index.js
CHANGED
@@ -54,8 +54,8 @@ const start = async () => {
    "watch",
    "--clear-screen=false",
    ...args.node,
- "--
- "
+ "--env-file-if-exists",
+ ".env",
    "--require",
    "tsconfig-paths/register",
    import_path.default.join(__dirname, "..", "..", "wrapper", "index.js"),
package/lib/cli/start/index.js.map
CHANGED
@@ -1,7 +1,7 @@
  {
  "version": 3,
  "sources": ["../../../src/cli/start/index.ts"],
- "sourcesContent": ["import path from 'path';\n\nimport getPort from 'get-port';\n\nimport { parseRunArgs } from '../../utils/args.js';\nimport { createExec } from '../../utils/exec.js';\nimport { getEntryPointFromManifest } from '../../utils/manifest.js';\nimport { isIpPort } from '../../utils/validation.js';\n\nexport const start = async () => {\n  const [args, availablePort] = await Promise.all([\n    parseRunArgs(process.argv.slice(2)),\n    getPort(),\n  ]);\n\n  args.entryPoint ??= await getEntryPointFromManifest();\n\n  const execProcess = createExec({\n    env: {\n      __SKUBA_ENTRY_POINT: args.entryPoint,\n      __SKUBA_PORT: String(isIpPort(args.port) ? args.port : availablePort),\n    },\n  });\n\n  return execProcess(\n    'tsx',\n    'watch',\n    '--clear-screen=false',\n    ...args.node,\n    '--
+ "sourcesContent": ["import path from 'path';\n\nimport getPort from 'get-port';\n\nimport { parseRunArgs } from '../../utils/args.js';\nimport { createExec } from '../../utils/exec.js';\nimport { getEntryPointFromManifest } from '../../utils/manifest.js';\nimport { isIpPort } from '../../utils/validation.js';\n\nexport const start = async () => {\n  const [args, availablePort] = await Promise.all([\n    parseRunArgs(process.argv.slice(2)),\n    getPort(),\n  ]);\n\n  args.entryPoint ??= await getEntryPointFromManifest();\n\n  const execProcess = createExec({\n    env: {\n      __SKUBA_ENTRY_POINT: args.entryPoint,\n      __SKUBA_PORT: String(isIpPort(args.port) ? args.port : availablePort),\n    },\n  });\n\n  return execProcess(\n    'tsx',\n    'watch',\n    '--clear-screen=false',\n    ...args.node,\n    '--env-file-if-exists',\n    '.env',\n    '--require',\n    'tsconfig-paths/register',\n    path.join(__dirname, '..', '..', 'wrapper', 'index.js'),\n    ...args.script,\n  );\n};\n"],
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAAiB;AAEjB,sBAAoB;AAEpB,kBAA6B;AAC7B,kBAA2B;AAC3B,sBAA0C;AAC1C,wBAAyB;AAElB,MAAM,QAAQ,YAAY;AAC/B,QAAM,CAAC,MAAM,aAAa,IAAI,MAAM,QAAQ,IAAI;AAAA,QAC9C,0BAAa,QAAQ,KAAK,MAAM,CAAC,CAAC;AAAA,QAClC,gBAAAA,SAAQ;AAAA,EACV,CAAC;AAED,OAAK,eAAe,UAAM,2CAA0B;AAEpD,QAAM,kBAAc,wBAAW;AAAA,IAC7B,KAAK;AAAA,MACH,qBAAqB,KAAK;AAAA,MAC1B,cAAc,WAAO,4BAAS,KAAK,IAAI,IAAI,KAAK,OAAO,aAAa;AAAA,IACtE;AAAA,EACF,CAAC;AAED,SAAO;AAAA,IACL;AAAA,IACA;AAAA,IACA;AAAA,IACA,GAAG,KAAK;AAAA,IACR;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA,YAAAC,QAAK,KAAK,WAAW,MAAM,MAAM,WAAW,UAAU;AAAA,IACtD,GAAG,KAAK;AAAA,EACV;AACF;",
  "names": ["getPort", "path"]
  }
package/package.json
CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "skuba",
- "version": "12.1.0-hoist-less-20250722131939",
+ "version": "12.1.0-no-sync-in-promise-iterable-20250801105434",
  "private": false,
  "description": "SEEK development toolkit for backend applications and packages",
  "homepage": "https://github.com/seek-oss/skuba#readme",
@@ -61,7 +61,6 @@
  "@types/node": "^22.0.0",
  "chalk": "^4.1.0",
  "concurrently": "^9.0.0",
- "dotenv": "^16.0.0",
  "ejs": "^3.1.6",
  "enquirer": "^2.3.6",
  "esbuild": "~0.25.0",
@@ -97,8 +96,8 @@
  "tsconfig-seek": "2.0.0",
  "tsx": "^4.16.2",
  "typescript": "~5.8.0",
- "zod": "^
- "eslint-config-skuba": "7.0
+ "zod": "^4.0.0",
+ "eslint-config-skuba": "7.1.0-no-sync-in-promise-iterable-20250801105434"
  },
  "devDependencies": {
  "@changesets/cli": "2.29.5",
@@ -108,7 +107,7 @@
  "@types/ejs": "3.1.5",
  "@types/express": "5.0.3",
  "@types/fs-extra": "11.0.4",
- "@types/koa": "
+ "@types/koa": "3.0.0",
  "@types/lodash.mergewith": "4.6.9",
  "@types/minimist": "1.2.5",
  "@types/module-alias": "2.0.4",
@@ -122,7 +121,7 @@
  "fastify": "5.4.0",
  "jest-diff": "30.0.4",
  "jsonfile": "6.1.0",
- "koa": "3.0.
+ "koa": "3.0.1",
  "memfs": "4.17.2",
  "remark-cli": "12.0.1",
  "remark-preset-lint-recommended": "7.0.1",
@@ -149,7 +148,7 @@
  "entryPoint": "src/index.ts",
  "template": null,
  "type": "package",
- "version": "
+ "version": "12.0.2"
  },
  "scripts": {
  "build": "scripts/build.sh",
package/template/express-rest-api/src/listen.ts
CHANGED
@@ -21,3 +21,9 @@ const listener = app.listen(config.port, () => {
  // https://docs.aws.amazon.com/elasticloadbalancing/latest/application/application-load-balancers.html#connection-idle-timeout
  // AWS recommends setting an application timeout larger than the load balancer
  listener.keepAliveTimeout = 31000;
+
+ // Report unhandled rejections instead of crashing the process
+ // Make sure to monitor these reports and alert as appropriate
+ process.on('unhandledRejection', (err) =>
+   logger.error(err, 'Unhandled promise rejection'),
+ );
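
A minimal sketch of the behaviour this added listener changes (assumed example, not part of the template):

// Node.js 15+ terminates the process on an unhandled rejection by default
// (ERR_UNHANDLED_REJECTION). With the listener above installed, the
// rejection is logged and the server keeps serving:
void Promise.reject(new Error('boom'));
// → logger.error(err, 'Unhandled promise rejection')

Per the template's own comment, these reports should be monitored and alerted on, since they no longer crash the process.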
package/template/greeter/package.json
CHANGED
@@ -17,9 +17,9 @@
  },
  "devDependencies": {
  "@types/node": "^22.13.10",
- "skuba": "12.1.0-hoist-less-20250722131939"
+ "skuba": "12.1.0-no-sync-in-promise-iterable-20250801105434"
  },
- "packageManager": "pnpm@10.
+ "packageManager": "pnpm@10.13.1",
  "engines": {
  "node": ">=22"
  }
package/template/koa-rest-api/package.json
CHANGED
@@ -24,17 +24,17 @@
  "@opentelemetry/sdk-node": "^0.203.0",
  "@seek/logger": "^10.0.0",
  "hot-shots": "^10.0.0",
- "koa": "^
+ "koa": "^3.0.1",
  "koa-compose": "^4.1.0",
  "seek-datadog-custom-metrics": "^4.6.3",
- "seek-koala": "^7.
+ "seek-koala": "^7.1.0",
  "skuba-dive": "^2.0.0",
- "zod": "^
+ "zod": "^4.0.0"
  },
  "devDependencies": {
  "@types/chance": "^1.1.3",
  "@types/co-body": "^6.1.3",
- "@types/koa": "^
+ "@types/koa": "^3.0.0",
  "@types/koa__router": "^12.0.0",
  "@types/node": "^22.13.10",
  "@types/supertest": "^6.0.0",
@@ -44,7 +44,7 @@
  "skuba": "*",
  "supertest": "^7.0.0"
  },
- "packageManager": "pnpm@10.
+ "packageManager": "pnpm@10.13.1",
  "engines": {
  "node": ">=22"
  }
package/template/koa-rest-api/src/listen.ts
CHANGED
@@ -22,3 +22,9 @@ const listener = app.listen(config.port, () => {
  // https://docs.aws.amazon.com/elasticloadbalancing/latest/application/application-load-balancers.html#connection-idle-timeout
  // AWS recommends setting an application timeout larger than the load balancer
  listener.keepAliveTimeout = 31000;
+
+ // Report unhandled rejections instead of crashing the process
+ // Make sure to monitor these reports and alert as appropriate
+ process.on('unhandledRejection', (err) =>
+   logger.error(err, 'Unhandled promise rejection'),
+ );
package/template/lambda-sqs-worker-cdk/infra/__snapshots__/appStack.test.ts.snap
CHANGED
@@ -1,4 +1,4 @@
- // Jest Snapshot v1, https://
+ // Jest Snapshot v1, https://jestjs.io/docs/snapshot-testing

  exports[`returns expected CloudFormation stack for dev 1`] = `
  {
@@ -332,6 +332,7 @@ exports[`returns expected CloudFormation stack for dev 1`] = `
  },
  "workerAliasLiveSqsEventSourceappStackworkerqueue8281B9F443B0CF93": {
  "Properties": {
+ "BatchSize": 10,
  "EventSourceArn": {
  "Fn::GetAtt": [
  "workerqueueA05CE5C6",
@@ -359,6 +360,12 @@ exports[`returns expected CloudFormation stack for dev 1`] = `
  ],
  ],
  },
+ "FunctionResponseTypes": [
+   "ReportBatchItemFailures",
+ ],
+ "ScalingConfig": {
+   "MaximumConcurrency": 2,
+ },
  "Tags": [
  {
  "Key": "aws-codedeploy-hooks",
@@ -1065,6 +1072,7 @@ exports[`returns expected CloudFormation stack for prod 1`] = `
  },
  "workerAliasLiveSqsEventSourceappStackworkerqueue8281B9F443B0CF93": {
  "Properties": {
+ "BatchSize": 10,
  "EventSourceArn": {
  "Fn::GetAtt": [
  "workerqueueA05CE5C6",
@@ -1092,6 +1100,12 @@ exports[`returns expected CloudFormation stack for prod 1`] = `
  ],
  ],
  },
+ "FunctionResponseTypes": [
+   "ReportBatchItemFailures",
+ ],
+ "ScalingConfig": {
+   "MaximumConcurrency": 20,
+ },
  "Tags": [
  {
  "Key": "aws-codedeploy-hooks",
package/template/lambda-sqs-worker-cdk/infra/appStack.ts
CHANGED
@@ -141,7 +141,11 @@ export class AppStack extends Stack {
    });

    workerDeployment.alias.addEventSource(
- new aws_lambda_event_sources.SqsEventSource(queue
+ new aws_lambda_event_sources.SqsEventSource(queue, {
+   maxConcurrency: config.workerLambda.reservedConcurrency,
+   batchSize: config.workerLambda.batchSize,
+   reportBatchItemFailures: true,
+ }),
    );
  }
}
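
The reportBatchItemFailures option above is what produces the FunctionResponseTypes: ["ReportBatchItemFailures"] entries in the snapshots, and it only takes effect when the handler reports partial failures. A hedged sketch of that contract, using the aws-lambda package's SQSBatchResponse type and an illustrative message id:

import type { SQSBatchResponse } from 'aws-lambda';

// Records named in batchItemFailures are returned to the queue for retry;
// every other record in the batch is treated as processed and deleted.
const partialFailure: SQSBatchResponse = {
  batchItemFailures: [{ itemIdentifier: 'example-message-id' }],
};

Note that maxConcurrency reuses config.workerLambda.reservedConcurrency, keeping event-source scaling aligned with the function's reserved concurrency.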
package/template/lambda-sqs-worker-cdk/infra/config.ts
CHANGED
@@ -9,6 +9,7 @@ const environment = Env.oneOf(ENVIRONMENTS)('ENVIRONMENT');
  interface Config {
    appName: string;
    workerLambda: {
+     batchSize: number;
      reservedConcurrency: number;
      environment: {
        ENVIRONMENT: Environment;
@@ -24,6 +25,7 @@ const configs: Record<Environment, Config> = {
  dev: {
    appName: '<%- serviceName %>',
    workerLambda: {
+     batchSize: 10,
      reservedConcurrency: 2,
      environment: {
        ENVIRONMENT: 'dev',
@@ -37,6 +39,7 @@ const configs: Record<Environment, Config> = {
  prod: {
    appName: '<%- serviceName %>',
    workerLambda: {
+     batchSize: 10,
      reservedConcurrency: 20,
      environment: {
        ENVIRONMENT: 'prod',
package/template/lambda-sqs-worker-cdk/package.json
CHANGED
@@ -22,7 +22,7 @@
  "datadog-lambda-js": "^10.0.0",
  "dd-trace": "^5.0.0",
  "skuba-dive": "^2.0.0",
- "zod": "^
+ "zod": "^4.0.0"
  },
  "devDependencies": {
  "@seek/aws-codedeploy-infra": "^3.0.0",
@@ -37,9 +37,9 @@
  "constructs": "^10.0.17",
  "datadog-cdk-constructs-v2": "^2.0.0",
  "pino-pretty": "^13.0.0",
- "skuba": "12.1.0-hoist-less-20250722131939"
+ "skuba": "12.1.0-no-sync-in-promise-iterable-20250801105434"
  },
- "packageManager": "pnpm@10.
+ "packageManager": "pnpm@10.13.1",
  "engines": {
  "node": ">=22"
  }
package/template/lambda-sqs-worker-cdk/src/app.test.ts
CHANGED
@@ -1,4 +1,5 @@
  import { PublishCommand } from '@aws-sdk/client-sns';
+ import type { SQSBatchResponse } from 'aws-lambda';

  import { metricsClient } from 'src/framework/metrics.js';
  import { createCtx, createSqsEvent } from 'src/testing/handler.js';
@@ -40,42 +41,100 @@ describe('handler', () => {
  it('handles one record', async () => {
    const event = createSqsEvent([JSON.stringify(jobPublished)]);

-   await expect(app.handler(event, ctx)).resolves.
+   await expect(app.handler(event, ctx)).resolves.toEqual<SQSBatchResponse>({
+     batchItemFailures: [],
+   });

    expect(scoringService.request).toHaveBeenCalledTimes(1);

    expect(stdoutMock.calls).toMatchObject([
+     { count: 1, level: 20, msg: 'Received jobs' },
      {
-       awsRequestId: '-',
-       count: 1,
        level: 20,
-       msg: '
+       msg: 'Scored job',
+       snsMessageId: expect.any(String),
+       sqsMessageId: event.Records[0]!.messageId,
      },
+     { level: 20, msg: 'Function completed' },
+   ]);
+
+   expect(distribution.mock.calls).toEqual([
+     ['job.received', 1],
+     ['job.scored', 1],
+   ]);
+
+   expect(sns.client).toReceiveCommandTimes(PublishCommand, 1);
+ });
+
+ it('handles multiple records', async () => {
+   const event = createSqsEvent([
+     JSON.stringify(jobPublished),
+     JSON.stringify(jobPublished),
+   ]);
+
+   await expect(app.handler(event, ctx)).resolves.toEqual<SQSBatchResponse>({
+     batchItemFailures: [],
+   });
+
+   expect(stdoutMock.calls).toMatchObject([
+     { count: 2, level: 20, msg: 'Received jobs' },
      {
-       awsRequestId: '-',
        level: 20,
        msg: 'Scored job',
        snsMessageId: expect.any(String),
+       sqsMessageId: event.Records[0]!.messageId,
      },
      {
-       awsRequestId: '-',
        level: 20,
-       msg: '
+       msg: 'Scored job',
+       snsMessageId: expect.any(String),
+       sqsMessageId: event.Records[1]!.messageId,
      },
+     { level: 20, msg: 'Function completed' },
    ]);
+ });

-
-
-
+ it('handles partial batch failure', async () => {
+   const event = createSqsEvent([
+     JSON.stringify('}'),
+     JSON.stringify(jobPublished),
    ]);

-   expect(
+   await expect(app.handler(event, ctx)).resolves.toEqual<SQSBatchResponse>({
+     batchItemFailures: [{ itemIdentifier: event.Records[0]!.messageId }],
+   });
+
+   expect(stdoutMock.calls).toMatchObject([
+     { count: 2, level: 20, msg: 'Received jobs' },
+     {
+       err: {
+         name: 'ZodError',
+         type: 'ZodError',
+       },
+       level: 50,
+       msg: 'Processing record failed',
+       sqsMessageId: event.Records[0]!.messageId,
+     },
+     {
+       level: 20,
+       msg: 'Scored job',
+       snsMessageId: expect.any(String),
+       sqsMessageId: event.Records[1]!.messageId,
+     },
+     { level: 20, msg: 'Function completed' },
+   ]);
  });

- it('
+ it('returns a batchItemFailure on invalid input', () => {
    const event = createSqsEvent(['}']);

-   return expect(app.handler(event, ctx)).
+   return expect(app.handler(event, ctx)).resolves.toEqual<SQSBatchResponse>({
+     batchItemFailures: [
+       {
+         itemIdentifier: event.Records[0]!.messageId,
+       },
+     ],
+   });
  });

  it('bubbles up scoring service error', async () => {
@@ -85,24 +144,22 @@ describe('handler', () => {

    const event = createSqsEvent([JSON.stringify(jobPublished)]);

-   await expect(app.handler(event, ctx)).
+   await expect(app.handler(event, ctx)).resolves.toEqual<SQSBatchResponse>({
+     batchItemFailures: [{ itemIdentifier: event.Records[0]!.messageId }],
+   });

    expect(stdoutMock.calls).toMatchObject([
+     { count: 1, level: 20, msg: 'Received jobs' },
      {
-       awsRequestId: '-',
-       count: 1,
-       level: 20,
-       msg: 'Received jobs',
-     },
-     {
-       awsRequestId: '-',
        err: {
          message: err.message,
          type: 'Error',
        },
        level: 50,
-       msg: '
+       msg: 'Processing record failed',
+       sqsMessageId: event.Records[0]!.messageId,
      },
+     { level: 20, msg: 'Function completed' },
    ]);
  });

@@ -113,23 +170,28 @@ describe('handler', () => {

    const event = createSqsEvent([JSON.stringify(jobPublished)]);

-   await expect(app.handler(event, ctx)).
+   await expect(app.handler(event, ctx)).resolves.toEqual<SQSBatchResponse>({
+     batchItemFailures: [{ itemIdentifier: event.Records[0]!.messageId }],
+   });

    expect(stdoutMock.calls).toMatchObject([
      {
-       awsRequestId: '-',
        count: 1,
        level: 20,
        msg: 'Received jobs',
      },
      {
-       awsRequestId: '-',
        err: {
          message: err.message,
          type: 'Error',
        },
        level: 50,
-       msg: '
+       msg: 'Processing record failed',
+       sqsMessageId: event.Records[0]!.messageId,
+     },
+     {
+       level: 20,
+       msg: 'Function completed',
      },
    ]);
  });
@@ -141,7 +203,6 @@ describe('handler', () => {

    expect(stdoutMock.calls).toMatchObject([
      {
-       awsRequestId: '-',
        err: {
          message: 'Received 0 records',
          type: 'Error',
@@ -151,25 +212,4 @@ describe('handler', () => {
      },
    ]);
  });
-
- it('throws on multiple records', async () => {
-   const event = createSqsEvent([
-     JSON.stringify(jobPublished),
-     JSON.stringify(jobPublished),
-   ]);
-
-   await expect(app.handler(event, ctx)).rejects.toThrow('Function failed');
-
-   expect(stdoutMock.calls).toMatchObject([
-     {
-       awsRequestId: '-',
-       err: {
-         message: 'Received 2 records',
-         type: 'Error',
-       },
-       level: 50,
-       msg: 'Function failed',
-     },
-   ]);
- });
  });
package/template/lambda-sqs-worker-cdk/src/app.ts
CHANGED
@@ -3,7 +3,7 @@ import 'skuba-dive/register';
  import { isLambdaHook } from '@seek/aws-codedeploy-hooks';
  import type { SQSEvent } from 'aws-lambda';

- import { createHandler } from 'src/framework/handler.js';
+ import { createBatchSQSHandler, createHandler } from 'src/framework/handler.js';
  import { logger } from 'src/framework/logging.js';
  import { metricsClient } from 'src/framework/metrics.js';
  import { validateJson } from 'src/framework/validation.js';
@@ -36,19 +36,17 @@ export const handler = createHandler<SQSEvent>(async (event, ctx) => {

  const count = event.Records.length;

- if (count
- throw Error(
+ if (!count) {
+   throw Error('Received 0 records');
  }
-
  logger.debug({ count }, 'Received jobs');

- metricsClient.distribution('job.received',
+ metricsClient.distribution('job.received', count);

-
-
- throw new Error('Malformed SQS event with no records');
- }
+ return recordHandler(event, ctx);
+ });

+ const recordHandler = createBatchSQSHandler(async (record, _ctx) => {
  const { body } = record;

  // TODO: this throws an error, which will cause the Lambda function to retry
package/template/lambda-sqs-worker-cdk/src/framework/handler.test.ts
CHANGED
@@ -1,3 +1,5 @@
+ import type { SQSEvent } from 'aws-lambda';
+
  import { createCtx } from 'src/testing/handler.js';
  import { chance } from 'src/testing/types.js';

@@ -6,12 +8,14 @@ import { logger, stdoutMock } from './logging.js';

  describe('createHandler', () => {
    const ctx = createCtx();
-   const input =
+   const input: SQSEvent = {
+     Records: [],
+   };

    afterEach(stdoutMock.clear);

    it('handles happy path', async () => {
-     const output = chance.
+     const output = chance.sentence();

      const handler = createHandler((event) => {
        expect(event).toBe(input);
@@ -32,7 +36,8 @@ describe('createHandler', () => {
      {
        awsRequestId: '-',
        level: 20,
-
+       output,
+       msg: 'Function completed',
      },
    ]);
  });
package/template/lambda-sqs-worker-cdk/src/framework/handler.ts
CHANGED
@@ -1,8 +1,14 @@
- import type {
+ import type {
+   Context as LambdaContext,
+   SQSBatchItemFailure,
+   SQSBatchResponse,
+   SQSEvent,
+   SQSRecord,
+ } from 'aws-lambda';
  import { datadog } from 'datadog-lambda-js';

  import { config } from 'src/config.js';
- import { logger,
+ import { lambdaContext, logger, recordContext } from 'src/framework/logging.js';

  type Handler<Event, Output> = (
    event: Event,
@@ -20,15 +26,15 @@ const withDatadog = <Event, Output = unknown>(
  // istanbul ignore next
  config.metrics ? (datadog(fn) as Handler<Event, Output>) : fn;

- export const createHandler = <Event, Output = unknown>(
+ export const createHandler = <Event extends SQSEvent, Output = unknown>(
    fn: (event: Event, ctx: LambdaContext) => Promise<Output>,
  ) =>
    withDatadog<Event>((event, ctx) =>
-
+     lambdaContext.run({ awsRequestId: ctx.awsRequestId }, async () => {
        try {
          const output = await fn(event, ctx);

-         logger.debug('Function
+         logger.debug({ output }, 'Function completed');

          return output;
        } catch (err) {
@@ -38,3 +44,30 @@ export const createHandler = <Event, Output = unknown>(
      }
    }),
  );
+
+ export const createBatchSQSHandler =
+   (
+     fn: (record: SQSRecord, ctx: LambdaContext) => Promise<unknown>,
+   ): Handler<SQSEvent, SQSBatchResponse> =>
+   async (event, ctx) => {
+     const processRecord = (
+       record: SQSRecord,
+     ): Promise<SQSBatchItemFailure | undefined> =>
+       recordContext.run({ sqsMessageId: record.messageId }, async () => {
+         try {
+           await fn(record, ctx);
+           return;
+         } catch (err) {
+           logger.error({ err }, 'Processing record failed');
+           return {
+             itemIdentifier: record.messageId,
+           };
+         }
+       });
+
+     const results = await Promise.all(event.Records.map(processRecord));
+
+     return {
+       batchItemFailures: results.filter((item) => item !== undefined),
+     };
+   };
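
createBatchSQSHandler above processes records concurrently via Promise.all, with each record wrapped in an async closure. A minimal sketch (hypothetical fn, not from the package) of why that wrapping matters, plausibly what the prerelease tag "no-sync-in-promise-iterable" alludes to:

import type { SQSRecord } from 'aws-lambda';

// A handler that throws synchronously instead of returning a rejected promise:
const failSync = (record: SQSRecord): Promise<never> => {
  throw new Error('sync throw, not a rejected promise');
};
// Because fn is invoked inside recordContext.run's async callback, the sync
// throw is converted into a rejection that the try/catch maps to a single
// batchItemFailure for that record, rather than escaping the iterable and
// failing the entire batch:
// const handler = createBatchSQSHandler(failSync);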
package/template/lambda-sqs-worker-cdk/src/framework/logging.ts
CHANGED
@@ -4,11 +4,16 @@ import createLogger, { createDestination } from '@seek/logger';

  import { config } from 'src/config.js';

- interface
+ interface LambdaContext {
    awsRequestId: string;
  }

-
+ interface RecordContext {
+   sqsMessageId: string;
+ }
+
+ export const lambdaContext = new AsyncLocalStorage<LambdaContext>();
+ export const recordContext = new AsyncLocalStorage<RecordContext>();

  const { destination, stdoutMock } = createDestination({
    mock: config.environment === 'test' && {
@@ -27,7 +32,10 @@ export const logger = createLogger(

    level: config.logLevel,

- mixin: () => ({
+ mixin: () => ({
+   ...lambdaContext.getStore(),
+   ...recordContext.getStore(),
+ }),

    name: config.name,
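
With the mixin above, every log line emitted inside a context scope inherits that scope's fields. A brief usage sketch (illustrative values; logger, lambdaContext, and recordContext as defined above):

lambdaContext.run({ awsRequestId: 'req-1' }, () => {
  recordContext.run({ sqsMessageId: 'msg-1' }, () => {
    logger.info('processing');
    // emits { awsRequestId: 'req-1', sqsMessageId: 'msg-1', msg: 'processing', ... }
  });
});

This is why the updated template tests assert sqsMessageId on per-record log lines.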