skuba 12.1.0-hoist-less-20250722131939 → 12.1.0-main-20250812041011
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +1 -2
- package/config/tsconfig.json +3 -2
- package/lib/cli/build/assets.js +1 -1
- package/lib/cli/build/assets.js.map +2 -2
- package/lib/cli/build/tsc.d.ts +5 -1
- package/lib/cli/build/tsc.js +12 -0
- package/lib/cli/build/tsc.js.map +3 -3
- package/lib/cli/init/getConfig.js +1 -1
- package/lib/cli/init/getConfig.js.map +2 -2
- package/lib/cli/lint/internal.js +1 -1
- package/lib/cli/lint/internal.js.map +2 -2
- package/lib/cli/lint/internalLints/upgrade/patches/12.0.2/index.d.ts +2 -0
- package/lib/cli/lint/internalLints/upgrade/patches/12.0.2/index.js +35 -0
- package/lib/cli/lint/internalLints/upgrade/patches/12.0.2/index.js.map +7 -0
- package/lib/cli/lint/internalLints/upgrade/patches/12.0.2/unhandledRejections.d.ts +4 -0
- package/lib/cli/lint/internalLints/upgrade/patches/12.0.2/unhandledRejections.js +162 -0
- package/lib/cli/lint/internalLints/upgrade/patches/12.0.2/unhandledRejections.js.map +7 -0
- package/lib/cli/lint/internalLints/upgrade/patches/8.2.1/upgradeESLint.js +1 -1
- package/lib/cli/lint/internalLints/upgrade/patches/8.2.1/upgradeESLint.js.map +2 -2
- package/lib/cli/node/index.js +8 -2
- package/lib/cli/node/index.js.map +2 -2
- package/lib/cli/start/index.js +8 -2
- package/lib/cli/start/index.js.map +2 -2
- package/lib/cli/test/index.d.ts +1 -1
- package/lib/cli/test/index.js +18 -4
- package/lib/cli/test/index.js.map +2 -2
- package/lib/utils/args.d.ts +2 -0
- package/lib/utils/args.js +5 -0
- package/lib/utils/args.js.map +2 -2
- package/package.json +16 -17
- package/template/base/_pnpm-workspace.yaml +1 -0
- package/template/base/jest.setup.ts +1 -1
- package/template/express-rest-api/.buildkite/pipeline.yml +6 -0
- package/template/express-rest-api/.env +1 -1
- package/template/express-rest-api/.gantry/dev.yml +5 -1
- package/template/express-rest-api/.gantry/prod.yml +5 -1
- package/template/express-rest-api/Dockerfile +1 -1
- package/template/express-rest-api/README.md +5 -5
- package/template/express-rest-api/gantry.apply.yml +17 -1
- package/template/express-rest-api/package.json +11 -5
- package/template/express-rest-api/src/api/healthCheck.ts +2 -2
- package/template/express-rest-api/src/config.ts +7 -7
- package/template/express-rest-api/src/framework/logging.ts +11 -7
- package/template/express-rest-api/src/framework/metrics.ts +1 -1
- package/template/express-rest-api/src/listen.ts +6 -0
- package/template/express-rest-api/src/tracing.ts +56 -0
- package/template/greeter/README.md +2 -2
- package/template/greeter/package.json +2 -2
- package/template/koa-rest-api/.buildkite/pipeline.yml +6 -0
- package/template/koa-rest-api/.env +1 -1
- package/template/koa-rest-api/.gantry/dev.yml +3 -3
- package/template/koa-rest-api/.gantry/prod.yml +3 -3
- package/template/koa-rest-api/README.md +6 -6
- package/template/koa-rest-api/gantry.apply.yml +15 -3
- package/template/koa-rest-api/package.json +9 -10
- package/template/koa-rest-api/src/api/healthCheck.ts +2 -2
- package/template/koa-rest-api/src/config.ts +7 -7
- package/template/koa-rest-api/src/framework/logging.ts +12 -8
- package/template/koa-rest-api/src/framework/metrics.ts +1 -1
- package/template/koa-rest-api/src/framework/server.test.ts +7 -8
- package/template/koa-rest-api/src/framework/server.ts +1 -4
- package/template/koa-rest-api/src/listen.ts +6 -0
- package/template/lambda-sqs-worker-cdk/.buildkite/pipeline.yml +6 -2
- package/template/lambda-sqs-worker-cdk/.env +1 -1
- package/template/lambda-sqs-worker-cdk/README.md +8 -8
- package/template/lambda-sqs-worker-cdk/infra/__snapshots__/appStack.test.ts.snap +50 -10
- package/template/lambda-sqs-worker-cdk/infra/appStack.test.ts +5 -8
- package/template/lambda-sqs-worker-cdk/infra/appStack.ts +15 -5
- package/template/lambda-sqs-worker-cdk/infra/config.ts +30 -18
- package/template/lambda-sqs-worker-cdk/infra/index.ts +1 -1
- package/template/lambda-sqs-worker-cdk/package.json +7 -7
- package/template/lambda-sqs-worker-cdk/src/app.test.ts +91 -51
- package/template/lambda-sqs-worker-cdk/src/app.ts +7 -9
- package/template/lambda-sqs-worker-cdk/src/config.ts +11 -16
- package/template/lambda-sqs-worker-cdk/src/framework/handler.test.ts +10 -5
- package/template/lambda-sqs-worker-cdk/src/framework/handler.ts +44 -24
- package/template/lambda-sqs-worker-cdk/src/framework/logging.ts +23 -11
- package/template/lambda-sqs-worker-cdk/src/framework/metrics.ts +1 -4
- package/template/lambda-sqs-worker-cdk/src/testing/handler.ts +4 -1
- package/template/oss-npm-package/.github/workflows/release.yml +1 -1
- package/template/oss-npm-package/.github/workflows/validate.yml +1 -1
package/template/lambda-sqs-worker-cdk/infra/appStack.ts

@@ -19,7 +19,10 @@ import { DatadogLambda } from 'datadog-cdk-constructs-v2';
 import { config } from './config.js';
 
 // Updated by https://github.com/seek-oss/rynovate
-const DATADOG_EXTENSION_LAYER_VERSION =
+const DATADOG_EXTENSION_LAYER_VERSION = 84;
+
+// Updated by https://github.com/seek-oss/rynovate
+const DATADOG_NODE_LAYER_VERSION = 126;
 
 export class AppStack extends Stack {
   constructor(scope: Construct, id: string, props?: StackProps) {
@@ -95,7 +98,6 @@ export class AppStack extends Stack {
         target: 'node22',
         // aws-sdk-v3 is set as an external module by default, but we want it to be bundled with the function
         externalModules: [],
-        nodeModules: ['datadog-lambda-js', 'dd-trace'],
       },
       functionName: '<%- serviceName %>',
       environment: {
@@ -127,11 +129,15 @@ export class AppStack extends Stack {
     );
 
     const datadog = new DatadogLambda(this, 'datadog', {
+      env: config.env,
+      service: config.service,
+      version: config.version,
+
      apiKeySecret: datadogSecret,
-      addLayers: false,
      enableDatadogLogs: false,
-      flushMetricsToLogs: false,
      extensionLayerVersion: DATADOG_EXTENSION_LAYER_VERSION,
+      flushMetricsToLogs: false,
+      nodeLayerVersion: DATADOG_NODE_LAYER_VERSION,
     });
 
     datadog.addLambdaFunctions([worker]);
@@ -141,7 +147,11 @@ export class AppStack extends Stack {
     });
 
     workerDeployment.alias.addEventSource(
-      new aws_lambda_event_sources.SqsEventSource(queue
+      new aws_lambda_event_sources.SqsEventSource(queue, {
+        batchSize: config.workerLambda.batchSize,
+        maxConcurrency: config.workerLambda.reservedConcurrency - 1, // Ensure we have capacity reserved for our blue/green deployment
+        reportBatchItemFailures: true,
+      }),
     );
   }
 }
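The `reportBatchItemFailures: true` option on the event source enables SQS partial batch responses, which the new handler code later in this diff relies on: instead of failing the whole invocation, the function reports only the records that could not be processed, and SQS re-queues just those. A minimal sketch of the contract, assuming a toy failure condition rather than the template's actual logic:

```typescript
import type { SQSBatchResponse, SQSEvent } from 'aws-lambda';

// With ReportBatchItemFailures enabled, Lambda inspects this response shape:
// every messageId listed under batchItemFailures is returned to the queue for
// retry, while the remaining records in the batch are deleted as successful.
export const handler = async (event: SQSEvent): Promise<SQSBatchResponse> => ({
  batchItemFailures: event.Records.filter((record) => !record.body).map(
    (record) => ({ itemIdentifier: record.messageId }),
  ),
});
```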
package/template/lambda-sqs-worker-cdk/infra/config.ts

@@ -1,52 +1,64 @@
 import { Env } from 'skuba-dive';
 
-
+type Deployment = (typeof deployments)[number];
 
-
+const deployments = ['dev', 'prod'] as const;
 
-const
+const deployment = Env.oneOf(deployments)('DEPLOYMENT');
 
 interface Config {
-
+  env: 'development' | 'production';
+  service: string;
+  version: string;
+
   workerLambda: {
+    batchSize: number;
     reservedConcurrency: number;
     environment: {
-
-      SERVICE: string;
-      VERSION: string;
+      DEPLOYMENT: Deployment;
     };
   };
+
   datadogApiKeySecretArn: string;
   sourceSnsTopicArn: string;
 }
 
-const
+const service = '<%- serviceName %>';
+const version = Env.string('VERSION');
+
+const configs: Record<Deployment, Config> = {
   dev: {
-
+    env: 'development',
+    service,
+    version,
+
     workerLambda: {
-
+      batchSize: 10,
+      reservedConcurrency: 3,
       environment: {
-
-        SERVICE: '<%- serviceName %>',
-        VERSION: Env.string('VERSION', { default: 'local' }),
+        DEPLOYMENT: 'dev',
       },
     },
+
     datadogApiKeySecretArn: 'TODO: datadogApiKeySecretArn',
     sourceSnsTopicArn: 'TODO: sourceSnsTopicArn',
   },
   prod: {
-
+    env: 'production',
+    service,
+    version,
+
     workerLambda: {
+      batchSize: 10,
       reservedConcurrency: 20,
       environment: {
-
-        SERVICE: '<%- serviceName %>',
-        VERSION: Env.string('VERSION', { default: 'local' }),
+        DEPLOYMENT: 'prod',
       },
     },
+
     datadogApiKeySecretArn: 'TODO: datadogApiKeySecretArn',
     sourceSnsTopicArn: 'TODO: sourceSnsTopicArn',
   },
 };
 
-export const config: Config = configs[
+export const config: Config = configs[deployment];
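Both config files now derive their shape from a `DEPLOYMENT` environment variable via skuba-dive's `Env.oneOf`, as seen above. A rough sketch of the behaviour, based on the curried usage in this diff (the `process.env` seeding is illustrative):

```typescript
import { Env } from 'skuba-dive';

const deployments = ['dev', 'prod'] as const;

process.env.DEPLOYMENT = 'dev'; // illustrative; normally set by the pipeline

// Per its usage here, Env.oneOf validates the variable against the allowed
// literals and narrows the result to the union 'dev' | 'prod', so an
// unexpected value should fail fast at startup rather than at lookup time.
const deployment = Env.oneOf(deployments)('DEPLOYMENT');

const configs = {
  dev: { reservedConcurrency: 3 },
  prod: { reservedConcurrency: 20 },
};

console.log(configs[deployment]); // { reservedConcurrency: 3 }
```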
package/template/lambda-sqs-worker-cdk/infra/index.ts

@@ -7,7 +7,7 @@ import { config } from './config.js';
 const app = new App();
 
 const appStack = new AppStack(app, 'appStack', {
-  stackName: config.
+  stackName: config.service,
   tags: {
     'seek:source:url': 'https://github.com/SEEK-Jobs/<%- repoName %>',
     // 'seek:system:name': 'TODO: https://rfc.skinfra.xyz/RFC051-AWS-Tagging-Standard.html#tagging-schema',
package/template/lambda-sqs-worker-cdk/package.json

@@ -18,11 +18,9 @@
     "@aws-sdk/client-lambda": "^3.363.0",
     "@aws-sdk/client-sns": "^3.363.0",
     "@seek/aws-codedeploy-hooks": "^2.0.0",
-    "@seek/logger": "
-    "datadog-lambda-js": "^10.0.0",
-    "dd-trace": "^5.0.0",
+    "@seek/logger": "11.0.0",
     "skuba-dive": "^2.0.0",
-    "zod": "^
+    "zod": "^4.0.0"
   },
   "devDependencies": {
     "@seek/aws-codedeploy-infra": "^3.0.0",
@@ -35,11 +33,13 @@
     "aws-sdk-client-mock-jest": "^4.0.0",
     "chance": "^1.1.8",
     "constructs": "^10.0.17",
-    "datadog-cdk-constructs-v2": "^
+    "datadog-cdk-constructs-v2": "^3.0.0",
+    "datadog-lambda-js": "^12.0.0",
+    "dd-trace": "^5.0.0",
     "pino-pretty": "^13.0.0",
-    "skuba": "12.1.0-
+    "skuba": "12.1.0-main-20250812041011"
   },
-  "packageManager": "pnpm@10.
+  "packageManager": "pnpm@10.14.0",
   "engines": {
     "node": ">=22"
   }
package/template/lambda-sqs-worker-cdk/src/app.test.ts

@@ -1,4 +1,5 @@
 import { PublishCommand } from '@aws-sdk/client-sns';
+import type { SQSBatchResponse } from 'aws-lambda';
 
 import { metricsClient } from 'src/framework/metrics.js';
 import { createCtx, createSqsEvent } from 'src/testing/handler.js';
@@ -40,42 +41,100 @@ describe('handler', () => {
   it('handles one record', async () => {
     const event = createSqsEvent([JSON.stringify(jobPublished)]);
 
-    await expect(app.handler(event, ctx)).resolves.
+    await expect(app.handler(event, ctx)).resolves.toEqual<SQSBatchResponse>({
+      batchItemFailures: [],
+    });
 
     expect(scoringService.request).toHaveBeenCalledTimes(1);
 
     expect(stdoutMock.calls).toMatchObject([
+      { count: 1, level: 20, msg: 'Received jobs' },
       {
-        awsRequestId: '-',
-        count: 1,
         level: 20,
-        msg: '
+        msg: 'Scored job',
+        snsMessageId: expect.any(String),
+        sqsMessageId: event.Records[0]!.messageId,
       },
+      { level: 20, msg: 'Function completed' },
+    ]);
+
+    expect(distribution.mock.calls).toEqual([
+      ['job.received', 1],
+      ['job.scored', 1],
+    ]);
+
+    expect(sns.client).toReceiveCommandTimes(PublishCommand, 1);
+  });
+
+  it('handles multiple records', async () => {
+    const event = createSqsEvent([
+      JSON.stringify(jobPublished),
+      JSON.stringify(jobPublished),
+    ]);
+
+    await expect(app.handler(event, ctx)).resolves.toEqual<SQSBatchResponse>({
+      batchItemFailures: [],
+    });
+
+    expect(stdoutMock.calls).toMatchObject([
+      { count: 2, level: 20, msg: 'Received jobs' },
       {
-        awsRequestId: '-',
         level: 20,
         msg: 'Scored job',
         snsMessageId: expect.any(String),
+        sqsMessageId: event.Records[0]!.messageId,
       },
       {
-        awsRequestId: '-',
         level: 20,
-        msg: '
+        msg: 'Scored job',
+        snsMessageId: expect.any(String),
+        sqsMessageId: event.Records[1]!.messageId,
       },
+      { level: 20, msg: 'Function completed' },
     ]);
+  });
 
-
-
-
+  it('handles partial batch failure', async () => {
+    const event = createSqsEvent([
+      JSON.stringify('}'),
+      JSON.stringify(jobPublished),
     ]);
 
-    expect(
+    await expect(app.handler(event, ctx)).resolves.toEqual<SQSBatchResponse>({
+      batchItemFailures: [{ itemIdentifier: event.Records[0]!.messageId }],
+    });
+
+    expect(stdoutMock.calls).toMatchObject([
+      { count: 2, level: 20, msg: 'Received jobs' },
+      {
+        error: {
+          name: 'ZodError',
+          type: 'ZodError',
+        },
+        level: 50,
+        msg: 'Processing record failed',
+        sqsMessageId: event.Records[0]!.messageId,
+      },
+      {
+        level: 20,
+        msg: 'Scored job',
+        snsMessageId: expect.any(String),
+        sqsMessageId: event.Records[1]!.messageId,
+      },
+      { level: 20, msg: 'Function completed' },
+    ]);
   });
 
-  it('
+  it('returns a batchItemFailure on invalid input', () => {
     const event = createSqsEvent(['}']);
 
-    return expect(app.handler(event, ctx)).
+    return expect(app.handler(event, ctx)).resolves.toEqual<SQSBatchResponse>({
+      batchItemFailures: [
+        {
+          itemIdentifier: event.Records[0]!.messageId,
+        },
+      ],
+    });
   });
 
   it('bubbles up scoring service error', async () => {
@@ -85,24 +144,22 @@ describe('handler', () => {
 
     const event = createSqsEvent([JSON.stringify(jobPublished)]);
 
-    await expect(app.handler(event, ctx)).
+    await expect(app.handler(event, ctx)).resolves.toEqual<SQSBatchResponse>({
+      batchItemFailures: [{ itemIdentifier: event.Records[0]!.messageId }],
+    });
 
     expect(stdoutMock.calls).toMatchObject([
+      { count: 1, level: 20, msg: 'Received jobs' },
       {
-
-        count: 1,
-        level: 20,
-        msg: 'Received jobs',
-      },
-      {
-        awsRequestId: '-',
-        err: {
+        error: {
           message: err.message,
           type: 'Error',
         },
         level: 50,
-        msg: '
+        msg: 'Processing record failed',
+        sqsMessageId: event.Records[0]!.messageId,
       },
+      { level: 20, msg: 'Function completed' },
     ]);
   });
 
@@ -113,23 +170,28 @@ describe('handler', () => {
 
     const event = createSqsEvent([JSON.stringify(jobPublished)]);
 
-    await expect(app.handler(event, ctx)).
+    await expect(app.handler(event, ctx)).resolves.toEqual<SQSBatchResponse>({
+      batchItemFailures: [{ itemIdentifier: event.Records[0]!.messageId }],
+    });
 
     expect(stdoutMock.calls).toMatchObject([
       {
-        awsRequestId: '-',
         count: 1,
         level: 20,
         msg: 'Received jobs',
       },
       {
-
-        err: {
+        error: {
           message: err.message,
           type: 'Error',
         },
         level: 50,
-        msg: '
+        msg: 'Processing record failed',
+        sqsMessageId: event.Records[0]!.messageId,
+      },
+      {
+        level: 20,
+        msg: 'Function completed',
       },
     ]);
   });
@@ -141,8 +203,7 @@ describe('handler', () => {
 
     expect(stdoutMock.calls).toMatchObject([
       {
-
-        err: {
+        error: {
           message: 'Received 0 records',
           type: 'Error',
         },
@@ -151,25 +212,4 @@ describe('handler', () => {
       },
     ]);
   });
-
-  it('throws on multiple records', async () => {
-    const event = createSqsEvent([
-      JSON.stringify(jobPublished),
-      JSON.stringify(jobPublished),
-    ]);
-
-    await expect(app.handler(event, ctx)).rejects.toThrow('Function failed');
-
-    expect(stdoutMock.calls).toMatchObject([
-      {
-        awsRequestId: '-',
-        err: {
-          message: 'Received 2 records',
-          type: 'Error',
-        },
-        level: 50,
-        msg: 'Function failed',
-      },
-    ]);
-  });
 });
package/template/lambda-sqs-worker-cdk/src/app.ts

@@ -3,7 +3,7 @@ import 'skuba-dive/register';
 import { isLambdaHook } from '@seek/aws-codedeploy-hooks';
 import type { SQSEvent } from 'aws-lambda';
 
-import { createHandler } from 'src/framework/handler.js';
+import { createBatchSQSHandler, createHandler } from 'src/framework/handler.js';
 import { logger } from 'src/framework/logging.js';
 import { metricsClient } from 'src/framework/metrics.js';
 import { validateJson } from 'src/framework/validation.js';
@@ -36,19 +36,17 @@ export const handler = createHandler<SQSEvent>(async (event, ctx) => {
 
   const count = event.Records.length;
 
-  if (count
-    throw Error(
+  if (!count) {
+    throw Error('Received 0 records');
   }
-
   logger.debug({ count }, 'Received jobs');
 
-  metricsClient.distribution('job.received',
+  metricsClient.distribution('job.received', count);
 
-
-
-    throw new Error('Malformed SQS event with no records');
-  }
+  return recordHandler(event, ctx);
+});
 
+const recordHandler = createBatchSQSHandler(async (record, _ctx) => {
   const { body } = record;
 
   // TODO: this throws an error, which will cause the Lambda function to retry
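The entrypoint is now layered: `createHandler` owns whole-of-invocation concerns (request-scoped logging context, the `job.received` count metric) and hands each record to a `createBatchSQSHandler`-wrapped function, whose per-record throws become `batchItemFailures` entries rather than invocation failures. A condensed sketch of the flow, with the per-record body elided and `processBody` as a hypothetical stand-in:

```typescript
import type { SQSEvent } from 'aws-lambda';

import { createBatchSQSHandler, createHandler } from 'src/framework/handler.js';
import { metricsClient } from 'src/framework/metrics.js';

declare function processBody(body: string): Promise<void>; // hypothetical

const recordHandler = createBatchSQSHandler(async (record) => {
  // A throw here is caught per record and surfaces as a batch item failure.
  await processBody(record.body);
});

export const handler = createHandler<SQSEvent>(async (event, ctx) => {
  metricsClient.distribution('job.received', event.Records.length);
  return recordHandler(event, ctx); // resolves to an SQSBatchResponse
});
```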
package/template/lambda-sqs-worker-cdk/src/config.ts

@@ -1,27 +1,25 @@
 import { Env } from 'skuba-dive';
 
 interface Config {
-
+  deployment: Deployment;
 
   logLevel: string;
-  metrics: boolean;
   name: string;
   version: string;
 
   destinationSnsTopicArn: string;
 }
 
-type
+type Deployment = (typeof deployments)[number];
 
-const
+const deployments = ['local', 'test', 'dev', 'prod'] as const;
 
-const
+const deployment = Env.oneOf(deployments)('DEPLOYMENT');
 
 /* istanbul ignore next: config verification makes more sense in a smoke test */
-const configs: Record<
+const configs: Record<Deployment, () => Omit<Config, 'deployment'>> = {
   local: () => ({
     logLevel: 'debug',
-    metrics: false,
     name: '<%- serviceName %>',
     version: 'local',
 
@@ -30,7 +28,6 @@ const configs: Record<Environment, () => Omit<Config, 'environment'>> = {
 
   test: () => ({
     logLevel: Env.string('LOG_LEVEL', { default: 'debug' }),
-    metrics: false,
     name: '<%- serviceName %>',
     version: 'test',
 
@@ -39,24 +36,22 @@ const configs: Record<Environment, () => Omit<Config, 'environment'>> = {
 
   dev: () => ({
     logLevel: 'debug',
-
-
-    version: Env.string('VERSION'),
+    name: Env.string('DD_SERVICE'),
+    version: Env.string('DD_VERSION'),
 
     destinationSnsTopicArn: Env.string('DESTINATION_SNS_TOPIC_ARN'),
   }),
 
   prod: () => ({
     logLevel: 'info',
-
-
-    version: Env.string('VERSION'),
+    name: Env.string('DD_SERVICE'),
+    version: Env.string('DD_VERSION'),
 
     destinationSnsTopicArn: Env.string('DESTINATION_SNS_TOPIC_ARN'),
   }),
 };
 
 export const config: Config = {
-  ...configs[
-
+  ...configs[deployment](),
+  deployment,
 };
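One wrinkle worth noting in the runtime config: each entry is a thunk returning `Omit<Config, 'deployment'>`, so environment variables like `DD_SERVICE` are only read for the deployment actually selected, and the discriminating `deployment` key is spliced back in afterwards. Presumably the `DatadogLambda` construct, now given `service` and `version` in `infra/appStack.ts`, injects the matching `DD_SERVICE` and `DD_VERSION` variables at deploy time. A minimal, self-contained sketch of the pattern with simplified fields:

```typescript
const deployments = ['local', 'prod'] as const;
type Deployment = (typeof deployments)[number];

interface Config {
  deployment: Deployment;
  logLevel: string;
}

// Thunks defer reading environment variables until the selected entry is
// invoked, so a local run does not need prod-only variables to be present.
const configs: Record<Deployment, () => Omit<Config, 'deployment'>> = {
  local: () => ({ logLevel: 'debug' }),
  prod: () => ({ logLevel: process.env.LOG_LEVEL ?? 'info' }),
};

const deployment: Deployment = 'local'; // normally Env.oneOf(deployments)('DEPLOYMENT')

export const config: Config = { ...configs[deployment](), deployment };
```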
package/template/lambda-sqs-worker-cdk/src/framework/handler.test.ts

@@ -1,3 +1,5 @@
+import type { SQSEvent } from 'aws-lambda';
+
 import { createCtx } from 'src/testing/handler.js';
 import { chance } from 'src/testing/types.js';
 
@@ -6,12 +8,14 @@ import { logger, stdoutMock } from './logging.js';
 
 describe('createHandler', () => {
   const ctx = createCtx();
-  const input =
+  const input: SQSEvent = {
+    Records: [],
+  };
 
   afterEach(stdoutMock.clear);
 
   it('handles happy path', async () => {
-    const output = chance.
+    const output = chance.sentence();
 
     const handler = createHandler((event) => {
       expect(event).toBe(input);
@@ -32,7 +36,8 @@ describe('createHandler', () => {
       {
         awsRequestId: '-',
         level: 20,
-
+        output,
+        msg: 'Function completed',
       },
     ]);
   });
@@ -47,7 +52,7 @@ describe('createHandler', () => {
     expect(stdoutMock.calls).toMatchObject([
       {
         awsRequestId: '-',
-
+        error: {
           message: err.message,
           type: 'Error',
         },
@@ -69,7 +74,7 @@ describe('createHandler', () => {
     expect(stdoutMock.calls).toMatchObject([
       {
         awsRequestId: '-',
-
+        error: {
           message: err.message,
           type: 'Error',
         },
package/template/lambda-sqs-worker-cdk/src/framework/handler.ts

@@ -1,40 +1,60 @@
-import type {
-
+import type {
+  Context as LambdaContext,
+  SQSBatchItemFailure,
+  SQSBatchResponse,
+  SQSEvent,
+  SQSRecord,
+} from 'aws-lambda';
 
-import {
-import { logger, loggerContext } from 'src/framework/logging.js';
+import { lambdaContext, logger, recordContext } from 'src/framework/logging.js';
 
 type Handler<Event, Output> = (
   event: Event,
   ctx: LambdaContext,
 ) => Promise<Output>;
 
-
-
-
-
-
-
-  fn: Handler<Event, Output>,
-): Handler<Event, Output> =>
-  // istanbul ignore next
-  config.metrics ? (datadog(fn) as Handler<Event, Output>) : fn;
-
-export const createHandler = <Event, Output = unknown>(
-  fn: (event: Event, ctx: LambdaContext) => Promise<Output>,
-) =>
-  withDatadog<Event>((event, ctx) =>
-    loggerContext.run({ awsRequestId: ctx.awsRequestId }, async () => {
+export const createHandler =
+  <Event extends SQSEvent, Output = unknown>(
+    fn: (event: Event, ctx: LambdaContext) => Promise<Output>,
+  ): Handler<Event, Output> =>
+  async (event, ctx) =>
+    lambdaContext.run({ awsRequestId: ctx.awsRequestId }, async () => {
       try {
         const output = await fn(event, ctx);
 
-        logger.debug('Function
+        logger.debug({ output }, 'Function completed');
 
         return output;
       } catch (err) {
-        logger.error(
+        logger.error(err, 'Function failed');
 
         throw new Error('Function failed');
       }
-    })
-
+    });
+
+export const createBatchSQSHandler =
+  (
+    fn: (record: SQSRecord, ctx: LambdaContext) => Promise<unknown>,
+  ): Handler<SQSEvent, SQSBatchResponse> =>
+  async (event, ctx) => {
+    const processRecord = (
+      record: SQSRecord,
+    ): Promise<SQSBatchItemFailure | undefined> =>
+      recordContext.run({ sqsMessageId: record.messageId }, async () => {
+        try {
+          await fn(record, ctx);
+          return;
+        } catch (err) {
+          logger.error(err, 'Processing record failed');
+          return {
+            itemIdentifier: record.messageId,
+          };
+        }
+      });
+
+    const results = await Promise.all(event.Records.map(processRecord));
+
+    return {
+      batchItemFailures: results.filter((item) => item !== undefined),
+    };
+  };
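To close the loop, a quick usage sketch of the new `createBatchSQSHandler`: records are processed concurrently, and only the ones that throw end up in the response. The event and context stubs below are illustrative:

```typescript
import type { Context, SQSEvent } from 'aws-lambda';

import { createBatchSQSHandler } from 'src/framework/handler.js';

const handler = createBatchSQSHandler(async (record) => {
  if (record.body === 'bad') {
    throw new Error('cannot process');
  }
});

// Illustrative stubs; real events carry many more fields per record.
declare const event: SQSEvent; // two records: messageIds 'a' (body 'bad'), 'b' (body 'ok')
declare const ctx: Context;

// Expected resolution: { batchItemFailures: [{ itemIdentifier: 'a' }] } —
// the failing record is retried by SQS while 'b' is treated as processed.
void handler(event, ctx);
```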