skuba 9.0.1 → 9.1.0-make-aaron-hate-me-20241019044405

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (57)
  1. package/README.md +2 -6
  2. package/lib/cli/lint/annotate/github/tsc.js +2 -12
  3. package/lib/cli/lint/annotate/github/tsc.js.map +2 -2
  4. package/lib/cli/lint/internalLints/refreshConfigFiles.js +1 -2
  5. package/lib/cli/lint/internalLints/refreshConfigFiles.js.map +2 -2
  6. package/lib/cli/lint/internalLints/upgrade/patches/9.0.1/index.d.ts +2 -0
  7. package/lib/cli/lint/internalLints/upgrade/patches/9.0.1/index.js +35 -0
  8. package/lib/cli/lint/internalLints/upgrade/patches/9.0.1/index.js.map +7 -0
  9. package/lib/cli/lint/internalLints/upgrade/patches/9.0.1/patchPnpmDockerImages.d.ts +2 -0
  10. package/lib/cli/lint/internalLints/upgrade/patches/9.0.1/patchPnpmDockerImages.js +102 -0
  11. package/lib/cli/lint/internalLints/upgrade/patches/9.0.1/patchPnpmDockerImages.js.map +7 -0
  12. package/lib/cli/test/reporters/github/annotations.js +3 -3
  13. package/lib/cli/test/reporters/github/annotations.js.map +2 -2
  14. package/package.json +5 -6
  15. package/template/express-rest-api/.buildkite/pipeline.yml +1 -1
  16. package/template/express-rest-api/Dockerfile.dev-deps +3 -1
  17. package/template/express-rest-api/package.json +1 -1
  18. package/template/greeter/.buildkite/pipeline.yml +1 -1
  19. package/template/greeter/Dockerfile +3 -1
  20. package/template/greeter/package.json +2 -2
  21. package/template/koa-rest-api/.buildkite/pipeline.yml +1 -1
  22. package/template/koa-rest-api/Dockerfile.dev-deps +3 -1
  23. package/template/koa-rest-api/package.json +1 -1
  24. package/template/lambda-sqs-worker/.buildkite/pipeline.yml +2 -2
  25. package/template/lambda-sqs-worker/Dockerfile +3 -1
  26. package/template/lambda-sqs-worker/package.json +1 -1
  27. package/template/lambda-sqs-worker/serverless.yml +1 -1
  28. package/template/lambda-sqs-worker-cdk/.buildkite/pipeline.yml +2 -2
  29. package/template/lambda-sqs-worker-cdk/.env +1 -0
  30. package/template/lambda-sqs-worker-cdk/Dockerfile +3 -1
  31. package/template/lambda-sqs-worker-cdk/README.md +145 -0
  32. package/template/lambda-sqs-worker-cdk/infra/__snapshots__/appStack.test.ts.snap +158 -136
  33. package/template/lambda-sqs-worker-cdk/infra/appStack.test.ts +18 -2
  34. package/template/lambda-sqs-worker-cdk/infra/appStack.ts +52 -25
  35. package/template/lambda-sqs-worker-cdk/infra/config.ts +3 -0
  36. package/template/lambda-sqs-worker-cdk/package.json +9 -2
  37. package/template/lambda-sqs-worker-cdk/src/app.test.ts +116 -0
  38. package/template/lambda-sqs-worker-cdk/src/app.ts +43 -21
  39. package/template/lambda-sqs-worker-cdk/src/config.ts +15 -0
  40. package/template/lambda-sqs-worker-cdk/src/framework/handler.test.ts +61 -0
  41. package/template/lambda-sqs-worker-cdk/src/framework/handler.ts +43 -0
  42. package/template/lambda-sqs-worker-cdk/src/framework/logging.ts +27 -0
  43. package/template/lambda-sqs-worker-cdk/src/framework/metrics.ts +14 -0
  44. package/template/lambda-sqs-worker-cdk/src/framework/validation.test.ts +84 -0
  45. package/template/lambda-sqs-worker-cdk/src/framework/validation.ts +10 -0
  46. package/template/lambda-sqs-worker-cdk/src/mapping/jobScorer.ts +22 -0
  47. package/template/lambda-sqs-worker-cdk/src/services/aws.ts +5 -0
  48. package/template/lambda-sqs-worker-cdk/src/services/jobScorer.test.ts +44 -0
  49. package/template/lambda-sqs-worker-cdk/src/services/jobScorer.ts +59 -0
  50. package/template/lambda-sqs-worker-cdk/src/services/pipelineEventSender.test.ts +40 -0
  51. package/template/lambda-sqs-worker-cdk/src/services/pipelineEventSender.ts +33 -0
  52. package/template/lambda-sqs-worker-cdk/src/testing/handler.ts +13 -0
  53. package/template/lambda-sqs-worker-cdk/src/testing/logging.ts +19 -0
  54. package/template/lambda-sqs-worker-cdk/src/testing/services.ts +28 -0
  55. package/template/lambda-sqs-worker-cdk/src/testing/types.ts +33 -0
  56. package/template/lambda-sqs-worker-cdk/src/types/jobScorer.ts +15 -0
  57. package/template/lambda-sqs-worker-cdk/src/types/pipelineEvents.ts +21 -0
@@ -8,14 +8,18 @@ import {
8
8
  aws_lambda,
9
9
  aws_lambda_event_sources,
10
10
  aws_lambda_nodejs,
11
+ aws_secretsmanager,
11
12
  aws_sns,
12
- aws_sns_subscriptions,
13
13
  aws_sqs,
14
14
  } from 'aws-cdk-lib';
15
15
  import type { Construct } from 'constructs';
16
+ import { Datadog } from 'datadog-cdk-constructs-v2';
16
17
 
17
18
  import { config } from './config';
18
19
 
20
+ // Updated by https://github.com/seek-oss/rynovate
21
+ const DATADOG_EXTENSION_LAYER_VERSION = 64;
22
+
19
23
  export class AppStack extends Stack {
20
24
  constructor(scope: Construct, id: string, props?: StackProps) {
21
25
  super(scope, id, props);
@@ -49,47 +53,54 @@ export class AppStack extends Stack {
49
53
  encryptionMasterKey: kmsKey,
50
54
  });
51
55
 
52
- const topic = aws_sns.Topic.fromTopicArn(
56
+ // const topic = aws_sns.Topic.fromTopicArn(
57
+ // this,
58
+ // 'source-topic',
59
+ // config.sourceSnsTopicArn,
60
+ // );
61
+
62
+ // topic.addSubscription(
63
+ // new aws_sns_subscriptions.SqsSubscription(queue, {
64
+ // rawMessageDelivery: true, // Remove this property if you require end to end datadog tracing
65
+ // }),
66
+ // );
67
+
68
+ const snsKey = aws_kms.Alias.fromAliasName(
53
69
  this,
54
- 'source-topic',
55
- config.sourceSnsTopicArn,
70
+ 'alias-aws-sns',
71
+ 'alias/aws/sns',
56
72
  );
57
73
 
58
- topic.addSubscription(new aws_sns_subscriptions.SqsSubscription(queue));
74
+ const destinationTopic = new aws_sns.Topic(this, 'destination-topic', {
75
+ masterKey: snsKey,
76
+ topicName: '<%- serviceName %>',
77
+ });
59
78
 
60
79
  const architecture = '<%- lambdaCdkArchitecture %>';
61
80
 
62
- const defaultWorkerConfig: aws_lambda_nodejs.NodejsFunctionProps = {
81
+ const worker = new aws_lambda_nodejs.NodejsFunction(this, 'worker', {
63
82
  architecture: aws_lambda.Architecture[architecture],
64
83
  runtime: aws_lambda.Runtime.NODEJS_20_X,
65
84
  environmentEncryption: kmsKey,
66
85
  // aws-sdk-v3 sets this to true by default, so it is not necessary to set the environment variable
67
86
  // https://docs.aws.amazon.com/sdk-for-javascript/v3/developer-guide/node-reusing-connections.html
68
87
  awsSdkConnectionReuse: false,
69
- };
70
-
71
- const defaultWorkerBundlingConfig: aws_lambda_nodejs.BundlingOptions = {
72
- sourceMap: true,
73
- target: 'node20',
74
- // aws-sdk-v3 is set as an external module by default, but we want it to be bundled with the function
75
- externalModules: [],
76
- };
77
-
78
- const defaultWorkerEnvironment: Record<string, string> = {
79
- NODE_ENV: 'production',
80
- // https://nodejs.org/api/cli.html#cli_node_options_options
81
- NODE_OPTIONS: '--enable-source-maps',
82
- };
83
-
84
- const worker = new aws_lambda_nodejs.NodejsFunction(this, 'worker', {
85
- ...defaultWorkerConfig,
86
88
  entry: './src/app.ts',
87
89
  timeout: Duration.seconds(30),
88
- bundling: defaultWorkerBundlingConfig,
90
+ bundling: {
91
+ sourceMap: true,
92
+ target: 'node20',
93
+ // aws-sdk-v3 is set as an external module by default, but we want it to be bundled with the function
94
+ externalModules: [],
95
+ nodeModules: ['datadog-lambda-js', 'dd-trace'],
96
+ },
89
97
  functionName: '<%- serviceName %>',
90
98
  environment: {
91
- ...defaultWorkerEnvironment,
92
99
  ...config.workerLambda.environment,
100
+ NODE_ENV: 'production',
101
+ // https://nodejs.org/api/cli.html#cli_node_options_options
102
+ NODE_OPTIONS: '--enable-source-maps',
103
+ DESTINATION_SNS_TOPIC_ARN: destinationTopic.topicArn,
93
104
  },
94
105
  // https://github.com/aws/aws-cdk/issues/28237
95
106
  // This forces the lambda to be updated on every deployment
@@ -98,6 +109,22 @@ export class AppStack extends Stack {
98
109
  reservedConcurrentExecutions: config.workerLambda.reservedConcurrency,
99
110
  });
100
111
 
112
+ const datadogSecret = aws_secretsmanager.Secret.fromSecretPartialArn(
113
+ this,
114
+ 'datadog-api-key-secret',
115
+ config.datadogApiKeySecretArn,
116
+ );
117
+
118
+ const datadog = new Datadog(this, 'datadog', {
119
+ apiKeySecret: datadogSecret,
120
+ addLayers: false,
121
+ enableDatadogLogs: false,
122
+ flushMetricsToLogs: false,
123
+ extensionLayerVersion: DATADOG_EXTENSION_LAYER_VERSION,
124
+ });
125
+
126
+ datadog.addLambdaFunctions([worker]);
127
+
101
128
  const workerDeployment = new LambdaDeployment(this, 'workerDeployment', {
102
129
  lambdaFunction: worker,
103
130
  });
@@ -16,6 +16,7 @@ interface Config {
16
16
  VERSION: string;
17
17
  };
18
18
  };
19
+ datadogApiKeySecretArn: string;
19
20
  sourceSnsTopicArn: string;
20
21
  }
21
22
 
@@ -30,6 +31,7 @@ const configs: Record<Environment, Config> = {
30
31
  VERSION: Env.string('VERSION', { default: 'local' }),
31
32
  },
32
33
  },
34
+ datadogApiKeySecretArn: 'TODO: datadogApiKeySecretArn',
33
35
  sourceSnsTopicArn: 'TODO: sourceSnsTopicArn',
34
36
  },
35
37
  prod: {
@@ -42,6 +44,7 @@ const configs: Record<Environment, Config> = {
42
44
  VERSION: Env.string('VERSION', { default: 'local' }),
43
45
  },
44
46
  },
47
+ datadogApiKeySecretArn: 'TODO: datadogApiKeySecretArn',
45
48
  sourceSnsTopicArn: 'TODO: sourceSnsTopicArn',
46
49
  },
47
50
  };
@@ -17,20 +17,27 @@
17
17
  "@aws-sdk/client-lambda": "^3.363.0",
18
18
  "@aws-sdk/client-sns": "^3.363.0",
19
19
  "@seek/logger": "^9.0.0",
20
+ "datadog-lambda-js": "^9.0.0",
21
+ "dd-trace": "^5.0.0",
20
22
  "skuba-dive": "^2.0.0",
21
23
  "zod": "^3.19.1"
22
24
  },
23
25
  "devDependencies": {
24
26
  "@seek/aws-codedeploy-infra": "^2.1.0",
25
27
  "@types/aws-lambda": "^8.10.82",
28
+ "@types/chance": "^1.1.3",
26
29
  "@types/node": "^20.16.5",
27
30
  "aws-cdk": "^2.109.0",
28
31
  "aws-cdk-lib": "^2.109.0",
32
+ "aws-sdk-client-mock": "^4.0.0",
33
+ "aws-sdk-client-mock-jest": "^4.0.0",
34
+ "chance": "^1.1.8",
29
35
  "constructs": "^10.0.17",
36
+ "datadog-cdk-constructs-v2": "^1.18.0",
30
37
  "pino-pretty": "^11.0.0",
31
- "skuba": "*"
38
+ "skuba": "9.1.0-make-aaron-hate-me-20241019044405"
32
39
  },
33
- "packageManager": "pnpm@9.12.0",
40
+ "packageManager": "pnpm@9.12.2",
34
41
  "engines": {
35
42
  "node": ">=20"
36
43
  }
@@ -0,0 +1,116 @@
1
+ import { PublishCommand } from '@aws-sdk/client-sns';
2
+
3
+ import { metricsClient } from 'src/framework/metrics';
4
+ import { createCtx, createSqsEvent } from 'src/testing/handler';
5
+ import { logger } from 'src/testing/logging';
6
+ import { scoringService, sns } from 'src/testing/services';
7
+ import { chance, mockJobPublishedEvent } from 'src/testing/types';
8
+
9
+ import * as app from './app';
10
+
11
+ describe('app', () => {
12
+ it('exports a handler', () => expect(app).toHaveProperty('handler'));
13
+ });
14
+
15
+ describe('handler', () => {
16
+ const ctx = createCtx();
17
+
18
+ const jobPublished = mockJobPublishedEvent({ entityId: chance.name() });
19
+
20
+ const score = chance.floating({ max: 1, min: 0 });
21
+
22
+ const distribution = jest
23
+ .spyOn(metricsClient, 'distribution')
24
+ .mockReturnValue();
25
+
26
+ beforeAll(logger.spy);
27
+ beforeAll(scoringService.spy);
28
+
29
+ beforeEach(() => {
30
+ scoringService.request.mockResolvedValue(score);
31
+ sns.publish.resolves({ MessageId: chance.guid({ version: 4 }) });
32
+ });
33
+
34
+ afterEach(() => {
35
+ logger.clear();
36
+ distribution.mockClear();
37
+ scoringService.clear();
38
+ sns.clear();
39
+ });
40
+
41
+ it('handles one record', async () => {
42
+ const event = createSqsEvent([JSON.stringify(jobPublished)]);
43
+
44
+ await expect(app.handler(event, ctx)).resolves.toBeUndefined();
45
+
46
+ expect(scoringService.request).toHaveBeenCalledTimes(1);
47
+
48
+ expect(logger.error).not.toHaveBeenCalled();
49
+
50
+ expect(logger.debug.mock.calls).toEqual([
51
+ [{ count: 1 }, 'Received jobs'],
52
+ [{ snsMessageId: expect.any(String) }, 'Scored job'],
53
+ ['Function succeeded'],
54
+ ]);
55
+
56
+ expect(distribution.mock.calls).toEqual([
57
+ ['job.received', 1],
58
+ ['job.scored', 1],
59
+ ]);
60
+
61
+ expect(sns.client).toReceiveCommandTimes(PublishCommand, 1);
62
+ });
63
+
64
+ it('throws on invalid input', () => {
65
+ const event = createSqsEvent(['}']);
66
+
67
+ return expect(app.handler(event, ctx)).rejects.toThrow('Function failed');
68
+ });
69
+
70
+ it('bubbles up scoring service error', async () => {
71
+ const err = Error(chance.sentence());
72
+
73
+ scoringService.request.mockRejectedValue(err);
74
+
75
+ const event = createSqsEvent([JSON.stringify(jobPublished)]);
76
+
77
+ await expect(app.handler(event, ctx)).rejects.toThrow('Function failed');
78
+
79
+ expect(logger.error).toHaveBeenCalledWith({ err }, 'Function failed');
80
+ });
81
+
82
+ it('bubbles up SNS error', async () => {
83
+ const err = Error(chance.sentence());
84
+
85
+ sns.publish.rejects(err);
86
+
87
+ const event = createSqsEvent([JSON.stringify(jobPublished)]);
88
+
89
+ await expect(app.handler(event, ctx)).rejects.toThrow('Function failed');
90
+
91
+ expect(logger.error).toHaveBeenCalledWith({ err }, 'Function failed');
92
+ });
93
+
94
+ it('throws on zero records', async () => {
95
+ const err = new Error('Received 0 records');
96
+
97
+ const event = createSqsEvent([]);
98
+
99
+ await expect(app.handler(event, ctx)).rejects.toThrow('Function failed');
100
+
101
+ expect(logger.error).toHaveBeenCalledWith({ err }, 'Function failed');
102
+ });
103
+
104
+ it('throws on multiple records', async () => {
105
+ const err = new Error('Received 2 records');
106
+
107
+ const event = createSqsEvent([
108
+ JSON.stringify(jobPublished),
109
+ JSON.stringify(jobPublished),
110
+ ]);
111
+
112
+ await expect(app.handler(event, ctx)).rejects.toThrow('Function failed');
113
+
114
+ expect(logger.error).toHaveBeenCalledWith({ err }, 'Function failed');
115
+ });
116
+ });
@@ -1,35 +1,57 @@
1
- import createLogger from '@seek/logger';
2
- import type { SQSEvent, SQSHandler } from 'aws-lambda';
1
+ import 'skuba-dive/register';
3
2
 
4
- import { config } from './config';
3
+ import type { SQSEvent } from 'aws-lambda';
5
4
 
6
- export const logger = createLogger({
7
- base: {
8
- environment: config.environment,
9
- version: config.version,
10
- },
11
-
12
- level: config.logLevel,
13
-
14
- name: config.name,
15
-
16
- transport:
17
- config.environment === 'local' ? { target: 'pino-pretty' } : undefined,
18
- });
5
+ import { createHandler } from 'src/framework/handler';
6
+ import { logger } from 'src/framework/logging';
7
+ import { metricsClient } from 'src/framework/metrics';
8
+ import { validateJson } from 'src/framework/validation';
9
+ import { scoreJobPublishedEvent, scoringService } from 'src/services/jobScorer';
10
+ import { sendPipelineEvent } from 'src/services/pipelineEventSender';
11
+ import { JobPublishedEventSchema } from 'src/types/pipelineEvents';
19
12
 
20
13
  /**
21
14
  * Tests connectivity to ensure appropriate access and network configuration.
22
15
  */
23
- const smokeTest = async () => Promise.resolve();
16
+ const smokeTest = async () => {
17
+ await Promise.all([scoringService.smokeTest(), sendPipelineEvent({}, true)]);
18
+ };
24
19
 
25
- export const handler: SQSHandler = (event: SQSEvent) => {
20
+ export const handler = createHandler<SQSEvent>(async (event) => {
26
21
  // Treat an empty object as our smoke test event.
27
22
  if (!Object.keys(event).length) {
28
23
  logger.debug('Received smoke test request');
29
24
  return smokeTest();
30
25
  }
31
26
 
32
- logger.info('Hello World!');
27
+ const count = event.Records.length;
33
28
 
34
- return;
35
- };
29
+ if (count !== 1) {
30
+ throw Error(`Received ${count} records`);
31
+ }
32
+
33
+ logger.debug({ count }, 'Received jobs');
34
+
35
+ metricsClient.distribution('job.received', event.Records.length);
36
+
37
+ const record = event.Records[0];
38
+ if (!record) {
39
+ throw new Error('Malformed SQS event with no records');
40
+ }
41
+
42
+ const { body } = record;
43
+
44
+ // TODO: this throws an error, which will cause the Lambda function to retry
45
+ // the event and eventually send it to your dead-letter queue. If you don't
46
+ // trust your source to provide consistently well-formed input, consider
47
+ // catching and handling this error in code.
48
+ const publishedJob = validateJson(body, JobPublishedEventSchema);
49
+
50
+ const scoredJob = await scoreJobPublishedEvent(publishedJob);
51
+
52
+ const snsMessageId = await sendPipelineEvent(scoredJob);
53
+
54
+ logger.debug({ snsMessageId }, 'Scored job');
55
+
56
+ metricsClient.distribution('job.scored', 1);
57
+ });
@@ -4,8 +4,11 @@ interface Config {
4
4
  environment: Environment;
5
5
 
6
6
  logLevel: string;
7
+ metrics: boolean;
7
8
  name: string;
8
9
  version: string;
10
+
11
+ destinationSnsTopicArn: string;
9
12
  }
10
13
 
11
14
  type Environment = (typeof environments)[number];
@@ -18,26 +21,38 @@ const environment = Env.oneOf(environments)('ENVIRONMENT');
18
21
  const configs: Record<Environment, () => Omit<Config, 'environment'>> = {
19
22
  local: () => ({
20
23
  logLevel: 'debug',
24
+ metrics: false,
21
25
  name: '<%- serviceName %>',
22
26
  version: 'local',
27
+
28
+ destinationSnsTopicArn: 'arn:aws:sns:us-east-2:123456789012:destination',
23
29
  }),
24
30
 
25
31
  test: () => ({
26
32
  logLevel: Env.string('LOG_LEVEL', { default: 'silent' }),
33
+ metrics: false,
27
34
  name: '<%- serviceName %>',
28
35
  version: 'test',
36
+
37
+ destinationSnsTopicArn: 'arn:aws:sns:us-east-2:123456789012:destination',
29
38
  }),
30
39
 
31
40
  dev: () => ({
32
41
  logLevel: 'debug',
42
+ metrics: true,
33
43
  name: Env.string('SERVICE'),
34
44
  version: Env.string('VERSION'),
45
+
46
+ destinationSnsTopicArn: Env.string('DESTINATION_SNS_TOPIC_ARN'),
35
47
  }),
36
48
 
37
49
  prod: () => ({
38
50
  logLevel: 'info',
51
+ metrics: true,
39
52
  name: Env.string('SERVICE'),
40
53
  version: Env.string('VERSION'),
54
+
55
+ destinationSnsTopicArn: Env.string('DESTINATION_SNS_TOPIC_ARN'),
41
56
  }),
42
57
  };
43
58
 
@@ -0,0 +1,61 @@
1
+ import { createCtx } from 'src/testing/handler';
2
+ import { logger } from 'src/testing/logging';
3
+ import { chance } from 'src/testing/types';
4
+
5
+ import { createHandler } from './handler';
6
+
7
+ describe('createHandler', () => {
8
+ const ctx = createCtx();
9
+ const input = chance.paragraph();
10
+
11
+ beforeAll(logger.spy);
12
+
13
+ afterEach(logger.clear);
14
+
15
+ it('handles happy path', async () => {
16
+ const output = chance.paragraph();
17
+
18
+ const handler = createHandler((event) => {
19
+ expect(event).toBe(input);
20
+
21
+ logger.debug('Handler invoked');
22
+
23
+ return Promise.resolve(output);
24
+ });
25
+
26
+ await expect(handler(input, ctx)).resolves.toBe(output);
27
+
28
+ expect(logger.error).not.toHaveBeenCalled();
29
+
30
+ expect(logger.debug.mock.calls).toEqual([
31
+ ['Handler invoked'],
32
+ ['Function succeeded'],
33
+ ]);
34
+ });
35
+
36
+ it('handles async error', async () => {
37
+ const err = Error(chance.sentence());
38
+
39
+ const handler = createHandler(() => Promise.reject(err));
40
+
41
+ await expect(handler(input, ctx)).rejects.toThrow('Function failed');
42
+
43
+ expect(logger.error).toHaveBeenCalledWith({ err }, 'Function failed');
44
+
45
+ expect(logger.debug).not.toHaveBeenCalled();
46
+ });
47
+
48
+ it('handles sync error', async () => {
49
+ const err = Error(chance.sentence());
50
+
51
+ const handler = createHandler(() => {
52
+ throw err;
53
+ });
54
+
55
+ await expect(handler(input, ctx)).rejects.toThrow('Function failed');
56
+
57
+ expect(logger.error).toHaveBeenCalledWith({ err }, 'Function failed');
58
+
59
+ expect(logger.debug).not.toHaveBeenCalled();
60
+ });
61
+ });
@@ -0,0 +1,43 @@
1
+ import { datadog } from 'datadog-lambda-js';
2
+
3
+ import { config } from 'src/config';
4
+ import { logger, loggerContext } from 'src/framework/logging';
5
+
6
+ interface LambdaContext {
7
+ awsRequestId: string;
8
+ }
9
+
10
+ type Handler<Event, Output> = (
11
+ event: Event,
12
+ ctx: LambdaContext,
13
+ ) => Promise<Output>;
14
+
15
+ /**
16
+ * Conditionally applies the Datadog wrapper to a Lambda handler.
17
+ *
18
+ * This also "fixes" its broken type definitions.
19
+ */
20
+ const withDatadog = <Event, Output = unknown>(
21
+ fn: Handler<Event, Output>,
22
+ ): Handler<Event, Output> =>
23
+ // istanbul ignore next
24
+ config.metrics ? (datadog(fn) as Handler<Event, Output>) : fn;
25
+
26
+ export const createHandler = <Event, Output = unknown>(
27
+ fn: (event: Event) => Promise<Output>,
28
+ ) =>
29
+ withDatadog<Event>((event, { awsRequestId }) =>
30
+ loggerContext.run({ awsRequestId }, async () => {
31
+ try {
32
+ const output = await fn(event);
33
+
34
+ logger.debug('Function succeeded');
35
+
36
+ return output;
37
+ } catch (err) {
38
+ logger.error({ err }, 'Function failed');
39
+
40
+ throw new Error('Function failed');
41
+ }
42
+ }),
43
+ );
@@ -0,0 +1,27 @@
1
+ import { AsyncLocalStorage } from 'async_hooks';
2
+
3
+ import createLogger from '@seek/logger';
4
+
5
+ import { config } from 'src/config';
6
+
7
+ interface LoggerContext {
8
+ awsRequestId: string;
9
+ }
10
+
11
+ export const loggerContext = new AsyncLocalStorage<LoggerContext>();
12
+
13
+ export const logger = createLogger({
14
+ base: {
15
+ environment: config.environment,
16
+ version: config.version,
17
+ },
18
+
19
+ level: config.logLevel,
20
+
21
+ mixin: () => ({ ...loggerContext.getStore() }),
22
+
23
+ name: config.name,
24
+
25
+ transport:
26
+ config.environment === 'local' ? { target: 'pino-pretty' } : undefined,
27
+ });
@@ -0,0 +1,14 @@
1
+ import { sendDistributionMetric } from 'datadog-lambda-js';
2
+
3
+ import { config } from 'src/config';
4
+
5
+ const prefix = `${config.name}.`;
6
+
7
+ export const metricsClient = {
8
+ distribution: (
9
+ ...[name, ...rest]: Parameters<typeof sendDistributionMetric>
10
+ ) =>
11
+ config.metrics
12
+ ? sendDistributionMetric(`${prefix}${name}`, ...rest)
13
+ : undefined,
14
+ };
@@ -0,0 +1,84 @@
1
+ import {
2
+ IdDescriptionSchema,
3
+ chance,
4
+ mockIdDescription,
5
+ } from 'src/testing/types';
6
+
7
+ import { validateJson } from './validation';
8
+
9
+ describe('validateJson', () => {
10
+ const idDescription = mockIdDescription();
11
+
12
+ it('permits valid input', () => {
13
+ const input = JSON.stringify(idDescription);
14
+
15
+ expect(validateJson(input, IdDescriptionSchema)).toStrictEqual(
16
+ idDescription,
17
+ );
18
+ });
19
+
20
+ it('filters additional properties', () => {
21
+ const input = JSON.stringify({ ...idDescription, hacker: chance.name() });
22
+
23
+ expect(validateJson(input, IdDescriptionSchema)).toStrictEqual(
24
+ idDescription,
25
+ );
26
+ });
27
+
28
+ it('blocks mistyped prop', () => {
29
+ const input = JSON.stringify({ ...idDescription, id: null });
30
+
31
+ expect(() => validateJson(input, IdDescriptionSchema))
32
+ .toThrowErrorMatchingInlineSnapshot(`
33
+ "[
34
+ {
35
+ "code": "invalid_type",
36
+ "expected": "string",
37
+ "received": "null",
38
+ "path": [
39
+ "id"
40
+ ],
41
+ "message": "Expected string, received null"
42
+ }
43
+ ]"
44
+ `);
45
+ });
46
+
47
+ it('blocks missing prop', () => {
48
+ const input = '{}';
49
+
50
+ expect(() => validateJson(input, IdDescriptionSchema))
51
+ .toThrowErrorMatchingInlineSnapshot(`
52
+ "[
53
+ {
54
+ "code": "invalid_type",
55
+ "expected": "string",
56
+ "received": "undefined",
57
+ "path": [
58
+ "id"
59
+ ],
60
+ "message": "Required"
61
+ },
62
+ {
63
+ "code": "invalid_type",
64
+ "expected": "string",
65
+ "received": "undefined",
66
+ "path": [
67
+ "description"
68
+ ],
69
+ "message": "Required"
70
+ }
71
+ ]"
72
+ `);
73
+ });
74
+
75
+ it('blocks invalid JSON', () => {
76
+ const input = '}';
77
+
78
+ expect(() =>
79
+ validateJson(input, IdDescriptionSchema),
80
+ ).toThrowErrorMatchingInlineSnapshot(
81
+ `"Unexpected token '}', "}" is not valid JSON"`,
82
+ );
83
+ });
84
+ });
@@ -0,0 +1,10 @@
1
+ import type { z } from 'zod';
2
+
3
+ export const validateJson = <
4
+ Output,
5
+ Def extends z.ZodTypeDef = z.ZodTypeDef,
6
+ Input = Output,
7
+ >(
8
+ input: string,
9
+ schema: z.ZodSchema<Output, Def, Input>,
10
+ ): Output => schema.parse(JSON.parse(input));
@@ -0,0 +1,22 @@
1
+ import type { JobScorerInput, JobScorerOutput } from 'src/types/jobScorer';
2
+ import type {
3
+ JobPublishedEvent,
4
+ JobScoredEvent,
5
+ } from 'src/types/pipelineEvents';
6
+
7
+ export const jobPublishedEventToScorerInput = (
8
+ record: JobPublishedEvent,
9
+ ): JobScorerInput => ({
10
+ details: record.data.details,
11
+ id: record.entityId,
12
+ });
13
+
14
+ export const jobScorerOutputToScoredEvent = (
15
+ output: JobScorerOutput,
16
+ ): JobScoredEvent => ({
17
+ data: {
18
+ score: output.score,
19
+ },
20
+ entityId: output.id,
21
+ eventType: 'JobScored',
22
+ });