skuba 9.0.0 → 9.0.1-upgrade-cdk-template-20241002233314
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/package.json +1 -1
- package/template/greeter/package.json +1 -1
- package/template/lambda-sqs-worker/serverless.yml +1 -1
- package/template/lambda-sqs-worker-cdk/.env +1 -0
- package/template/lambda-sqs-worker-cdk/README.md +145 -0
- package/template/lambda-sqs-worker-cdk/infra/__snapshots__/appStack.test.ts.snap +102 -134
- package/template/lambda-sqs-worker-cdk/infra/appStack.test.ts +13 -2
- package/template/lambda-sqs-worker-cdk/infra/appStack.ts +52 -6
- package/template/lambda-sqs-worker-cdk/infra/config.ts +3 -0
- package/template/lambda-sqs-worker-cdk/package.json +8 -1
- package/template/lambda-sqs-worker-cdk/src/app.test.ts +116 -0
- package/template/lambda-sqs-worker-cdk/src/app.ts +43 -21
- package/template/lambda-sqs-worker-cdk/src/config.ts +15 -0
- package/template/lambda-sqs-worker-cdk/src/framework/handler.test.ts +61 -0
- package/template/lambda-sqs-worker-cdk/src/framework/handler.ts +43 -0
- package/template/lambda-sqs-worker-cdk/src/framework/logging.ts +27 -0
- package/template/lambda-sqs-worker-cdk/src/framework/metrics.ts +14 -0
- package/template/lambda-sqs-worker-cdk/src/framework/validation.test.ts +84 -0
- package/template/lambda-sqs-worker-cdk/src/framework/validation.ts +10 -0
- package/template/lambda-sqs-worker-cdk/src/mapping/jobScorer.ts +22 -0
- package/template/lambda-sqs-worker-cdk/src/services/aws.ts +5 -0
- package/template/lambda-sqs-worker-cdk/src/services/jobScorer.test.ts +44 -0
- package/template/lambda-sqs-worker-cdk/src/services/jobScorer.ts +59 -0
- package/template/lambda-sqs-worker-cdk/src/services/pipelineEventSender.test.ts +40 -0
- package/template/lambda-sqs-worker-cdk/src/services/pipelineEventSender.ts +33 -0
- package/template/lambda-sqs-worker-cdk/src/testing/handler.ts +13 -0
- package/template/lambda-sqs-worker-cdk/src/testing/logging.ts +19 -0
- package/template/lambda-sqs-worker-cdk/src/testing/services.ts +28 -0
- package/template/lambda-sqs-worker-cdk/src/testing/types.ts +33 -0
- package/template/lambda-sqs-worker-cdk/src/types/jobScorer.ts +15 -0
- package/template/lambda-sqs-worker-cdk/src/types/pipelineEvents.ts +21 -0

package/template/lambda-sqs-worker-cdk/src/app.test.ts

@@ -0,0 +1,116 @@
+import { PublishCommand } from '@aws-sdk/client-sns';
+
+import { metricsClient } from 'src/framework/metrics';
+import { createCtx, createSqsEvent } from 'src/testing/handler';
+import { logger } from 'src/testing/logging';
+import { scoringService, sns } from 'src/testing/services';
+import { chance, mockJobPublishedEvent } from 'src/testing/types';
+
+import * as app from './app';
+
+describe('app', () => {
+  it('exports a handler', () => expect(app).toHaveProperty('handler'));
+});
+
+describe('handler', () => {
+  const ctx = createCtx();
+
+  const jobPublished = mockJobPublishedEvent({ entityId: chance.name() });
+
+  const score = chance.floating({ max: 1, min: 0 });
+
+  const distribution = jest
+    .spyOn(metricsClient, 'distribution')
+    .mockReturnValue();
+
+  beforeAll(logger.spy);
+  beforeAll(scoringService.spy);
+
+  beforeEach(() => {
+    scoringService.request.mockResolvedValue(score);
+    sns.publish.resolves({ MessageId: chance.guid({ version: 4 }) });
+  });
+
+  afterEach(() => {
+    logger.clear();
+    distribution.mockClear();
+    scoringService.clear();
+    sns.clear();
+  });
+
+  it('handles one record', async () => {
+    const event = createSqsEvent([JSON.stringify(jobPublished)]);
+
+    await expect(app.handler(event, ctx)).resolves.toBeUndefined();
+
+    expect(scoringService.request).toHaveBeenCalledTimes(1);
+
+    expect(logger.error).not.toHaveBeenCalled();
+
+    expect(logger.debug.mock.calls).toEqual([
+      [{ count: 1 }, 'Received jobs'],
+      [{ snsMessageId: expect.any(String) }, 'Scored job'],
+      ['Function succeeded'],
+    ]);
+
+    expect(distribution.mock.calls).toEqual([
+      ['job.received', 1],
+      ['job.scored', 1],
+    ]);
+
+    expect(sns.client).toReceiveCommandTimes(PublishCommand, 1);
+  });
+
+  it('throws on invalid input', () => {
+    const event = createSqsEvent(['}']);
+
+    return expect(app.handler(event, ctx)).rejects.toThrow('Function failed');
+  });
+
+  it('bubbles up scoring service error', async () => {
+    const err = Error(chance.sentence());
+
+    scoringService.request.mockRejectedValue(err);
+
+    const event = createSqsEvent([JSON.stringify(jobPublished)]);
+
+    await expect(app.handler(event, ctx)).rejects.toThrow('Function failed');
+
+    expect(logger.error).toHaveBeenCalledWith({ err }, 'Function failed');
+  });
+
+  it('bubbles up SNS error', async () => {
+    const err = Error(chance.sentence());
+
+    sns.publish.rejects(err);
+
+    const event = createSqsEvent([JSON.stringify(jobPublished)]);
+
+    await expect(app.handler(event, ctx)).rejects.toThrow('Function failed');
+
+    expect(logger.error).toHaveBeenCalledWith({ err }, 'Function failed');
+  });
+
+  it('throws on zero records', async () => {
+    const err = new Error('Received 0 records');
+
+    const event = createSqsEvent([]);
+
+    await expect(app.handler(event, ctx)).rejects.toThrow('Function failed');
+
+    expect(logger.error).toHaveBeenCalledWith({ err }, 'Function failed');
+  });
+
+  it('throws on multiple records', async () => {
+    const err = new Error('Received 2 records');
+
+    const event = createSqsEvent([
+      JSON.stringify(jobPublished),
+      JSON.stringify(jobPublished),
+    ]);
+
+    await expect(app.handler(event, ctx)).rejects.toThrow('Function failed');
+
+    expect(logger.error).toHaveBeenCalledWith({ err }, 'Function failed');
+  });
+});

package/template/lambda-sqs-worker-cdk/src/app.ts

@@ -1,35 +1,57 @@
-import
-import type { SQSEvent, SQSHandler } from 'aws-lambda';
+import 'skuba-dive/register';
 
-import {
+import type { SQSEvent } from 'aws-lambda';
 
-
-
-
-
-
-
-
-
-  name: config.name,
-
-  transport:
-    config.environment === 'local' ? { target: 'pino-pretty' } : undefined,
-});
+import { createHandler } from 'src/framework/handler';
+import { logger } from 'src/framework/logging';
+import { metricsClient } from 'src/framework/metrics';
+import { validateJson } from 'src/framework/validation';
+import { scoreJobPublishedEvent, scoringService } from 'src/services/jobScorer';
+import { sendPipelineEvent } from 'src/services/pipelineEventSender';
+import { JobPublishedEventSchema } from 'src/types/pipelineEvents';
 
 /**
  * Tests connectivity to ensure appropriate access and network configuration.
  */
-const smokeTest = async () =>
+const smokeTest = async () => {
+  await Promise.all([scoringService.smokeTest(), sendPipelineEvent({}, true)]);
+};
 
-export const handler
+export const handler = createHandler<SQSEvent>(async (event) => {
   // Treat an empty object as our smoke test event.
   if (!Object.keys(event).length) {
     logger.debug('Received smoke test request');
     return smokeTest();
   }
 
-
+  const count = event.Records.length;
 
-
-};
+  if (count !== 1) {
+    throw Error(`Received ${count} records`);
+  }
+
+  logger.debug({ count }, 'Received jobs');
+
+  metricsClient.distribution('job.received', event.Records.length);
+
+  const record = event.Records[0];
+  if (!record) {
+    throw new Error('Malformed SQS event with no records');
+  }
+
+  const { body } = record;
+
+  // TODO: this throws an error, which will cause the Lambda function to retry
+  // the event and eventually send it to your dead-letter queue. If you don't
+  // trust your source to provide consistently well-formed input, consider
+  // catching and handling this error in code.
+  const publishedJob = validateJson(body, JobPublishedEventSchema);
+
+  const scoredJob = await scoreJobPublishedEvent(publishedJob);
+
+  const snsMessageId = await sendPipelineEvent(scoredJob);
+
+  logger.debug({ snsMessageId }, 'Scored job');
+
+  metricsClient.distribution('job.scored', 1);
+});
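
The TODO comment in the new handler leaves validation failures to throw, retry, and eventually land the record on the dead-letter queue. As a hedged illustration of the "catch and handle this error in code" alternative it mentions, here is a minimal sketch that reuses the template's `validateJson`, `JobPublishedEventSchema`, and `logger`; the `tryParseJob` helper and its skip-and-warn policy are hypothetical and not part of the package.

```typescript
import { logger } from 'src/framework/logging';
import { validateJson } from 'src/framework/validation';
import { JobPublishedEventSchema } from 'src/types/pipelineEvents';

// Hypothetical helper: swallow a malformed record instead of throwing,
// so the Lambda does not retry it into the dead-letter queue.
const tryParseJob = (body: string) => {
  try {
    return validateJson(body, JobPublishedEventSchema);
  } catch (err) {
    logger.warn({ err }, 'Skipping malformed record');
    return undefined;
  }
};
```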

package/template/lambda-sqs-worker-cdk/src/config.ts

@@ -4,8 +4,11 @@ interface Config {
   environment: Environment;
 
   logLevel: string;
+  metrics: boolean;
   name: string;
   version: string;
+
+  destinationSnsTopicArn: string;
 }
 
 type Environment = (typeof environments)[number];
@@ -18,26 +21,38 @@ const environment = Env.oneOf(environments)('ENVIRONMENT');
 const configs: Record<Environment, () => Omit<Config, 'environment'>> = {
   local: () => ({
     logLevel: 'debug',
+    metrics: false,
     name: '<%- serviceName %>',
     version: 'local',
+
+    destinationSnsTopicArn: 'arn:aws:sns:us-east-2:123456789012:destination',
   }),
 
   test: () => ({
     logLevel: Env.string('LOG_LEVEL', { default: 'silent' }),
+    metrics: false,
     name: '<%- serviceName %>',
     version: 'test',
+
+    destinationSnsTopicArn: 'arn:aws:sns:us-east-2:123456789012:destination',
   }),
 
   dev: () => ({
     logLevel: 'debug',
+    metrics: true,
     name: Env.string('SERVICE'),
    version: Env.string('VERSION'),
+
+    destinationSnsTopicArn: Env.string('DESTINATION_SNS_TOPIC_ARN'),
   }),
 
   prod: () => ({
     logLevel: 'info',
+    metrics: true,
     name: Env.string('SERVICE'),
     version: Env.string('VERSION'),
+
+    destinationSnsTopicArn: Env.string('DESTINATION_SNS_TOPIC_ARN'),
   }),
 };
 

package/template/lambda-sqs-worker-cdk/src/framework/handler.test.ts

@@ -0,0 +1,61 @@
+import { createCtx } from 'src/testing/handler';
+import { logger } from 'src/testing/logging';
+import { chance } from 'src/testing/types';
+
+import { createHandler } from './handler';
+
+describe('createHandler', () => {
+  const ctx = createCtx();
+  const input = chance.paragraph();
+
+  beforeAll(logger.spy);
+
+  afterEach(logger.clear);
+
+  it('handles happy path', async () => {
+    const output = chance.paragraph();
+
+    const handler = createHandler((event) => {
+      expect(event).toBe(input);
+
+      logger.debug('Handler invoked');
+
+      return Promise.resolve(output);
+    });
+
+    await expect(handler(input, ctx)).resolves.toBe(output);
+
+    expect(logger.error).not.toHaveBeenCalled();
+
+    expect(logger.debug.mock.calls).toEqual([
+      ['Handler invoked'],
+      ['Function succeeded'],
+    ]);
+  });
+
+  it('handles async error', async () => {
+    const err = Error(chance.sentence());
+
+    const handler = createHandler(() => Promise.reject(err));
+
+    await expect(handler(input, ctx)).rejects.toThrow('Function failed');
+
+    expect(logger.error).toHaveBeenCalledWith({ err }, 'Function failed');
+
+    expect(logger.debug).not.toHaveBeenCalled();
+  });
+
+  it('handles sync error', async () => {
+    const err = Error(chance.sentence());
+
+    const handler = createHandler(() => {
+      throw err;
+    });
+
+    await expect(handler(input, ctx)).rejects.toThrow('Function failed');
+
+    expect(logger.error).toHaveBeenCalledWith({ err }, 'Function failed');
+
+    expect(logger.debug).not.toHaveBeenCalled();
+  });
+});

package/template/lambda-sqs-worker-cdk/src/framework/handler.ts

@@ -0,0 +1,43 @@
+import { datadog } from 'datadog-lambda-js';
+
+import { config } from 'src/config';
+import { logger, loggerContext } from 'src/framework/logging';
+
+interface LambdaContext {
+  awsRequestId: string;
+}
+
+type Handler<Event, Output> = (
+  event: Event,
+  ctx: LambdaContext,
+) => Promise<Output>;
+
+/**
+ * Conditionally applies the Datadog wrapper to a Lambda handler.
+ *
+ * This also "fixes" its broken type definitions.
+ */
+const withDatadog = <Event, Output = unknown>(
+  fn: Handler<Event, Output>,
+): Handler<Event, Output> =>
+  // istanbul ignore next
+  config.metrics ? (datadog(fn) as Handler<Event, Output>) : fn;
+
+export const createHandler = <Event, Output = unknown>(
+  fn: (event: Event) => Promise<Output>,
+) =>
+  withDatadog<Event>((event, { awsRequestId }) =>
+    loggerContext.run({ awsRequestId }, async () => {
+      try {
+        const output = await fn(event);
+
+        logger.debug('Function succeeded');
+
+        return output;
+      } catch (err) {
+        logger.error({ err }, 'Function failed');
+
+        throw new Error('Function failed');
+      }
+    }),
+  );

package/template/lambda-sqs-worker-cdk/src/framework/logging.ts

@@ -0,0 +1,27 @@
+import { AsyncLocalStorage } from 'async_hooks';
+
+import createLogger from '@seek/logger';
+
+import { config } from 'src/config';
+
+interface LoggerContext {
+  awsRequestId: string;
+}
+
+export const loggerContext = new AsyncLocalStorage<LoggerContext>();
+
+export const logger = createLogger({
+  base: {
+    environment: config.environment,
+    version: config.version,
+  },
+
+  level: config.logLevel,
+
+  mixin: () => ({ ...loggerContext.getStore() }),
+
+  name: config.name,
+
+  transport:
+    config.environment === 'local' ? { target: 'pino-pretty' } : undefined,
+});
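
The `mixin` above spreads the `AsyncLocalStorage` store into every log entry, which is how `createHandler` (earlier in this diff) stamps each invocation's `awsRequestId` onto its logs. A small sketch of that interplay follows, using an illustrative request ID rather than anything from the package.

```typescript
import { logger, loggerContext } from 'src/framework/logging';

// Any log emitted inside run() picks up the store via the mixin, so the
// entry carries an awsRequestId field alongside the configured base fields.
loggerContext.run({ awsRequestId: '00000000-0000-4000-8000-000000000000' }, () => {
  logger.debug('Scored job');
});
```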

package/template/lambda-sqs-worker-cdk/src/framework/metrics.ts

@@ -0,0 +1,14 @@
+import { sendDistributionMetric } from 'datadog-lambda-js';
+
+import { config } from 'src/config';
+
+const prefix = `${config.name}.`;
+
+export const metricsClient = {
+  distribution: (
+    ...[name, ...rest]: Parameters<typeof sendDistributionMetric>
+  ) =>
+    config.metrics
+      ? sendDistributionMetric(`${prefix}${name}`, ...rest)
+      : undefined,
+};
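
Worth noting from the wrapper above: every metric name is prefixed with `config.name`, and the call is a no-op when `config.metrics` is false (the local and test environments), so the `'job.received'` distribution recorded in `app.ts` reaches Datadog under the service-prefixed name. A brief usage sketch, with an assumed service name purely for illustration:

```typescript
import { metricsClient } from 'src/framework/metrics';

// With config.name === 'my-service' and config.metrics === true, this reports
// the Datadog distribution 'my-service.job.received'; with metrics disabled it
// returns undefined without calling datadog-lambda-js.
metricsClient.distribution('job.received', 1);
```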

package/template/lambda-sqs-worker-cdk/src/framework/validation.test.ts

@@ -0,0 +1,84 @@
+import {
+  IdDescriptionSchema,
+  chance,
+  mockIdDescription,
+} from 'src/testing/types';
+
+import { validateJson } from './validation';
+
+describe('validateJson', () => {
+  const idDescription = mockIdDescription();
+
+  it('permits valid input', () => {
+    const input = JSON.stringify(idDescription);
+
+    expect(validateJson(input, IdDescriptionSchema)).toStrictEqual(
+      idDescription,
+    );
+  });
+
+  it('filters additional properties', () => {
+    const input = JSON.stringify({ ...idDescription, hacker: chance.name() });
+
+    expect(validateJson(input, IdDescriptionSchema)).toStrictEqual(
+      idDescription,
+    );
+  });
+
+  it('blocks mistyped prop', () => {
+    const input = JSON.stringify({ ...idDescription, id: null });
+
+    expect(() => validateJson(input, IdDescriptionSchema))
+      .toThrowErrorMatchingInlineSnapshot(`
+      "[
+        {
+          "code": "invalid_type",
+          "expected": "string",
+          "received": "null",
+          "path": [
+            "id"
+          ],
+          "message": "Expected string, received null"
+        }
+      ]"
+    `);
+  });
+
+  it('blocks missing prop', () => {
+    const input = '{}';
+
+    expect(() => validateJson(input, IdDescriptionSchema))
+      .toThrowErrorMatchingInlineSnapshot(`
+      "[
+        {
+          "code": "invalid_type",
+          "expected": "string",
+          "received": "undefined",
+          "path": [
+            "id"
+          ],
+          "message": "Required"
+        },
+        {
+          "code": "invalid_type",
+          "expected": "string",
+          "received": "undefined",
+          "path": [
+            "description"
+          ],
+          "message": "Required"
+        }
+      ]"
+    `);
+  });
+
+  it('blocks invalid JSON', () => {
+    const input = '}';
+
+    expect(() =>
+      validateJson(input, IdDescriptionSchema),
+    ).toThrowErrorMatchingInlineSnapshot(
+      `"Unexpected token '}', "}" is not valid JSON"`,
+    );
+  });
+});

package/template/lambda-sqs-worker-cdk/src/mapping/jobScorer.ts

@@ -0,0 +1,22 @@
+import type { JobScorerInput, JobScorerOutput } from 'src/types/jobScorer';
+import type {
+  JobPublishedEvent,
+  JobScoredEvent,
+} from 'src/types/pipelineEvents';
+
+export const jobPublishedEventToScorerInput = (
+  record: JobPublishedEvent,
+): JobScorerInput => ({
+  details: record.data.details,
+  id: record.entityId,
+});
+
+export const jobScorerOutputToScoredEvent = (
+  output: JobScorerOutput,
+): JobScoredEvent => ({
+  data: {
+    score: output.score,
+  },
+  entityId: output.id,
+  eventType: 'JobScored',
+});

package/template/lambda-sqs-worker-cdk/src/services/jobScorer.test.ts

@@ -0,0 +1,44 @@
+import { scoringService } from 'src/testing/services';
+import { chance, mockJobPublishedEvent } from 'src/testing/types';
+
+import * as jobScorer from './jobScorer';
+
+describe('scoreJobPublishedEvent', () => {
+  beforeAll(scoringService.spy);
+
+  afterEach(scoringService.clear);
+
+  it('scores an event', async () => {
+    const score = chance.floating({ max: 1, min: 0 });
+
+    scoringService.request.mockResolvedValue(score);
+
+    await expect(
+      jobScorer.scoreJobPublishedEvent(
+        mockJobPublishedEvent({ entityId: '1' }),
+      ),
+    ).resolves.toStrictEqual({
+      data: {
+        score,
+      },
+      entityId: '1',
+      eventType: 'JobScored',
+    });
+
+    expect(scoringService.request).toHaveBeenCalledTimes(1);
+  });
+
+  it('bubbles up scoring service error', async () => {
+    const err = Error(chance.sentence());
+
+    scoringService.request.mockRejectedValue(err);
+
+    await expect(
+      jobScorer.scoreJobPublishedEvent(
+        mockJobPublishedEvent({ entityId: '1' }),
+      ),
+    ).rejects.toThrow(err);
+
+    expect(scoringService.request).toHaveBeenCalledTimes(1);
+  });
+});

package/template/lambda-sqs-worker-cdk/src/services/jobScorer.ts

@@ -0,0 +1,59 @@
+import {
+  jobPublishedEventToScorerInput,
+  jobScorerOutputToScoredEvent,
+} from 'src/mapping/jobScorer';
+import {
+  type JobScorerInput,
+  type JobScorerOutput,
+  JobScorerOutputSchema,
+} from 'src/types/jobScorer';
+import type {
+  JobPublishedEvent,
+  JobScoredEvent,
+} from 'src/types/pipelineEvents';
+
+/* istanbul ignore next: simulation of an external service */
+export const scoringService = {
+  request: (details: string): Promise<unknown> => {
+    // Networking woes
+    if (Math.random() < 0.05) {
+      const err = Error('could not reach scoring service');
+
+      return Promise.reject(err);
+    }
+
+    // Unexpected behaviour on certain inputs
+    if (details.length % 100 === 0) {
+      return Promise.resolve(null);
+    }
+
+    return Promise.resolve(Math.random());
+  },
+
+  smokeTest: async (): Promise<void> => {
+    // A connectivity test
+    await Promise.resolve();
+  },
+};
+
+const scoreJob = async ({
+  details,
+  id,
+}: JobScorerInput): Promise<JobScorerOutput> => {
+  const score = await scoringService.request(details);
+
+  return JobScorerOutputSchema.parse({
+    id,
+    score,
+  });
+};
+
+export const scoreJobPublishedEvent = async (
+  publishedJob: JobPublishedEvent,
+): Promise<JobScoredEvent> => {
+  const scorerInput = jobPublishedEventToScorerInput(publishedJob);
+
+  const scorerOutput = await scoreJob(scorerInput);
+
+  return jobScorerOutputToScoredEvent(scorerOutput);
+};

package/template/lambda-sqs-worker-cdk/src/services/pipelineEventSender.test.ts

@@ -0,0 +1,40 @@
+import { PublishCommand } from '@aws-sdk/client-sns';
+
+import { sns } from 'src/testing/services';
+import { chance } from 'src/testing/types';
+
+import { sendPipelineEvent } from './pipelineEventSender';
+
+describe('sendPipelineEvent', () => {
+  afterEach(() => {
+    jest.clearAllMocks();
+  });
+
+  it('handles happy path', async () => {
+    const messageId = chance.guid({ version: 4 });
+
+    sns.publish.resolves({ MessageId: messageId });
+
+    await expect(sendPipelineEvent({})).resolves.toBe(messageId);
+
+    expect(sns.client).toReceiveCommandTimes(PublishCommand, 1);
+  });
+
+  it('bubbles up SNS error', () => {
+    const err = Error(chance.sentence());
+
+    sns.publish.rejects(err);
+
+    return expect(sendPipelineEvent({})).rejects.toThrow(err);
+  });
+
+  it('throws on missing message ID', () => {
+    sns.publish.resolves({});
+
+    return expect(
+      sendPipelineEvent({}),
+    ).rejects.toThrowErrorMatchingInlineSnapshot(
+      `"SNS did not return a message ID"`,
+    );
+  });
+});