skuba 9.0.1-upgrade-cdk-template-20241002233314 → 9.0.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (36)
  1. package/lib/cli/lint/autofix.js +15 -0
  2. package/lib/cli/lint/autofix.js.map +2 -2
  3. package/package.json +1 -1
  4. package/template/express-rest-api/package.json +1 -1
  5. package/template/greeter/package.json +2 -2
  6. package/template/koa-rest-api/package.json +1 -1
  7. package/template/lambda-sqs-worker/package.json +1 -1
  8. package/template/lambda-sqs-worker/serverless.yml +1 -1
  9. package/template/lambda-sqs-worker-cdk/infra/__snapshots__/appStack.test.ts.snap +134 -102
  10. package/template/lambda-sqs-worker-cdk/infra/appStack.test.ts +2 -13
  11. package/template/lambda-sqs-worker-cdk/infra/appStack.ts +6 -52
  12. package/template/lambda-sqs-worker-cdk/infra/config.ts +0 -3
  13. package/template/lambda-sqs-worker-cdk/package.json +2 -9
  14. package/template/lambda-sqs-worker-cdk/src/app.ts +21 -43
  15. package/template/lambda-sqs-worker-cdk/src/config.ts +0 -15
  16. package/template/lambda-sqs-worker-cdk/.env +0 -1
  17. package/template/lambda-sqs-worker-cdk/README.md +0 -145
  18. package/template/lambda-sqs-worker-cdk/src/app.test.ts +0 -116
  19. package/template/lambda-sqs-worker-cdk/src/framework/handler.test.ts +0 -61
  20. package/template/lambda-sqs-worker-cdk/src/framework/handler.ts +0 -43
  21. package/template/lambda-sqs-worker-cdk/src/framework/logging.ts +0 -27
  22. package/template/lambda-sqs-worker-cdk/src/framework/metrics.ts +0 -14
  23. package/template/lambda-sqs-worker-cdk/src/framework/validation.test.ts +0 -84
  24. package/template/lambda-sqs-worker-cdk/src/framework/validation.ts +0 -10
  25. package/template/lambda-sqs-worker-cdk/src/mapping/jobScorer.ts +0 -22
  26. package/template/lambda-sqs-worker-cdk/src/services/aws.ts +0 -5
  27. package/template/lambda-sqs-worker-cdk/src/services/jobScorer.test.ts +0 -44
  28. package/template/lambda-sqs-worker-cdk/src/services/jobScorer.ts +0 -59
  29. package/template/lambda-sqs-worker-cdk/src/services/pipelineEventSender.test.ts +0 -40
  30. package/template/lambda-sqs-worker-cdk/src/services/pipelineEventSender.ts +0 -33
  31. package/template/lambda-sqs-worker-cdk/src/testing/handler.ts +0 -13
  32. package/template/lambda-sqs-worker-cdk/src/testing/logging.ts +0 -19
  33. package/template/lambda-sqs-worker-cdk/src/testing/services.ts +0 -28
  34. package/template/lambda-sqs-worker-cdk/src/testing/types.ts +0 -33
  35. package/template/lambda-sqs-worker-cdk/src/types/jobScorer.ts +0 -15
  36. package/template/lambda-sqs-worker-cdk/src/types/pipelineEvents.ts +0 -21
@@ -1,57 +1,35 @@
-import 'skuba-dive/register';
+import createLogger from '@seek/logger';
+import type { SQSEvent, SQSHandler } from 'aws-lambda';
 
-import type { SQSEvent } from 'aws-lambda';
+import { config } from './config';
 
-import { createHandler } from 'src/framework/handler';
-import { logger } from 'src/framework/logging';
-import { metricsClient } from 'src/framework/metrics';
-import { validateJson } from 'src/framework/validation';
-import { scoreJobPublishedEvent, scoringService } from 'src/services/jobScorer';
-import { sendPipelineEvent } from 'src/services/pipelineEventSender';
-import { JobPublishedEventSchema } from 'src/types/pipelineEvents';
+export const logger = createLogger({
+  base: {
+    environment: config.environment,
+    version: config.version,
+  },
+
+  level: config.logLevel,
+
+  name: config.name,
+
+  transport:
+    config.environment === 'local' ? { target: 'pino-pretty' } : undefined,
+});
 
 /**
  * Tests connectivity to ensure appropriate access and network configuration.
  */
-const smokeTest = async () => {
-  await Promise.all([scoringService.smokeTest(), sendPipelineEvent({}, true)]);
-};
+const smokeTest = async () => Promise.resolve();
 
-export const handler = createHandler<SQSEvent>(async (event) => {
+export const handler: SQSHandler = (event: SQSEvent) => {
   // Treat an empty object as our smoke test event.
   if (!Object.keys(event).length) {
     logger.debug('Received smoke test request');
     return smokeTest();
   }
 
-  const count = event.Records.length;
-
-  if (count !== 1) {
-    throw Error(`Received ${count} records`);
-  }
-
-  logger.debug({ count }, 'Received jobs');
-
-  metricsClient.distribution('job.received', event.Records.length);
-
-  const record = event.Records[0];
-  if (!record) {
-    throw new Error('Malformed SQS event with no records');
-  }
-
-  const { body } = record;
-
-  // TODO: this throws an error, which will cause the Lambda function to retry
-  // the event and eventually send it to your dead-letter queue. If you don't
-  // trust your source to provide consistently well-formed input, consider
-  // catching and handling this error in code.
-  const publishedJob = validateJson(body, JobPublishedEventSchema);
-
-  const scoredJob = await scoreJobPublishedEvent(publishedJob);
+  logger.info('Hello World!');
 
-  const snsMessageId = await sendPipelineEvent(scoredJob);
-
-  logger.debug({ snsMessageId }, 'Scored job');
-
-  metricsClient.distribution('job.scored', 1);
-});
+  return;
+};
@@ -4,11 +4,8 @@ interface Config {
   environment: Environment;
 
   logLevel: string;
-  metrics: boolean;
   name: string;
   version: string;
-
-  destinationSnsTopicArn: string;
 }
 
 type Environment = (typeof environments)[number];
@@ -21,38 +18,26 @@ const environment = Env.oneOf(environments)('ENVIRONMENT');
 const configs: Record<Environment, () => Omit<Config, 'environment'>> = {
   local: () => ({
     logLevel: 'debug',
-    metrics: false,
     name: '<%- serviceName %>',
     version: 'local',
-
-    destinationSnsTopicArn: 'arn:aws:sns:us-east-2:123456789012:destination',
   }),
 
   test: () => ({
     logLevel: Env.string('LOG_LEVEL', { default: 'silent' }),
-    metrics: false,
     name: '<%- serviceName %>',
     version: 'test',
-
-    destinationSnsTopicArn: 'arn:aws:sns:us-east-2:123456789012:destination',
   }),
 
   dev: () => ({
     logLevel: 'debug',
-    metrics: true,
     name: Env.string('SERVICE'),
     version: Env.string('VERSION'),
-
-    destinationSnsTopicArn: Env.string('DESTINATION_SNS_TOPIC_ARN'),
  }),
 
   prod: () => ({
     logLevel: 'info',
-    metrics: true,
     name: Env.string('SERVICE'),
     version: Env.string('VERSION'),
-
-    destinationSnsTopicArn: Env.string('DESTINATION_SNS_TOPIC_ARN'),
  }),
 };
 
@@ -1 +0,0 @@
-ENVIRONMENT=local
@@ -1,145 +0,0 @@
-# <%- repoName %>
-
-[![Powered by skuba](https://img.shields.io/badge/🤿%20skuba-powered-009DC4)](https://github.com/seek-oss/skuba)
-
-Next steps:
-
-1. [ ] Finish templating if this was skipped earlier:
-
-   ```shell
-   pnpm exec skuba configure
-   ```
-
-2. [ ] Create a new repository in the appropriate GitHub organisation.
-3. [ ] Add the repository to BuildAgency;
-   see our internal [Buildkite Docs] for more information.
-4. [ ] Add the Datadog extension, deployment bucket configuration, and data classification tags to [infra/config.ts](infra/config.ts).
-5. [ ] Push local commits to the upstream GitHub branch.
-6. [ ] Configure [GitHub repository settings].
-7. [ ] Delete this checklist 😌.
-
-[Buildkite Docs]: https://backstage.myseek.xyz/docs/default/component/buildkite-docs
-[GitHub repository settings]: https://github.com/<%-orgName%>/<%-repoName%>/settings
-
-## Design
-
-<%-repoName %> is a Node.js [Lambda] application built in line with our [Technical Guidelines].
-It is backed by a typical SQS message + dead letter queue configuration and uses common SEEK packages.
-Workers enable fault-tolerant asynchronous processing of events.
-
-The `lambda-sqs-worker-cdk` template is modelled after a hypothetical enricher that scores job advertisements.
-It's stubbed out with an in-memory [scoring service](src/services/jobScorer.ts).
-This would be replaced with internal logic or an external service in production.
-
-This project is deployed with [AWS CDK].
-The Lambda runtime provisions a single Node.js process per container.
-The supplied [infra/appStack.ts](infra/appStack.ts) starts out with a minimal `memorySize`, which may require tuning based on workload.
-Under load, we autoscale horizontally in terms of container count up to `reservedConcurrency`.
-
-[@seek/aws-codedeploy-hooks] configures [CodeDeploy] for a blue-green deployment approach.
-A smoke test is run against the new version before traffic is switched over,
-providing an opportunity to test access and connectivity to online dependencies.
-This defaults to an invocation with an empty object `{}`.
-
-## Development
-
-### Test
-
-```shell
-# Run Jest tests locally
-pnpm test
-
-# Authenticate to dev account
-awsauth
-
-# Run smoke test against deployed application
-ENVIRONMENT=dev pnpm smoke
-```
-
-### Lint
-
-```shell
-# Fix issues
-pnpm format
-
-# Check for issues
-pnpm lint
-```
-
-### Start
-
-```shell
-# Start a local HTTP server
-pnpm start
-
-# Start with Node.js Inspector enabled
-pnpm start:debug
-```
-
-This serves the Lambda application over HTTP.
-For example, to invoke the handler with an empty object `{}` for smoke testing:
-
-```shell
-curl --data '[{}, {"awsRequestId": "local"}]' --include localhost:<%- port %>
-```
-
-### Deploy
-
-This project is deployed through a [Buildkite pipeline](.buildkite/pipeline.yml).
-
-- Commits to a feature branch can be deployed to the dev environment by unblocking a step in the Buildkite UI.
-- Commits to the default branch are automatically deployed to the dev and prod environments in sequence.
-
-To deploy locally:
-
-```shell
-# Authenticate to dev account
-awsauth
-
-ENVIRONMENT=dev pnpm run deploy
-```
-
-A hotswap deploy enables faster deployment but comes with caveats, such as requiring the Lambda to be rebuilt on every build.
-
-To deploy a [hotswap]:
-
-```shell
-# Authenticate to dev account
-awsauth
-
-ENVIRONMENT=dev pnpm run deploy:hotswap
-```
-
-To rapidly roll back a change,
-retry an individual deployment step from the previous build in Buildkite.
-Note that this will introduce drift between the head of the default Git branch and the live environment;
-use with caution and always follow up with a proper revert or fix in Git history.
-
-## Support
-
-### Dev
-
-TODO: add support links for the dev environment.
-
-<!--
-- CloudWatch dashboard
-- Datadog dashboard
-- Splunk logs
--->
-
-### Prod
-
-TODO: add support links for the prod environment.
-
-<!--
-- CloudWatch dashboard
-- Datadog dashboard
-- Splunk logs
--->
-
-[@seek/aws-codedeploy-hooks]: https://github.com/seek-oss/aws-codedeploy-hooks
-[AWS CDK]: https://docs.aws.amazon.com/cdk/v2/guide/home.html
-[CodeDeploy]: https://docs.aws.amazon.com/codedeploy
-[Hotswap]: https://docs.aws.amazon.com/cdk/v2/guide/ref-cli-cmd-deploy.html#ref-cli-cmd-deploy-options
-[Lambda]: https://docs.aws.amazon.com/lambda
-[Technical Guidelines]: https://myseek.atlassian.net/wiki/spaces/AA/pages/2358346017/
@@ -1,116 +0,0 @@
-import { PublishCommand } from '@aws-sdk/client-sns';
-
-import { metricsClient } from 'src/framework/metrics';
-import { createCtx, createSqsEvent } from 'src/testing/handler';
-import { logger } from 'src/testing/logging';
-import { scoringService, sns } from 'src/testing/services';
-import { chance, mockJobPublishedEvent } from 'src/testing/types';
-
-import * as app from './app';
-
-describe('app', () => {
-  it('exports a handler', () => expect(app).toHaveProperty('handler'));
-});
-
-describe('handler', () => {
-  const ctx = createCtx();
-
-  const jobPublished = mockJobPublishedEvent({ entityId: chance.name() });
-
-  const score = chance.floating({ max: 1, min: 0 });
-
-  const distribution = jest
-    .spyOn(metricsClient, 'distribution')
-    .mockReturnValue();
-
-  beforeAll(logger.spy);
-  beforeAll(scoringService.spy);
-
-  beforeEach(() => {
-    scoringService.request.mockResolvedValue(score);
-    sns.publish.resolves({ MessageId: chance.guid({ version: 4 }) });
-  });
-
-  afterEach(() => {
-    logger.clear();
-    distribution.mockClear();
-    scoringService.clear();
-    sns.clear();
-  });
-
-  it('handles one record', async () => {
-    const event = createSqsEvent([JSON.stringify(jobPublished)]);
-
-    await expect(app.handler(event, ctx)).resolves.toBeUndefined();
-
-    expect(scoringService.request).toHaveBeenCalledTimes(1);
-
-    expect(logger.error).not.toHaveBeenCalled();
-
-    expect(logger.debug.mock.calls).toEqual([
-      [{ count: 1 }, 'Received jobs'],
-      [{ snsMessageId: expect.any(String) }, 'Scored job'],
-      ['Function succeeded'],
-    ]);
-
-    expect(distribution.mock.calls).toEqual([
-      ['job.received', 1],
-      ['job.scored', 1],
-    ]);
-
-    expect(sns.client).toReceiveCommandTimes(PublishCommand, 1);
-  });
-
-  it('throws on invalid input', () => {
-    const event = createSqsEvent(['}']);
-
-    return expect(app.handler(event, ctx)).rejects.toThrow('Function failed');
-  });
-
-  it('bubbles up scoring service error', async () => {
-    const err = Error(chance.sentence());
-
-    scoringService.request.mockRejectedValue(err);
-
-    const event = createSqsEvent([JSON.stringify(jobPublished)]);
-
-    await expect(app.handler(event, ctx)).rejects.toThrow('Function failed');
-
-    expect(logger.error).toHaveBeenCalledWith({ err }, 'Function failed');
-  });
-
-  it('bubbles up SNS error', async () => {
-    const err = Error(chance.sentence());
-
-    sns.publish.rejects(err);
-
-    const event = createSqsEvent([JSON.stringify(jobPublished)]);
-
-    await expect(app.handler(event, ctx)).rejects.toThrow('Function failed');
-
-    expect(logger.error).toHaveBeenCalledWith({ err }, 'Function failed');
-  });
-
-  it('throws on zero records', async () => {
-    const err = new Error('Received 0 records');
-
-    const event = createSqsEvent([]);
-
-    await expect(app.handler(event, ctx)).rejects.toThrow('Function failed');
-
-    expect(logger.error).toHaveBeenCalledWith({ err }, 'Function failed');
-  });
-
-  it('throws on multiple records', async () => {
-    const err = new Error('Received 2 records');
-
-    const event = createSqsEvent([
-      JSON.stringify(jobPublished),
-      JSON.stringify(jobPublished),
-    ]);
-
-    await expect(app.handler(event, ctx)).rejects.toThrow('Function failed');
-
-    expect(logger.error).toHaveBeenCalledWith({ err }, 'Function failed');
-  });
-});
@@ -1,61 +0,0 @@
-import { createCtx } from 'src/testing/handler';
-import { logger } from 'src/testing/logging';
-import { chance } from 'src/testing/types';
-
-import { createHandler } from './handler';
-
-describe('createHandler', () => {
-  const ctx = createCtx();
-  const input = chance.paragraph();
-
-  beforeAll(logger.spy);
-
-  afterEach(logger.clear);
-
-  it('handles happy path', async () => {
-    const output = chance.paragraph();
-
-    const handler = createHandler((event) => {
-      expect(event).toBe(input);
-
-      logger.debug('Handler invoked');
-
-      return Promise.resolve(output);
-    });
-
-    await expect(handler(input, ctx)).resolves.toBe(output);
-
-    expect(logger.error).not.toHaveBeenCalled();
-
-    expect(logger.debug.mock.calls).toEqual([
-      ['Handler invoked'],
-      ['Function succeeded'],
-    ]);
-  });
-
-  it('handles async error', async () => {
-    const err = Error(chance.sentence());
-
-    const handler = createHandler(() => Promise.reject(err));
-
-    await expect(handler(input, ctx)).rejects.toThrow('Function failed');
-
-    expect(logger.error).toHaveBeenCalledWith({ err }, 'Function failed');
-
-    expect(logger.debug).not.toHaveBeenCalled();
-  });
-
-  it('handles sync error', async () => {
-    const err = Error(chance.sentence());
-
-    const handler = createHandler(() => {
-      throw err;
-    });
-
-    await expect(handler(input, ctx)).rejects.toThrow('Function failed');
-
-    expect(logger.error).toHaveBeenCalledWith({ err }, 'Function failed');
-
-    expect(logger.debug).not.toHaveBeenCalled();
-  });
-});
@@ -1,43 +0,0 @@
-import { datadog } from 'datadog-lambda-js';
-
-import { config } from 'src/config';
-import { logger, loggerContext } from 'src/framework/logging';
-
-interface LambdaContext {
-  awsRequestId: string;
-}
-
-type Handler<Event, Output> = (
-  event: Event,
-  ctx: LambdaContext,
-) => Promise<Output>;
-
-/**
- * Conditionally applies the Datadog wrapper to a Lambda handler.
- *
- * This also "fixes" its broken type definitions.
- */
-const withDatadog = <Event, Output = unknown>(
-  fn: Handler<Event, Output>,
-): Handler<Event, Output> =>
-  // istanbul ignore next
-  config.metrics ? (datadog(fn) as Handler<Event, Output>) : fn;
-
-export const createHandler = <Event, Output = unknown>(
-  fn: (event: Event) => Promise<Output>,
-) =>
-  withDatadog<Event>((event, { awsRequestId }) =>
-    loggerContext.run({ awsRequestId }, async () => {
-      try {
-        const output = await fn(event);
-
-        logger.debug('Function succeeded');
-
-        return output;
-      } catch (err) {
-        logger.error({ err }, 'Function failed');
-
-        throw new Error('Function failed');
-      }
-    }),
-  );
@@ -1,27 +0,0 @@
-import { AsyncLocalStorage } from 'async_hooks';
-
-import createLogger from '@seek/logger';
-
-import { config } from 'src/config';
-
-interface LoggerContext {
-  awsRequestId: string;
-}
-
-export const loggerContext = new AsyncLocalStorage<LoggerContext>();
-
-export const logger = createLogger({
-  base: {
-    environment: config.environment,
-    version: config.version,
-  },
-
-  level: config.logLevel,
-
-  mixin: () => ({ ...loggerContext.getStore() }),
-
-  name: config.name,
-
-  transport:
-    config.environment === 'local' ? { target: 'pino-pretty' } : undefined,
-});
@@ -1,14 +0,0 @@
-import { sendDistributionMetric } from 'datadog-lambda-js';
-
-import { config } from 'src/config';
-
-const prefix = `${config.name}.`;
-
-export const metricsClient = {
-  distribution: (
-    ...[name, ...rest]: Parameters<typeof sendDistributionMetric>
-  ) =>
-    config.metrics
-      ? sendDistributionMetric(`${prefix}${name}`, ...rest)
-      : undefined,
-};
@@ -1,84 +0,0 @@
-import {
-  IdDescriptionSchema,
-  chance,
-  mockIdDescription,
-} from 'src/testing/types';
-
-import { validateJson } from './validation';
-
-describe('validateJson', () => {
-  const idDescription = mockIdDescription();
-
-  it('permits valid input', () => {
-    const input = JSON.stringify(idDescription);
-
-    expect(validateJson(input, IdDescriptionSchema)).toStrictEqual(
-      idDescription,
-    );
-  });
-
-  it('filters additional properties', () => {
-    const input = JSON.stringify({ ...idDescription, hacker: chance.name() });
-
-    expect(validateJson(input, IdDescriptionSchema)).toStrictEqual(
-      idDescription,
-    );
-  });
-
-  it('blocks mistyped prop', () => {
-    const input = JSON.stringify({ ...idDescription, id: null });
-
-    expect(() => validateJson(input, IdDescriptionSchema))
-      .toThrowErrorMatchingInlineSnapshot(`
-      "[
-        {
-          "code": "invalid_type",
-          "expected": "string",
-          "received": "null",
-          "path": [
-            "id"
-          ],
-          "message": "Expected string, received null"
-        }
-      ]"
-    `);
-  });
-
-  it('blocks missing prop', () => {
-    const input = '{}';
-
-    expect(() => validateJson(input, IdDescriptionSchema))
-      .toThrowErrorMatchingInlineSnapshot(`
-      "[
-        {
-          "code": "invalid_type",
-          "expected": "string",
-          "received": "undefined",
-          "path": [
-            "id"
-          ],
-          "message": "Required"
-        },
-        {
-          "code": "invalid_type",
-          "expected": "string",
-          "received": "undefined",
-          "path": [
-            "description"
-          ],
-          "message": "Required"
-        }
-      ]"
-    `);
-  });
-
-  it('blocks invalid JSON', () => {
-    const input = '}';
-
-    expect(() =>
-      validateJson(input, IdDescriptionSchema),
-    ).toThrowErrorMatchingInlineSnapshot(
-      `"Unexpected token '}', "}" is not valid JSON"`,
-    );
-  });
-});
@@ -1,10 +0,0 @@
-import type { z } from 'zod';
-
-export const validateJson = <
-  Output,
-  Def extends z.ZodTypeDef = z.ZodTypeDef,
-  Input = Output,
->(
-  input: string,
-  schema: z.ZodSchema<Output, Def, Input>,
-): Output => schema.parse(JSON.parse(input));
@@ -1,22 +0,0 @@
-import type { JobScorerInput, JobScorerOutput } from 'src/types/jobScorer';
-import type {
-  JobPublishedEvent,
-  JobScoredEvent,
-} from 'src/types/pipelineEvents';
-
-export const jobPublishedEventToScorerInput = (
-  record: JobPublishedEvent,
-): JobScorerInput => ({
-  details: record.data.details,
-  id: record.entityId,
-});
-
-export const jobScorerOutputToScoredEvent = (
-  output: JobScorerOutput,
-): JobScoredEvent => ({
-  data: {
-    score: output.score,
-  },
-  entityId: output.id,
-  eventType: 'JobScored',
-});
@@ -1,5 +0,0 @@
-import { SNSClient } from '@aws-sdk/client-sns';
-
-export const sns = new SNSClient({
-  apiVersion: '2010-03-31',
-});