skuba 4.4.0 → 4.4.1
This diff shows the content of publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their public registry.
- package/lib/utils/version.js +46 -3
- package/lib/utils/version.js.map +1 -1
- package/package.json +4 -3
- package/template/koa-rest-api/package.json +2 -4
- package/template/koa-rest-api/src/storage/jobs.ts +2 -2
- package/template/lambda-sqs-worker/README.md +1 -1
- package/template/lambda-sqs-worker/package.json +5 -1
- package/template/lambda-sqs-worker/serverless.yml +17 -2
- package/template/lambda-sqs-worker/src/app.test.ts +19 -17
- package/template/lambda-sqs-worker/src/app.ts +6 -6
- package/template/lambda-sqs-worker/src/config.ts +3 -0
- package/template/lambda-sqs-worker/src/framework/handler.test.ts +7 -7
- package/template/lambda-sqs-worker/src/framework/handler.ts +31 -8
- package/template/lambda-sqs-worker/src/framework/metrics.ts +10 -6
package/lib/utils/version.js
CHANGED
@@ -1,16 +1,59 @@
 "use strict";
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    var desc = Object.getOwnPropertyDescriptor(m, k);
+    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
+      desc = { enumerable: true, get: function() { return m[k]; } };
+    }
+    Object.defineProperty(o, k2, desc);
+}) : (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    o[k2] = m[k];
+}));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+    Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+    o["default"] = v;
+});
+var __importStar = (this && this.__importStar) || function (mod) {
+    if (mod && mod.__esModule) return mod;
+    var result = {};
+    if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
+    __setModuleDefault(result, mod);
+    return result;
+};
 var __importDefault = (this && this.__importDefault) || function (mod) {
     return (mod && mod.__esModule) ? mod : { "default": mod };
 };
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.getSkubaVersionInfo = exports.getSkubaVersion = exports.latestNpmVersion = void 0;
-const
+const path_1 = __importDefault(require("path"));
+const validate_npm_package_name_1 = __importDefault(require("validate-npm-package-name"));
 const manifest_1 = require("./manifest");
+const validation_1 = require("./validation");
 const wait_1 = require("./wait");
+const loadPackageJson = async (packageName) => {
+    const { validForNewPackages } = (0, validate_npm_package_name_1.default)(packageName);
+    if (!validForNewPackages) {
+        throw new Error(`Package "${packageName}" does not have a valid name`);
+    }
+    const message = `Package "${packageName}" does not have a valid package.json manifest`;
+    let packageJson;
+    try {
+        packageJson = await Promise.resolve().then(() => __importStar(require(path_1.default.posix.join(packageName, 'package.json'))));
+    }
+    catch {
+        throw new Error(message);
+    }
+    if (!(0, validation_1.isObject)(packageJson)) {
+        throw new Error(message);
+    }
+    return packageJson;
+};
 const latestNpmVersion = async (packageName) => {
-    const { version } = await (
+    const { version } = await loadPackageJson(packageName);
     if (typeof version !== 'string') {
-        throw new Error(`
+        throw new Error(`Package "${packageName}" does not have a valid version in its package.json manifest`);
     }
     return version;
 };

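The practical effect of this change: `latestNpmVersion` no longer pulls the manifest from the npm registry via the removed `package-json` dependency; it validates the package name with `validate-npm-package-name` and dynamically imports the locally installed package's own `package.json`. A minimal TypeScript sketch of what the corresponding `src/utils/version.ts` plausibly compiles from, reconstructed from the emitted JS above (the types and the local `isObject` stand-in are assumptions, not the verbatim source):

import path from 'path';

import validatePackageName from 'validate-npm-package-name';

// Stand-in for the package's own ./validation helper.
const isObject = (value: unknown): value is Record<string, unknown> =>
  typeof value === 'object' && value !== null;

const loadPackageJson = async (
  packageName: string,
): Promise<Record<string, unknown>> => {
  const { validForNewPackages } = validatePackageName(packageName);

  if (!validForNewPackages) {
    throw new Error(`Package "${packageName}" does not have a valid name`);
  }

  const message = `Package "${packageName}" does not have a valid package.json manifest`;

  let packageJson: unknown;
  try {
    // Resolves the locally installed package's manifest rather than
    // querying the npm registry.
    packageJson = await import(path.posix.join(packageName, 'package.json'));
  } catch {
    throw new Error(message);
  }

  if (!isObject(packageJson)) {
    throw new Error(message);
  }

  return packageJson;
};

export const latestNpmVersion = async (packageName: string): Promise<string> => {
  const { version } = await loadPackageJson(packageName);

  if (typeof version !== 'string') {
    throw new Error(
      `Package "${packageName}" does not have a valid version in its package.json manifest`,
    );
  }

  return version;
};
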
package/lib/utils/version.js.map
CHANGED
@@ -1 +1 @@
-{"version":3,"file":"version.js","sourceRoot":"","sources":["../../src/utils/version.ts"],"names":[],"mappings":"
+{"version":3,"file":"version.js","sourceRoot":"","sources":["../../src/utils/version.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA,gDAAwB;AAExB,0FAA4D;AAE5D,yCAA8C;AAC9C,6CAAwC;AACxC,iCAAqC;AAErC,MAAM,eAAe,GAAG,KAAK,EAC3B,WAAmB,EACe,EAAE;IACpC,MAAM,EAAE,mBAAmB,EAAE,GAAG,IAAA,mCAAmB,EAAC,WAAW,CAAC,CAAC;IAEjE,IAAI,CAAC,mBAAmB,EAAE;QACxB,MAAM,IAAI,KAAK,CAAC,YAAY,WAAW,8BAA8B,CAAC,CAAC;KACxE;IAED,MAAM,OAAO,GAAG,YAAY,WAAW,+CAA+C,CAAC;IAEvF,IAAI,WAAoB,CAAC;IACzB,IAAI;QACF,WAAW,GAAG,wDAAa,cAAI,CAAC,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,cAAc,CAAC,GAAC,CAAC;KAC1E;IAAC,MAAM;QACN,MAAM,IAAI,KAAK,CAAC,OAAO,CAAC,CAAC;KAC1B;IAED,IAAI,CAAC,IAAA,qBAAQ,EAAC,WAAW,CAAC,EAAE;QAC1B,MAAM,IAAI,KAAK,CAAC,OAAO,CAAC,CAAC;KAC1B;IAED,OAAO,WAAW,CAAC;AACrB,CAAC,CAAC;AAEK,MAAM,gBAAgB,GAAG,KAAK,EACnC,WAAmB,EACF,EAAE;IACnB,MAAM,EAAE,OAAO,EAAE,GAAG,MAAM,eAAe,CAAC,WAAW,CAAC,CAAC;IAEvD,IAAI,OAAO,OAAO,KAAK,QAAQ,EAAE;QAC/B,MAAM,IAAI,KAAK,CACb,YAAY,WAAW,8DAA8D,CACtF,CAAC;KACH;IAED,OAAO,OAAO,CAAC;AACjB,CAAC,CAAC;AAZW,QAAA,gBAAgB,oBAY3B;AAEF,MAAM,kBAAkB,GAAG,KAAK,IAA4B,EAAE;IAC5D,IAAI;QACF,MAAM,MAAM,GAAG,MAAM,IAAA,kBAAW,EAAC,IAAA,wBAAgB,EAAC,OAAO,CAAC,EAAE,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,CAAC;QAEtE,OAAO,MAAM,CAAC,EAAE,CAAC,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC,CAAC,IAAI,CAAC;KACxC;IAAC,MAAM;QACN,OAAO,IAAI,CAAC;KACb;AACH,CAAC,CAAC;AAEK,MAAM,eAAe,GAAG,KAAK,IAAqB,EAAE;IACzD,MAAM,EAAE,OAAO,EAAE,GAAG,MAAM,IAAA,2BAAgB,GAAE,CAAC;IAE7C,OAAO,OAAO,CAAC;AACjB,CAAC,CAAC;AAJW,QAAA,eAAe,mBAI1B;AAgBK,MAAM,mBAAmB,GAAG,KAAK,IAA+B,EAAE;IACvE,MAAM,CAAC,KAAK,EAAE,MAAM,CAAC,GAAG,MAAM,OAAO,CAAC,GAAG,CAAC;QACxC,IAAA,uBAAe,GAAE;QACjB,kBAAkB,EAAE;KACrB,CAAC,CAAC;IAEH,IAAI,MAAM,KAAK,IAAI,EAAE;QACnB,6DAA6D;QAC7D,OAAO;YACL,OAAO,EAAE,KAAK;YACd,KAAK;YACL,MAAM;SACP,CAAC;KACH;IAED,OAAO;QACL,OAAO,EAAE,MAAM,KAAK,KAAK;QACzB,KAAK;QACL,MAAM;KACP,CAAC;AACJ,CAAC,CAAC;AApBW,QAAA,mBAAmB,uBAoB9B"}

package/package.json
CHANGED
@@ -34,7 +34,6 @@
     "normalize-package-data": "^4.0.0",
     "npm-run-path": "^4.0.1",
     "npm-which": "^3.0.1",
-    "package-json": "^7.0.0",
     "picomatch": "^2.2.2",
     "prettier": "~2.7.0",
     "read-pkg-up": "^7.0.1",
@@ -50,7 +49,8 @@
     "ts-node-dev": "^2.0.0",
     "tsconfig-paths": "^4.0.0",
     "tsconfig-seek": "1.0.2",
-    "typescript": "~4.8.2"
+    "typescript": "~4.8.2",
+    "validate-npm-package-name": "^4.0.0"
   },
   "description": "SEEK development toolkit for backend applications and packages",
   "devDependencies": {
@@ -66,6 +66,7 @@
     "@types/npm-which": "3.0.1",
     "@types/picomatch": "2.3.0",
     "@types/supertest": "2.0.12",
+    "@types/validate-npm-package-name": "4.0.0",
     "enhanced-resolve": "5.10.0",
     "express": "4.18.1",
     "jsonfile": "6.1.0",
@@ -154,5 +155,5 @@
     "version": "4.0.0"
   },
   "types": "./lib/index.d.ts",
-  "version": "4.4.0"
+  "version": "4.4.1"
 }

package/template/koa-rest-api/package.json
CHANGED
@@ -3,7 +3,7 @@
     "@koa/router": "^12.0.0",
     "@opentelemetry/api": "^1.1.0",
     "@opentelemetry/exporter-collector-grpc": "^0.25.0",
-    "@opentelemetry/instrumentation-aws-sdk": "^0.
+    "@opentelemetry/instrumentation-aws-sdk": "^0.9.0",
     "@opentelemetry/instrumentation-http": "^0.32.0",
     "@opentelemetry/sdk-node": "^0.32.0",
     "@seek/logger": "^5.0.1",
@@ -16,8 +16,7 @@
     "runtypes-filter": "^0.6.0",
     "seek-datadog-custom-metrics": "^4.0.0",
     "seek-koala": "^6.0.0",
-    "skuba-dive": "^2.0.0",
-    "uuid": "^8.3.2"
+    "skuba-dive": "^2.0.0"
   },
   "devDependencies": {
     "@types/chance": "^1.1.3",
@@ -26,7 +25,6 @@
     "@types/koa__router": "^8.0.8",
     "@types/node": "^16.0.0",
     "@types/supertest": "^2.0.11",
-    "@types/uuid": "^8.3.1",
     "chance": "^1.1.8",
     "pino-pretty": "^9.0.0",
     "skuba": "*",

package/template/koa-rest-api/src/storage/jobs.ts
CHANGED
@@ -1,11 +1,11 @@
-import {
+import { randomUUID } from 'crypto';
 
 import { Job, JobInput } from 'src/types/jobs';
 
 const jobStore: Record<string, Job> = {};
 
 export const createJob = (jobInput: JobInput): Promise<Job> => {
-  const id =
+  const id = randomUUID();
 
   const job = { ...jobInput, id };
 

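Together with the removal of `uuid` and `@types/uuid` from the template's package.json above, this switches ID generation to Node's built-in crypto module. A minimal sketch of the replacement, assuming a Node runtime of 14.17 or later (which the template's `@types/node` range points at):

import { randomUUID } from 'crypto';

// randomUUID ships with Node >= 14.17, so no third-party uuid dependency
// is needed to mint a v4-style identifier.
const id: string = randomUUID();

console.log(id); // e.g. '3422b448-2460-4fd2-9183-8000de6f8343'
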
package/template/lambda-sqs-worker/README.md
CHANGED
@@ -15,7 +15,7 @@ Next steps:
 3. [ ] Create a new repository in the appropriate GitHub organisation.
 4. [ ] Add the repository to BuildAgency;
        see [Builds at SEEK] for more information.
-5. [ ] Add deployment bucket configuration and data classification tags to [serverless.yml](serverless.yml).
+5. [ ] Add Datadog extension, deployment bucket configuration and data classification tags to [serverless.yml](serverless.yml).
 6. [ ] Push local commits to the upstream GitHub branch.
 7. [ ] Configure [GitHub repository settings].
 8. [ ] Delete this checklist 😌.

package/template/lambda-sqs-worker/package.json
CHANGED
@@ -2,7 +2,7 @@
   "dependencies": {
     "@seek/logger": "^5.0.1",
     "aws-sdk": "^2.1011.0",
-    "
+    "datadog-lambda-js": "^6.83.0",
     "skuba-dive": "^2.0.0",
     "runtypes": "^6.4.1",
     "runtypes-filter": "^0.6.0"
@@ -15,6 +15,7 @@
     "pino-pretty": "^9.0.0",
     "serverless": "^3.17.0",
     "serverless-plugin-canary-deployments": "^0.8.0",
+    "serverless-plugin-datadog": "^5.7.0",
     "serverless-prune-plugin": "^2.0.0",
     "skuba": "*"
   },
@@ -23,6 +24,9 @@
   },
   "license": "UNLICENSED",
   "private": true,
+  "resolutions": {
+    "@types/responselike": "1.0.0"
+  },
   "scripts": {
     "build": "skuba build",
     "deploy": "serverless deploy --force --verbose",

package/template/lambda-sqs-worker/serverless.yml
CHANGED
@@ -4,21 +4,33 @@ configValidationMode: error
 
 params:
   default:
+    datadogApiKeySecretArn: 'TODO: arn:aws:secretsmanager:${aws:region}:${aws:accountId}:secret:SECRET-NAME'
     description: <%- description %>
   dev:
-    deploymentBucket: 'TODO:
+    deploymentBucket: 'TODO: deployment-bucket-name'
     isProduction: false
   prod:
-    deploymentBucket: 'TODO:
+    deploymentBucket: 'TODO: deployment-bucket-name'
     isProduction: true
 
 custom:
+  datadog:
+    addLayers: false
+    apiKeySecretArn: ${param:datadogApiKeySecretArn}
+    enableDDLogs: false
+    # TODO: enable Datadog extension
+    enabled: false
+    exclude:
+      - WorkerPreHook
+    injectLogContext: false
+    version: ${env:VERSION}
   prune:
     automatic: true
     number: 3
 
 plugins:
   - serverless-plugin-canary-deployments
+  - serverless-plugin-datadog
   - serverless-prune-plugin
 
 provider:
@@ -51,6 +63,9 @@ provider:
         - Action: lambda:InvokeFunction
           Effect: Allow
           Resource: !Sub arn:aws:lambda:${AWS::Region}:${AWS::AccountId}:function:${self:functions.Worker.name}
+        - Action: secretsmanager:GetSecretValue
+          Effect: Allow
+          Resource: ${param:datadogApiKeySecretArn}-??????
         - Action: sns:Publish
           Effect: Allow
           Resource: !Ref DestinationTopic

package/template/lambda-sqs-worker/src/app.test.ts
CHANGED
@@ -17,7 +17,9 @@ describe('handler', () => {
 
   const score = chance.floating({ max: 1, min: 0 });
 
-  const
+  const distribution = jest
+    .spyOn(metricsClient, 'distribution')
+    .mockReturnValue();
 
   beforeAll(logger.spy);
   beforeAll(scoringService.spy);
@@ -32,7 +34,7 @@ describe('handler', () => {
 
   afterEach(() => {
     logger.clear();
-
+    distribution.mockClear();
     scoringService.clear();
     sns.clear();
   });
@@ -47,12 +49,12 @@ describe('handler', () => {
     expect(logger.error).not.toHaveBeenCalled();
 
     expect(logger.info.mock.calls).toEqual([
-      [{ count: 1 }, '
-      [{ snsMessageId: expect.any(String) }, '
-      ['
+      [{ count: 1 }, 'Received jobs'],
+      [{ snsMessageId: expect.any(String) }, 'Scored job'],
+      ['Function succeeded'],
     ]);
 
-    expect(
+    expect(distribution.mock.calls).toEqual([
       ['job.received', 1],
       ['job.scored', 1],
     ]);
@@ -63,7 +65,7 @@ describe('handler', () => {
   it('throws on invalid input', () => {
     const event = createSqsEvent(['}']);
 
-    return expect(app.handler(event, ctx)).rejects.toThrow('
+    return expect(app.handler(event, ctx)).rejects.toThrow('Function failed');
   });
 
   it('bubbles up scoring service error', async () => {
@@ -73,9 +75,9 @@ describe('handler', () => {
 
     const event = createSqsEvent([JSON.stringify(jobPublished)]);
 
-    await expect(app.handler(event, ctx)).rejects.toThrow('
+    await expect(app.handler(event, ctx)).rejects.toThrow('Function failed');
 
-    expect(logger.error).toHaveBeenCalledWith({ err }, '
+    expect(logger.error).toHaveBeenCalledWith({ err }, 'Function failed');
   });
 
   it('bubbles up SNS error', async () => {
@@ -85,31 +87,31 @@ describe('handler', () => {
 
     const event = createSqsEvent([JSON.stringify(jobPublished)]);
 
-    await expect(app.handler(event, ctx)).rejects.toThrow('
+    await expect(app.handler(event, ctx)).rejects.toThrow('Function failed');
 
-    expect(logger.error).toHaveBeenCalledWith({ err }, '
+    expect(logger.error).toHaveBeenCalledWith({ err }, 'Function failed');
   });
 
   it('throws on zero records', async () => {
-    const err = new Error('
+    const err = new Error('Received 0 records');
 
     const event = createSqsEvent([]);
 
-    await expect(app.handler(event, ctx)).rejects.toThrow('
+    await expect(app.handler(event, ctx)).rejects.toThrow('Function failed');
 
-    expect(logger.error).toHaveBeenCalledWith({ err }, '
+    expect(logger.error).toHaveBeenCalledWith({ err }, 'Function failed');
   });
 
   it('throws on multiple records', async () => {
-    const err = new Error('
+    const err = new Error('Received 2 records');
 
     const event = createSqsEvent([
       JSON.stringify(jobPublished),
       JSON.stringify(jobPublished),
     ]);
 
-    await expect(app.handler(event, ctx)).rejects.toThrow('
+    await expect(app.handler(event, ctx)).rejects.toThrow('Function failed');
 
-    expect(logger.error).toHaveBeenCalledWith({ err }, '
+    expect(logger.error).toHaveBeenCalledWith({ err }, 'Function failed');
   });
 });

package/template/lambda-sqs-worker/src/app.ts
CHANGED
@@ -20,19 +20,19 @@ const smokeTest = async () => {
 export const handler = createHandler<SQSEvent>(async (event) => {
   // Treat an empty object as our smoke test event.
   if (!Object.keys(event).length) {
-    logger.info('
+    logger.info('Received smoke test request');
     return smokeTest();
   }
 
   const count = event.Records.length;
 
   if (count !== 1) {
-    throw Error(`
+    throw Error(`Received ${count} records`);
   }
 
-  logger.info({ count }, '
+  logger.info({ count }, 'Received jobs');
 
-  metricsClient.
+  metricsClient.distribution('job.received', event.Records.length);
 
   const record = event.Records[0];
 
@@ -46,7 +46,7 @@ export const handler = createHandler<SQSEvent>(async (event) => {
 
   const snsMessageId = await sendPipelineEvent(scoredJob);
 
-  logger.info({ snsMessageId }, '
+  logger.info({ snsMessageId }, 'Scored job');
 
-  metricsClient.
+  metricsClient.distribution('job.scored', 1);
 });

package/template/lambda-sqs-worker/src/config.ts
CHANGED
@@ -4,6 +4,7 @@ interface Config {
   environment: Environment;
 
   logLevel: string;
+  metrics: boolean;
   name: string;
   version: string;
 
@@ -20,6 +21,7 @@ const environment = Env.oneOf(environments)('ENVIRONMENT');
 const configs: Record<Environment, () => Omit<Config, 'environment'>> = {
   local: () => ({
     logLevel: 'debug',
+    metrics: false,
     name: '<%- serviceName %>',
     version: 'local',
 
@@ -41,6 +43,7 @@ const configs: Record<Environment, () => Omit<Config, 'environment'>> = {
 
   prod: () => ({
     logLevel: 'info',
+    metrics: true,
     name: Env.string('SERVICE'),
     version: Env.string('VERSION'),
 

package/template/lambda-sqs-worker/src/framework/handler.test.ts
CHANGED
@@ -18,7 +18,7 @@ describe('createHandler', () => {
     const handler = createHandler((event) => {
       expect(event).toBe(input);
 
-      logger.info('
+      logger.info('Handler invoked');
 
       return Promise.resolve(output);
     });
@@ -28,8 +28,8 @@ describe('createHandler', () => {
     expect(logger.error).not.toHaveBeenCalled();
 
     expect(logger.info.mock.calls).toEqual([
-      ['
-      ['
+      ['Handler invoked'],
+      ['Function succeeded'],
     ]);
   });
 
@@ -38,9 +38,9 @@ describe('createHandler', () => {
 
     const handler = createHandler(() => Promise.reject(err));
 
-    await expect(handler(input, ctx)).rejects.toThrow('
+    await expect(handler(input, ctx)).rejects.toThrow('Function failed');
 
-    expect(logger.error.mock.calls).toEqual([[{ err }, '
+    expect(logger.error.mock.calls).toEqual([[{ err }, 'Function failed']]);
 
     expect(logger.info).not.toHaveBeenCalled();
   });
@@ -52,9 +52,9 @@ describe('createHandler', () => {
       throw err;
     });
 
-    await expect(handler(input, ctx)).rejects.toThrow('
+    await expect(handler(input, ctx)).rejects.toThrow('Function failed');
 
-    expect(logger.error.mock.calls).toEqual([[{ err }, '
+    expect(logger.error.mock.calls).toEqual([[{ err }, 'Function failed']]);
 
     expect(logger.info).not.toHaveBeenCalled();
   });

package/template/lambda-sqs-worker/src/framework/handler.ts
CHANGED
@@ -1,20 +1,43 @@
-import {
+import { datadog } from 'datadog-lambda-js';
 
+import { config } from 'src/config';
 import { logger, loggerContext } from 'src/framework/logging';
 
-
-
-
+interface LambdaContext {
+  awsRequestId: string;
+}
+
+type Handler<Event, Output> = (
+  event: Event,
+  ctx: LambdaContext,
+) => Promise<Output>;
+
+/**
+ * Conditionally applies the Datadog wrapper to a Lambda handler.
+ *
+ * This also "fixes" its broken type definitions.
+ */
+const withDatadog = <Event, Output = unknown>(
+  fn: Handler<Event, Output>,
+): Handler<Event, Output> =>
+  // istanbul ignore next
+  config.metrics ? (datadog(fn) as Handler<Event, Output>) : fn;
+
+export const createHandler = <Event, Output = unknown>(
+  fn: (event: Event) => Promise<Output>,
+) =>
+  withDatadog<Event>((event, { awsRequestId }) =>
     loggerContext.run({ awsRequestId }, async () => {
       try {
         const output = await fn(event);
 
-        logger.info('
+        logger.info('Function succeeded');
 
         return output;
       } catch (err) {
-        logger.error({ err }, '
+        logger.error({ err }, 'Function failed');
 
-        throw new Error('
+        throw new Error('Function failed');
       }
-    })
+    }),
+  );

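The reworked `createHandler` now routes every handler through `withDatadog`, which applies the `datadog()` wrapper from datadog-lambda-js only when `config.metrics` is enabled. A minimal usage sketch, assuming an SQS-triggered function along the lines of the template's `src/app.ts` (the log message here is illustrative):

import type { SQSEvent } from 'aws-lambda';

import { createHandler } from 'src/framework/handler';
import { logger } from 'src/framework/logging';

// When config.metrics is true the inner function is wrapped by datadog();
// locally (metrics: false) it runs unwrapped, so tests and dev stay simple.
export const handler = createHandler<SQSEvent>(async (event) => {
  logger.info({ count: event.Records.length }, 'Received records');
});
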
package/template/lambda-sqs-worker/src/framework/metrics.ts
CHANGED
@@ -1,10 +1,14 @@
-import {
-  createCloudWatchClient,
-  createTimedSpan,
-} from 'seek-datadog-custom-metrics';
+import { sendDistributionMetric } from 'datadog-lambda-js';
 
 import { config } from 'src/config';
 
-
+const prefix = `${config.name}.`;
 
-export const
+export const metricsClient = {
+  distribution: (
+    ...[name, ...rest]: Parameters<typeof sendDistributionMetric>
+  ) =>
+    config.metrics
+      ? sendDistributionMetric(`${prefix}${name}`, ...rest)
+      : undefined,
+};
