@digitraffic/common 2022.11.2-1 → 2022.11.11-1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/aws/infra/canaries/database-checker.d.ts +22 -10
- package/dist/aws/infra/canaries/database-checker.js +38 -28
- package/dist/aws/infra/canaries/database-checker.js.map +1 -1
- package/dist/aws/infra/import-util.d.ts +21 -0
- package/dist/aws/infra/import-util.js +53 -0
- package/dist/aws/infra/import-util.js.map +1 -0
- package/dist/aws/infra/stack/lambda-configs.d.ts +0 -8
- package/dist/aws/infra/stack/lambda-configs.js +7 -7
- package/dist/aws/infra/stack/rest_apis.d.ts +1 -1
- package/dist/aws/infra/stack/rest_apis.js +2 -2
- package/dist/aws/infra/stack/rest_apis.js.map +1 -1
- package/dist/aws/infra/stacks/db-dns-stack.d.ts +11 -0
- package/dist/aws/infra/stacks/db-dns-stack.js +63 -0
- package/dist/aws/infra/stacks/db-dns-stack.js.map +1 -0
- package/dist/aws/infra/stacks/db-proxy-stack.d.ts +19 -0
- package/dist/aws/infra/stacks/db-proxy-stack.js +74 -0
- package/dist/aws/infra/stacks/db-proxy-stack.js.map +1 -0
- package/dist/aws/infra/stacks/db-stack.d.ts +31 -0
- package/dist/aws/infra/stacks/db-stack.js +91 -0
- package/dist/aws/infra/stacks/db-stack.js.map +1 -0
- package/dist/aws/infra/stacks/intra-stack-configuration.d.ts +5 -0
- package/dist/aws/infra/stacks/intra-stack-configuration.js +3 -0
- package/dist/aws/infra/stacks/intra-stack-configuration.js.map +1 -0
- package/dist/aws/infra/stacks/network-stack.d.ts +12 -0
- package/dist/aws/infra/stacks/network-stack.js +36 -0
- package/dist/aws/infra/stacks/network-stack.js.map +1 -0
- package/dist/aws/infra/usage-plans.d.ts +3 -2
- package/dist/aws/infra/usage-plans.js +5 -4
- package/dist/aws/infra/usage-plans.js.map +1 -1
- package/package.json +1 -1
- package/src/aws/infra/canaries/database-checker.ts +48 -44
- package/src/aws/infra/import-util.ts +57 -0
- package/src/aws/infra/stack/lambda-configs.ts +7 -16
- package/src/aws/infra/stack/rest_apis.ts +2 -2
- package/src/aws/infra/stacks/db-dns-stack.ts +88 -0
- package/src/aws/infra/stacks/db-proxy-stack.ts +129 -0
- package/src/aws/infra/stacks/db-stack.ts +165 -0
- package/src/aws/infra/stacks/intra-stack-configuration.ts +6 -0
- package/src/aws/infra/stacks/network-stack.ts +46 -0
- package/src/aws/infra/usage-plans.ts +15 -6
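Taken together, the new `stacks/` modules split the database infrastructure into separate CDK stacks (network, Aurora cluster, RDS proxy, private DNS) that share values through the CloudFormation exports defined in `import-util`. The sketch below shows one way the pieces could be wired together in a consuming CDK app; the import paths, the `InfraStackConfiguration` object literal and all concrete values are illustrative assumptions based on this diff, not an official example.

```ts
import { App } from "aws-cdk-lib";
import { InstanceType } from "aws-cdk-lib/aws-ec2";
import { AuroraPostgresEngineVersion } from "aws-cdk-lib/aws-rds";
// Import paths below are assumptions about how the published dist/ files are consumed.
import { NetworkStack } from "@digitraffic/common/dist/aws/infra/stacks/network-stack";
import { DbStack } from "@digitraffic/common/dist/aws/infra/stacks/db-stack";
import { DbDnsStack } from "@digitraffic/common/dist/aws/infra/stacks/db-dns-stack";

const app = new App();

// Shared per-environment settings; field names inferred from how `isc` is used in the stacks.
const isc = {
    environmentName: "road-test", // used in export names and the private DNS zone name
    env: { account: "123456789012", region: "eu-west-1" }, // placeholder CDK environment
};

// VPC with one public and one private subnet group per AZ.
new NetworkStack(app, "NetworkStack", isc, {
    vpcName: "road-test-vpc",
    cidr: "10.0.0.0/21",
});

// Aurora PostgreSQL cluster restored from a snapshot; exports the cluster id and endpoints.
new DbStack(app, "DbStack", isc, {
    secretArn: "arn:aws:secretsmanager:eu-west-1:123456789012:secret:db-secret-abc123", // placeholder
    securityGroupId: "sg-0123456789abcdef0", // placeholder
    dbVersion: AuroraPostgresEngineVersion.VER_13_7,
    dbInstanceType: new InstanceType("r6g.large"),
    instances: 2,
    snapshotIdentifier: "road-test-db-snapshot", // placeholder
    customParameterGroup: true,
});

// Private hosted zone with CNAMEs for the cluster and proxy endpoints
// (also reads exports from DbProxyStack, which is omitted from this sketch).
new DbDnsStack(app, "DbDnsStack", isc);
```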

package/dist/aws/infra/stacks/db-stack.js
@@ -0,0 +1,91 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.DbStack = void 0;
+const aws_cdk_lib_1 = require("aws-cdk-lib");
+const aws_ec2_1 = require("aws-cdk-lib/aws-ec2");
+const aws_rds_1 = require("aws-cdk-lib/aws-rds");
+const aws_secretsmanager_1 = require("aws-cdk-lib/aws-secretsmanager");
+const import_util_1 = require("../import-util");
+/**
+ * How to upgrade major version?
+ * 0. Set correct SG for db-stack and db-proxy-stack(this step will be removed in the future)
+ * 1. Update db-stack WITHOUT parameter group
+ * 2. Upgrade extensions by hand
+ * 3. Upgrade database from the AWS console
+ * 4. Update db-stack with the upgraded version and custom parameter group
+ */
+class DbStack extends aws_cdk_lib_1.Stack {
+    constructor(scope, id, isc, configuration) {
+        super(scope, id, {
+            env: isc.env,
+        });
+        const cluster = this.createAuroraCluster(isc, configuration);
+        (0, import_util_1.exportValue)(this, isc.environmentName, DbStack.CLUSTER_IDENTIFIER_EXPORT_NAME, cluster.clusterIdentifier);
+        (0, import_util_1.exportValue)(this, isc.environmentName, DbStack.CLUSTER_WRITE_ENDPOINT_EXPORT_NAME, cluster.clusterEndpoint.hostname);
+        (0, import_util_1.exportValue)(this, isc.environmentName, DbStack.CLUSTER_READ_ENDPOINT_EXPORT_NAME, cluster.clusterReadEndpoint.hostname);
+    }
+    createAuroraCluster(isc, configuration) {
+        const instanceName = isc.environmentName + "-db";
+        const secret = aws_secretsmanager_1.Secret.fromSecretAttributes(this, "db-secret", {
+            secretCompleteArn: configuration.secretArn,
+        });
+        const securityGroup = aws_ec2_1.SecurityGroup.fromSecurityGroupId(this, "securitygroup", configuration.securityGroupId);
+        const vpc = (0, import_util_1.importVpc)(this, isc.environmentName);
+        const parameterGroup = configuration.customParameterGroup
+            ? new aws_rds_1.ParameterGroup(this, `parameter-group-${configuration.dbVersion.auroraPostgresMajorVersion}`, {
+                engine: aws_rds_1.DatabaseClusterEngine.auroraPostgres({
+                    version: configuration.dbVersion,
+                }),
+                parameters: {
+                    "pg_stat_statements.track": "ALL",
+                    random_page_cost: "1",
+                },
+            })
+            : aws_rds_1.ParameterGroup.fromParameterGroupName(this, "ParameterGroup", `default.aurora-postgresql${configuration.dbVersion.auroraPostgresMajorVersion}`);
+        const cluster = new aws_rds_1.DatabaseClusterFromSnapshot(this, instanceName, {
+            snapshotIdentifier: configuration.snapshotIdentifier,
+            engine: aws_rds_1.DatabaseClusterEngine.auroraPostgres({
+                version: configuration.dbVersion,
+            }),
+            instances: configuration.instances,
+            instanceUpdateBehaviour: aws_rds_1.InstanceUpdateBehaviour.ROLLING,
+            instanceIdentifierBase: instanceName + "-",
+            cloudwatchLogsExports: ["postgresql"],
+            backup: {
+                retention: aws_cdk_lib_1.Duration.days(35),
+                preferredWindow: "01:00-02:00",
+            },
+            preferredMaintenanceWindow: "mon:03:00-mon:04:00",
+            deletionProtection: true,
+            removalPolicy: aws_cdk_lib_1.RemovalPolicy.RETAIN,
+            port: DbStack.CLUSTER_PORT,
+            instanceProps: {
+                autoMinorVersionUpgrade: true,
+                allowMajorVersionUpgrade: false,
+                enablePerformanceInsights: true,
+                vpc,
+                securityGroups: [securityGroup],
+                vpcSubnets: {
+                    subnetType: aws_ec2_1.SubnetType.PRIVATE_WITH_NAT,
+                },
+                instanceType: configuration.dbInstanceType,
+                parameterGroup,
+            },
+            credentials: aws_rds_1.Credentials.fromSecret(secret),
+            parameterGroup,
+        });
+        // this workaround should prevent stack failing on version upgrade
+        const cfnInstances = cluster.node.children.filter((child) => child instanceof aws_rds_1.CfnDBInstance);
+        if (cfnInstances.length === 0) {
+            throw new Error("Couldn't pull CfnDBInstances from the L1 constructs!");
+        }
+        cfnInstances.forEach((cfnInstance) => delete cfnInstance.engineVersion);
+        return cluster;
+    }
+}
+exports.DbStack = DbStack;
+DbStack.CLUSTER_IDENTIFIER_EXPORT_NAME = "db-cluster";
+DbStack.CLUSTER_READ_ENDPOINT_EXPORT_NAME = "db-cluster-reader-endpoint";
+DbStack.CLUSTER_WRITE_ENDPOINT_EXPORT_NAME = "db-cluster-writer-endpoint";
+DbStack.CLUSTER_PORT = 5432;
+//# sourceMappingURL=db-stack.js.map
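The upgrade recipe in the file header amounts to deploying the same stack twice with different configuration: first without the custom parameter group, then, after the manual extension and engine upgrades, with the new version and the custom group again. A hedged illustration of the two configuration states (field names come from this file, the version values are only examples):

```ts
import { AuroraPostgresEngineVersion } from "aws-cdk-lib/aws-rds";

// Step 1: deploy on the current major version with the default parameter group
// (the ParameterGroup.fromParameterGroupName("default.aurora-postgresql12") branch).
const beforeUpgrade = {
    dbVersion: AuroraPostgresEngineVersion.VER_12_11, // example current version
    customParameterGroup: false,
    // ...remaining DbConfiguration fields unchanged
};

// Steps 2-3 are manual: upgrade extensions and the cluster engine in the AWS console.

// Step 4: deploy again with the upgraded version and the custom parameter group
// (creates "parameter-group-13" with pg_stat_statements.track and random_page_cost).
const afterUpgrade = {
    dbVersion: AuroraPostgresEngineVersion.VER_13_7, // example target version
    customParameterGroup: true,
    // ...remaining DbConfiguration fields unchanged
};
```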

package/dist/aws/infra/stacks/db-stack.js.map
@@ -0,0 +1 @@
{"version":3,"file":"db-stack.js","sourceRoot":"","sources":["../../../../src/aws/infra/stacks/db-stack.ts"],"names":[],"mappings":";;;AAAA,6CAA6D;AAE7D,iDAAgE;AAChE,iDAS6B;AAC7B,uEAAwD;AACxD,gDAAwD;AAiBxD;;;;;;;GAOG;AAEH,MAAa,OAAQ,SAAQ,mBAAK;IAS9B,YACI,KAAgB,EAChB,EAAU,EACV,GAA4B,EAC5B,aAA8B;QAE9B,KAAK,CAAC,KAAK,EAAE,EAAE,EAAE;YACb,GAAG,EAAE,GAAG,CAAC,GAAG;SACf,CAAC,CAAC;QAEH,MAAM,OAAO,GAAG,IAAI,CAAC,mBAAmB,CAAC,GAAG,EAAE,aAAa,CAAC,CAAC;QAE7D,IAAA,yBAAW,EACP,IAAI,EACJ,GAAG,CAAC,eAAe,EACnB,OAAO,CAAC,8BAA8B,EACtC,OAAO,CAAC,iBAAiB,CAC5B,CAAC;QACF,IAAA,yBAAW,EACP,IAAI,EACJ,GAAG,CAAC,eAAe,EACnB,OAAO,CAAC,kCAAkC,EAC1C,OAAO,CAAC,eAAe,CAAC,QAAQ,CACnC,CAAC;QACF,IAAA,yBAAW,EACP,IAAI,EACJ,GAAG,CAAC,eAAe,EACnB,OAAO,CAAC,iCAAiC,EACzC,OAAO,CAAC,mBAAmB,CAAC,QAAQ,CACvC,CAAC;IACN,CAAC;IAED,mBAAmB,CACf,GAA4B,EAC5B,aAA8B;QAE9B,MAAM,YAAY,GAAG,GAAG,CAAC,eAAe,GAAG,KAAK,CAAC;QACjD,MAAM,MAAM,GAAG,2BAAM,CAAC,oBAAoB,CAAC,IAAI,EAAE,WAAW,EAAE;YAC1D,iBAAiB,EAAE,aAAa,CAAC,SAAS;SAC7C,CAAC,CAAC;QACH,MAAM,aAAa,GAAG,uBAAa,CAAC,mBAAmB,CACnD,IAAI,EACJ,eAAe,EACf,aAAa,CAAC,eAAe,CAChC,CAAC;QACF,MAAM,GAAG,GAAG,IAAA,uBAAS,EAAC,IAAI,EAAE,GAAG,CAAC,eAAe,CAAC,CAAC;QAEjD,MAAM,cAAc,GAAG,aAAa,CAAC,oBAAoB;YACrD,CAAC,CAAC,IAAI,wBAAc,CACd,IAAI,EACJ,mBAAmB,aAAa,CAAC,SAAS,CAAC,0BAA0B,EAAE,EACvE;gBACI,MAAM,EAAE,+BAAqB,CAAC,cAAc,CAAC;oBACzC,OAAO,EAAE,aAAa,CAAC,SAAS;iBACnC,CAAC;gBACF,UAAU,EAAE;oBACR,0BAA0B,EAAE,KAAK;oBACjC,gBAAgB,EAAE,GAAG;iBACxB;aACJ,CACJ;YACH,CAAC,CAAC,wBAAc,CAAC,sBAAsB,CACjC,IAAI,EACJ,gBAAgB,EAChB,4BAA4B,aAAa,CAAC,SAAS,CAAC,0BAA0B,EAAE,CACnF,CAAC;QAER,MAAM,OAAO,GAAG,IAAI,qCAA2B,CAAC,IAAI,EAAE,YAAY,EAAE;YAChE,kBAAkB,EAAE,aAAa,CAAC,kBAAkB;YACpD,MAAM,EAAE,+BAAqB,CAAC,cAAc,CAAC;gBACzC,OAAO,EAAE,aAAa,CAAC,SAAS;aACnC,CAAC;YACF,SAAS,EAAE,aAAa,CAAC,SAAS;YAClC,uBAAuB,EAAE,iCAAuB,CAAC,OAAO;YACxD,sBAAsB,EAAE,YAAY,GAAG,GAAG;YAC1C,qBAAqB,EAAE,CAAC,YAAY,CAAC;YACrC,MAAM,EAAE;gBACJ,SAAS,EAAE,sBAAQ,CAAC,IAAI,CAAC,EAAE,CAAC;gBAC5B,eAAe,EAAE,aAAa;aACjC;YACD,0BAA0B,EAAE,qBAAqB;YACjD,kBAAkB,EAAE,IAAI;YACxB,aAAa,EAAE,2BAAa,CAAC,MAAM;YACnC,IAAI,EAAE,OAAO,CAAC,YAAY;YAC1B,aAAa,EAAE;gBACX,uBAAuB,EAAE,IAAI;gBAC7B,wBAAwB,EAAE,KAAK;gBAC/B,yBAAyB,EAAE,IAAI;gBAC/B,GAAG;gBACH,cAAc,EAAE,CAAC,aAAa,CAAC;gBAC/B,UAAU,EAAE;oBACR,UAAU,EAAE,oBAAU,CAAC,gBAAgB;iBAC1C;gBACD,YAAY,EAAE,aAAa,CAAC,cAAc;gBAC1C,cAAc;aACjB;YACD,WAAW,EAAE,qBAAW,CAAC,UAAU,CAAC,MAAM,CAAC;YAC3C,cAAc;SACjB,CAAC,CAAC;QAEH,kEAAkE;QAClE,MAAM,YAAY,GAAG,OAAO,CAAC,IAAI,CAAC,QAAQ,CAAC,MAAM,CAC7C,CAAC,KAAK,EAA0B,EAAE,CAAC,KAAK,YAAY,uBAAa,CACpE,CAAC;QACF,IAAI,YAAY,CAAC,MAAM,KAAK,CAAC,EAAE;YAC3B,MAAM,IAAI,KAAK,CACX,sDAAsD,CACzD,CAAC;SACL;QACD,YAAY,CAAC,OAAO,CAChB,CAAC,WAAW,EAAE,EAAE,CAAC,OAAO,WAAW,CAAC,aAAa,CACpD,CAAC;QAEF,OAAO,OAAO,CAAC;IACnB,CAAC;;AA3HL,0BA4HC;AA3HiB,sCAA8B,GAAG,YAAY,CAAC;AAC9C,yCAAiC,GAC3C,4BAA4B,CAAC;AACnB,0CAAkC,GAC5C,4BAA4B,CAAC;AAEnB,oBAAY,GAAG,IAAI,CAAC"}

package/dist/aws/infra/stacks/intra-stack-configuration.js.map
@@ -0,0 +1 @@
{"version":3,"file":"intra-stack-configuration.js","sourceRoot":"","sources":["../../../../src/aws/infra/stacks/intra-stack-configuration.ts"],"names":[],"mappings":""}

package/dist/aws/infra/stacks/network-stack.d.ts
@@ -0,0 +1,12 @@
+import { Stack } from "aws-cdk-lib";
+import { Construct } from "constructs";
+import { Vpc } from "aws-cdk-lib/aws-ec2";
+import { InfraStackConfiguration } from "./intra-stack-configuration";
+export interface NetworkConfiguration {
+    readonly vpcName: string;
+    readonly cidr: string;
+}
+export declare class NetworkStack extends Stack {
+    constructor(scope: Construct, id: string, isc: InfraStackConfiguration, configuration: NetworkConfiguration);
+    createVpc(configuration: NetworkConfiguration): Vpc;
+}

package/dist/aws/infra/stacks/network-stack.js
@@ -0,0 +1,36 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.NetworkStack = void 0;
+const aws_cdk_lib_1 = require("aws-cdk-lib");
+const aws_ec2_1 = require("aws-cdk-lib/aws-ec2");
+class NetworkStack extends aws_cdk_lib_1.Stack {
+    constructor(scope, id, isc, configuration) {
+        super(scope, id, {
+            env: isc.env,
+        });
+        this.createVpc(configuration);
+    }
+    createVpc(configuration) {
+        return new aws_ec2_1.Vpc(this, "DigitrafficVPC", {
+            vpcName: configuration.vpcName,
+            availabilityZones: ["eu-west-1a", "eu-west-1b"],
+            enableDnsHostnames: true,
+            enableDnsSupport: true,
+            cidr: configuration.cidr,
+            subnetConfiguration: [
+                {
+                    name: "public",
+                    cidrMask: 24,
+                    subnetType: aws_ec2_1.SubnetType.PUBLIC,
+                },
+                {
+                    name: "private",
+                    cidrMask: 24,
+                    subnetType: aws_ec2_1.SubnetType.PRIVATE_WITH_NAT,
+                },
+            ],
+        });
+    }
+}
+exports.NetworkStack = NetworkStack;
+//# sourceMappingURL=network-stack.js.map

package/dist/aws/infra/stacks/network-stack.js.map
@@ -0,0 +1 @@
{"version":3,"file":"network-stack.js","sourceRoot":"","sources":["../../../../src/aws/infra/stacks/network-stack.ts"],"names":[],"mappings":";;;AAAA,6CAAoC;AAEpC,iDAAsD;AAQtD,MAAa,YAAa,SAAQ,mBAAK;IACnC,YACI,KAAgB,EAChB,EAAU,EACV,GAA4B,EAC5B,aAAmC;QAEnC,KAAK,CAAC,KAAK,EAAE,EAAE,EAAE;YACb,GAAG,EAAE,GAAG,CAAC,GAAG;SACf,CAAC,CAAC;QAEH,IAAI,CAAC,SAAS,CAAC,aAAa,CAAC,CAAC;IAClC,CAAC;IAED,SAAS,CAAC,aAAmC;QACzC,OAAO,IAAI,aAAG,CAAC,IAAI,EAAE,gBAAgB,EAAE;YACnC,OAAO,EAAE,aAAa,CAAC,OAAO;YAC9B,iBAAiB,EAAE,CAAC,YAAY,EAAE,YAAY,CAAC;YAC/C,kBAAkB,EAAE,IAAI;YACxB,gBAAgB,EAAE,IAAI;YACtB,IAAI,EAAE,aAAa,CAAC,IAAI;YACxB,mBAAmB,EAAE;gBACjB;oBACI,IAAI,EAAE,QAAQ;oBACd,QAAQ,EAAE,EAAE;oBACZ,UAAU,EAAE,oBAAU,CAAC,MAAM;iBAChC;gBACD;oBACI,IAAI,EAAE,SAAS;oBACf,QAAQ,EAAE,EAAE;oBACZ,UAAU,EAAE,oBAAU,CAAC,gBAAgB;iBAC1C;aACJ;SACJ,CAAC,CAAC;IACP,CAAC;CACJ;AAnCD,oCAmCC"}

package/dist/aws/infra/usage-plans.d.ts
@@ -1,4 +1,4 @@
-import { IApiKey, RestApi } from
+import { IApiKey, RestApi } from "aws-cdk-lib/aws-apigateway";
 /**
  * Creates an usage plan for a REST API with a single API key
  * @param api The REST API
@@ -11,5 +11,6 @@ export declare function createUsagePlan(api: RestApi, apiKeyId: string, apiKeyNa
  * Creates a default usage plan for a REST API with a single API key
  * @param api The REST API
  * @param apiName Name of the api. Will generate key: apiName + ' API Key' and plan: apiName + ' API Usage Plan'
+ * @param value Optional value for the API key
  */
-export declare function createDefaultUsagePlan(api: RestApi, apiName: string): IApiKey;
+export declare function createDefaultUsagePlan(api: RestApi, apiName: string, value?: string): IApiKey;

package/dist/aws/infra/usage-plans.js
@@ -24,11 +24,12 @@ exports.createUsagePlan = createUsagePlan;
  * Creates a default usage plan for a REST API with a single API key
  * @param api The REST API
  * @param apiName Name of the api. Will generate key: apiName + ' API Key' and plan: apiName + ' API Usage Plan'
+ * @param value Optional value for the API key
  */
-function createDefaultUsagePlan(api, apiName) {
-    const apiKeyName = apiName +
-    const usagePlanName = apiName +
-    const apiKey = api.addApiKey(apiKeyName, { apiKeyName: apiKeyName });
+function createDefaultUsagePlan(api, apiName, value) {
+    const apiKeyName = apiName + " API Key";
+    const usagePlanName = apiName + " API Usage Plan";
+    const apiKey = api.addApiKey(apiKeyName, { apiKeyName: apiKeyName, value });
     const plan = api.addUsagePlan(usagePlanName, {
         name: usagePlanName,
     });
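The new optional `value` parameter lets the caller pin the generated API key to a known value instead of letting API Gateway generate one; `DigitrafficRestApi.createUsagePlanV2` (changed later in this diff) simply passes its `apiKey` argument through to it. A small usage sketch, with the API construct, names and key value as illustrative placeholders and the import path assumed:

```ts
import { RestApi } from "aws-cdk-lib/aws-apigateway";
import { createDefaultUsagePlan } from "@digitraffic/common/dist/aws/infra/usage-plans";

declare const api: RestApi; // an existing REST API construct

// Derives the names "Weathercam API Key" and "Weathercam API Usage Plan"
// and lets API Gateway generate the key value.
const generatedKey = createDefaultUsagePlan(api, "Weathercam");

// Same, but with a caller-provided key value (for example one read from a secret).
const pinnedKey = createDefaultUsagePlan(api, "Weathercam", "example-predefined-key-value");
```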

package/dist/aws/infra/usage-plans.js.map
@@ -1 +1 @@
-{"version":3,"file":"usage-plans.js","sourceRoot":"","sources":["../../../src/aws/infra/usage-plans.ts"],"names":[],"mappings":";;;AAEA;;;;;;GAMG;AACH,SAAgB,eAAe,
{"version":3,"file":"usage-plans.js","sourceRoot":"","sources":["../../../src/aws/infra/usage-plans.ts"],"names":[],"mappings":";;;AAEA;;;;;;GAMG;AACH,SAAgB,eAAe,CAC3B,GAAY,EACZ,QAAgB,EAChB,UAAkB;IAElB,MAAM,MAAM,GAAG,GAAG,CAAC,SAAS,CAAC,QAAQ,CAAC,CAAC;IACvC,MAAM,IAAI,GAAG,GAAG,CAAC,YAAY,CAAC,UAAU,EAAE;QACtC,IAAI,EAAE,UAAU;KACnB,CAAC,CAAC;IACH,IAAI,CAAC,WAAW,CAAC;QACb,KAAK,EAAE,GAAG,CAAC,eAAe;KAC7B,CAAC,CAAC;IACH,IAAI,CAAC,SAAS,CAAC,MAAM,CAAC,CAAC;IAEvB,OAAO,MAAM,CAAC;AAClB,CAAC;AAfD,0CAeC;AAED;;;;;GAKG;AACH,SAAgB,sBAAsB,CAClC,GAAY,EACZ,OAAe,EACf,KAAc;IAEd,MAAM,UAAU,GAAG,OAAO,GAAG,UAAU,CAAC;IACxC,MAAM,aAAa,GAAG,OAAO,GAAG,iBAAiB,CAAC;IAClD,MAAM,MAAM,GAAG,GAAG,CAAC,SAAS,CAAC,UAAU,EAAE,EAAE,UAAU,EAAE,UAAU,EAAE,KAAK,EAAE,CAAC,CAAC;IAC5E,MAAM,IAAI,GAAG,GAAG,CAAC,YAAY,CAAC,aAAa,EAAE;QACzC,IAAI,EAAE,aAAa;KACtB,CAAC,CAAC;IACH,IAAI,CAAC,WAAW,CAAC;QACb,KAAK,EAAE,GAAG,CAAC,eAAe;KAC7B,CAAC,CAAC;IACH,IAAI,CAAC,SAAS,CAAC,MAAM,CAAC,CAAC;IAEvB,OAAO,MAAM,CAAC;AAClB,CAAC;AAjBD,wDAiBC"}
package/package.json
CHANGED

package/src/aws/infra/canaries/database-checker.ts
@@ -2,6 +2,7 @@ import { DTDatabase, inDatabaseReadonly } from "../../../database/database";
 import { ProxyHolder } from "../../runtime/secrets/proxy-holder";
 import { RdsHolder } from "../../runtime/secrets/rds-holder";
 import { getEnvVariable } from "../../../utils/utils";
+import { Countable } from "../../../database/models";

 // eslint-disable-next-line @typescript-eslint/no-var-requires
 const synthetics = require("Synthetics");
@@ -20,15 +21,7 @@ abstract class DatabaseCheck<T> {
     abstract check(value: T): void;
 }

-
-// eslint-disable-next-line @typescript-eslint/no-empty-interface
-interface BaseResponse {}
-
-interface CountResponse extends BaseResponse {
-    count: number;
-}
-
-class CountDatabaseCheck extends DatabaseCheck<CountResponse> {
+class CountDatabaseCheck extends DatabaseCheck<Countable> {
     readonly minCount: number | null;
     readonly maxCount: number | null;

@@ -40,39 +33,39 @@ class CountDatabaseCheck extends DatabaseCheck<CountResponse> {
     ) {
         super(name, sql);

+        if (
+            !sql.toLowerCase().includes("select") ||
+            !sql.toLowerCase().includes("count")
+        ) {
+            throw new Error("sql must contain select count(*)");
+        }
+
         if (minCount == null && maxCount == null) {
-            throw new Error("no max or min given
+            throw new Error("no max or min given");
         }

         this.minCount = minCount;
         this.maxCount = maxCount;
     }

-    check(value:
-        if (
-            this.
-            throw new Error("no return value");
-        } else {
-            if ("count" in value) {
-                if (this.minCount && value.count < this.minCount) {
-                    this.failed = true;
-                    throw new Error(
-                        `count was ${value.count}, minimum is ${this.minCount}`
-                    );
-                }
-                if (this.maxCount && value.count > this.maxCount) {
-                    this.failed = true;
-                    throw new Error(
-                        `count was ${value.count}, max is ${this.maxCount}`
-                    );
-                }
-            } else {
+    check(value: Countable) {
+        if ("count" in value) {
+            if (this.minCount && value.count < this.minCount) {
                 this.failed = true;
-
-
-
-
+                throw new Error(
+                    `count was ${value.count}, minimum is ${this.minCount}`
+                );
+            }
+            if (this.maxCount && value.count > this.maxCount) {
+                this.failed = true;
+                throw new Error(
+                    `count was ${value.count}, max is ${this.maxCount}`
+                );
+            }
+        } else {
+            this.failed = true;
+
+            throw new Error("no count available");
         }
     }
 }
@@ -84,44 +77,55 @@ const stepConfig = {
     screenshotOnStepFailure: false,
 };

-
-
-
+/**
+ * Checker for sql that checks the count. Meaning that the
+ * sql must be structured as "select count(*) from <table> where <something>".
+ */
+export class DatabaseCountChecker {
+    readonly credentialsFunction: () => Promise<void>;
+    readonly checks: DatabaseCheck<Countable>[] = [];

     private constructor(credentialsFunction: () => Promise<void>) {
         this.credentialsFunction = credentialsFunction;
-        this.checks = [];

         synthetics.getConfiguration().disableRequestMetrics();
-
         synthetics.getConfiguration().withFailedCanaryMetric(true);
     }

     static createForProxy() {
-        return new
+        return new DatabaseCountChecker(() =>
             new ProxyHolder(getEnvVariable("SECRET_ID")).setCredentials()
         );
     }

     static createForRds() {
-        return new
+        return new DatabaseCountChecker(() =>
             new RdsHolder(getEnvVariable("SECRET_ID")).setCredentials()
         );
     }

-
+    /**
+     * Expect that the count is 1
+     */
+    expectOne(name: string, sql: string) {
         this.checks.push(new CountDatabaseCheck(name, sql, 1, 1));

         return this;
     }

-
+    /**
+     * Expect that the count is 0
+     */
+    expectZero(name: string, sql: string) {
         this.checks.push(new CountDatabaseCheck(name, sql, null, 0));

         return this;
     }

-
+    /**
+     * Expect that the count is 1 or more
+     */
+    expectOneOrMore(name: string, sql: string) {
         this.checks.push(new CountDatabaseCheck(name, sql, 1, null));

         return this;
@@ -137,7 +141,7 @@ export class DatabaseChecker {
         for (const check of this.checks) {
             console.info("canary checking sql " + check.sql);

-            const value = await db.
+            const value = await db.one<Countable>(check.sql);
             const checkFunction = () => {
                 check.check(value);
             };
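After the rewrite, `DatabaseCountChecker` offers a small fluent API for count-based canary checks where every SQL statement must be a `select count(*) ...` query. A sketch of how a canary might build one (table names, check names and the import path are illustrative; executing the checks happens in code outside the hunks shown here):

```ts
import { DatabaseCountChecker } from "@digitraffic/common/dist/aws/infra/canaries/database-checker";

export const handler = () => {
    // Reads SECRET_ID from the environment and sets proxy credentials before the checks run.
    const checker = DatabaseCountChecker.createForProxy();

    // Each SQL string must contain "select" and "count", otherwise CountDatabaseCheck
    // throws "sql must contain select count(*)" already at construction time.
    checker.expectOne(
        "exactly one active configuration",
        "select count(*) from configuration where active = true"
    );
    checker.expectZero(
        "no stale observations",
        "select count(*) from observation where updated < now() - interval '24 hours'"
    );
    checker.expectOneOrMore(
        "observations exist",
        "select count(*) from observation"
    );

    // The checker is then run by the canary; the run method itself is not part of this diff.
    return checker;
};
```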

package/src/aws/infra/import-util.ts
@@ -0,0 +1,57 @@
+import { IVpc, Vpc } from "aws-cdk-lib/aws-ec2";
+import { CfnOutput, Fn, Stack } from "aws-cdk-lib";
+import { Construct } from "constructs";
+
+export class OldStackImports {
+    public static AURORAINSTANCE_SG_IMPORT_NAME = "AuroraInstanceSG";
+    public static RDSPROXY_SG_IMPORT_NAME = "RDSProxySG";
+}
+
+/**
+ * Import VPC from other stack outputs
+ */
+export function importVpc(scope: Construct, environmentName: string): IVpc {
+    const vpcId = importValue(environmentName, "VPCID");
+    const privateSubnetIds = [
+        importValue(environmentName, "digitrafficprivateASubnet"),
+        importValue(environmentName, "digitrafficprivateBSubnet"),
+    ];
+    const availabilityZones = ["euw1-az1", "euw1-az2"];
+
+    // VPC reference construction requires vpcId and availability zones
+    // private subnets are used in Lambda configuration
+    return Vpc.fromVpcAttributes(scope, "vpc", {
+        vpcId,
+        privateSubnetIds,
+        availabilityZones,
+    });
+}
+
+/**
+ * Import value from other stack output. Stack outputs are named with
+ * digitraffic-${environmentName}-${name} pattern and this function takes care of it
+ */
+export function importValue(environmentName: string, name: string): string {
+    return Fn.importValue(outputName(environmentName, name));
+}
+
+/**
+ * Export value as stack output. Use same naming pattern as importValue.
+ */
+export function exportValue(
+    stack: Stack,
+    environmentName: string,
+    name: string,
+    value: string
+) {
+    const exportName = outputName(environmentName, name);
+
+    new CfnOutput(stack, exportName, {
+        exportName,
+        value,
+    });
+}
+
+export function outputName(environmentName: string, name: string): string {
+    return `digitraffic-${environmentName}-${name}`;
+}
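These helpers standardise the `digitraffic-${environmentName}-${name}` naming for cross-stack CloudFormation exports, so a value exported by one stack can be imported by name in another. A brief sketch of the round trip (stack variables, environment and value names are illustrative; the import path is assumed):

```ts
import { Stack } from "aws-cdk-lib";
import {
    exportValue,
    importValue,
    importVpc,
    outputName,
} from "@digitraffic/common/dist/aws/infra/import-util";

declare const producerStack: Stack; // e.g. a DbStack instance
declare const consumerStack: Stack; // e.g. a DbDnsStack instance

// outputName("road-test", "db-cluster") === "digitraffic-road-test-db-cluster"
const exportName = outputName("road-test", "db-cluster");

// Producer side: creates a CfnOutput whose export name follows the pattern above.
exportValue(producerStack, "road-test", "db-cluster", "example-cluster-identifier");

// Consumer side: resolves the same export via Fn.importValue at deploy time.
const clusterIdentifier = importValue("road-test", "db-cluster");

// VPC lookup built on the same exports ("VPCID" and the private subnet ids).
const vpc = importVpc(consumerStack, "road-test");
```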

package/src/aws/infra/stack/lambda-configs.ts
@@ -76,7 +76,7 @@ export function lambdaFunctionProps(
     config?: Partial<FunctionParameters>
 ): FunctionProps {
     return {
-        runtime: config?.runtime ?? Runtime.
+        runtime: config?.runtime ?? Runtime.NODEJS_16_X,
         architecture: config?.architecture ?? Architecture.ARM_64,
         memorySize: config?.memorySize ?? 128,
         functionName: lambdaName,
@@ -115,16 +115,16 @@ export function dbLambdaConfiguration(
     config: FunctionParameters
 ): FunctionProps {
     return {
-        runtime: props.runtime
-        memorySize: props.memorySize
+        runtime: props.runtime ?? Runtime.NODEJS_16_X,
+        memorySize: props.memorySize ?? config.memorySize ?? 1024,
         functionName: config.functionName,
         code: config.code,
         role: config.role,
         handler: config.handler,
         timeout: Duration.seconds(
-            config.timeout
+            config.timeout ?? props.defaultLambdaDurationSeconds ?? 60
         ),
-        environment: config.environment
+        environment: config.environment ?? {
             DB_USER: props.dbProps?.username ?? "",
             DB_PASS: props.dbProps?.password ?? "",
             DB_URI:
@@ -138,7 +138,7 @@ export function dbLambdaConfiguration(
             subnets: vpc.privateSubnets,
         },
         securityGroups: [lambdaDbSg],
-        reservedConcurrentExecutions: config.reservedConcurrentExecutions
+        reservedConcurrentExecutions: config.reservedConcurrentExecutions ?? 3,
     };
 }

@@ -146,7 +146,7 @@ export function defaultLambdaConfiguration(
     config: FunctionParameters
 ): FunctionProps {
     const props: FunctionProps = {
-        runtime: Runtime.
+        runtime: Runtime.NODEJS_16_X,
         memorySize: config.memorySize ?? 128,
         functionName: config.functionName,
         handler: config.handler,
@@ -191,15 +191,6 @@ export interface FunctionParameters {
 }

 export type MonitoredFunctionParameters = FunctionParameters & {
-    readonly memorySize?: number;
-    readonly timeout?: number;
-    readonly functionName?: string;
-    readonly reservedConcurrentExecutions?: number;
-    readonly role?: Role;
-    readonly runtime?: Runtime;
-    readonly architecture?: Architecture;
-    readonly singleLambda?: boolean;
-
     readonly durationAlarmProps?: MonitoredFunctionAlarmProps;
     readonly durationWarningProps?: MonitoredFunctionAlarmProps;
     readonly errorAlarmProps?: MonitoredFunctionAlarmProps;
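These hunks change the fallback values used when a parameter is not given (the removed lines are cut off in this rendering, so the previous defaults are not visible). Summarised as a sketch, taken only from the hunks above rather than the full `FunctionParameters` contract:

```ts
// Defaults applied when the corresponding parameter is undefined:
const lambdaDefaults = {
    runtime: "nodejs16.x", // Runtime.NODEJS_16_X in all three helpers
    architecture: "arm64", // Architecture.ARM_64 in lambdaFunctionProps
    memorySize: 128, // lambdaFunctionProps and defaultLambdaConfiguration; 1024 in dbLambdaConfiguration
    timeoutSeconds: 60, // config.timeout ?? props.defaultLambdaDurationSeconds ?? 60
    reservedConcurrentExecutions: 3, // dbLambdaConfiguration
};
```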

package/src/aws/infra/stack/rest_apis.ts
@@ -78,8 +78,8 @@ export class DigitrafficRestApi extends RestApi {
         return newKeyId;
     }

-    createUsagePlanV2(apiName: string): string {
-        const newKeyId = createDefaultUsagePlan(this, apiName).keyId;
+    createUsagePlanV2(apiName: string, apiKey?: string): string {
+        const newKeyId = createDefaultUsagePlan(this, apiName, apiKey).keyId;

         this.apiKeyIds.push(newKeyId);


package/src/aws/infra/stacks/db-dns-stack.ts
@@ -0,0 +1,88 @@
+import { Duration, RemovalPolicy, Stack } from "aws-cdk-lib";
+import { Construct } from "constructs";
+import {
+    PrivateHostedZone,
+    RecordSet,
+    RecordTarget,
+    RecordType,
+} from "aws-cdk-lib/aws-route53";
+import { InfraStackConfiguration } from "./intra-stack-configuration";
+import { importValue, importVpc } from "../import-util";
+import { DbStack } from "./db-stack";
+import { DbProxyStack } from "./db-proxy-stack";
+
+const DEFAULT_RECORD_TTL = Duration.seconds(30);
+
+/**
+ * Creates a dns local zone and creates records for cluster endpoints and proxy endpoints.
+ *
+ */
+export class DbDnsStack extends Stack {
+    constructor(scope: Construct, id: string, isc: InfraStackConfiguration) {
+        super(scope, id, {
+            env: isc.env,
+        });
+
+        this.createDnsRecords(isc);
+    }
+
+    createDnsRecords(isc: InfraStackConfiguration) {
+        const vpc = importVpc(this, isc.environmentName);
+        const zone = new PrivateHostedZone(this, "DNSHostedZone", {
+            zoneName: isc.environmentName + ".local",
+            vpc,
+        });
+
+        zone.applyRemovalPolicy(RemovalPolicy.RETAIN);
+
+        const clusterReaderEndpoint = importValue(
+            isc.environmentName,
+            DbStack.CLUSTER_READ_ENDPOINT_EXPORT_NAME
+        );
+        const clusterWriterEndpoint = importValue(
+            isc.environmentName,
+            DbStack.CLUSTER_WRITE_ENDPOINT_EXPORT_NAME
+        );
+
+        const proxyReaderEndpoint = importValue(
+            isc.environmentName,
+            DbProxyStack.PROXY_READER_EXPORT_NAME
+        );
+        const proxyWriterEndpoint = importValue(
+            isc.environmentName,
+            DbProxyStack.PROXY_WRITER_EXPORT_NAME
+        );
+
+        new RecordSet(this, "ReaderRecord", {
+            recordType: RecordType.CNAME,
+            recordName: `db-ro.${isc.environmentName}.local`,
+            target: RecordTarget.fromValues(clusterReaderEndpoint),
+            ttl: DEFAULT_RECORD_TTL,
+            zone,
+        });
+
+        new RecordSet(this, "WriterRecord", {
+            recordType: RecordType.CNAME,
+            recordName: `db.${isc.environmentName}.local`,
+            target: RecordTarget.fromValues(clusterWriterEndpoint),
+            ttl: DEFAULT_RECORD_TTL,
+            zone,
+        });
+
+        new RecordSet(this, "ProxyReaderRecord", {
+            recordType: RecordType.CNAME,
+            recordName: `proxy-ro.${isc.environmentName}.local`,
+            target: RecordTarget.fromValues(proxyReaderEndpoint),
+            ttl: DEFAULT_RECORD_TTL,
+            zone,
+        });
+
+        new RecordSet(this, "ProxyWriterRecord", {
+            recordType: RecordType.CNAME,
+            recordName: `proxy.${isc.environmentName}.local`,
+            target: RecordTarget.fromValues(proxyWriterEndpoint),
+            ttl: DEFAULT_RECORD_TTL,
+            zone,
+        });
+    }
+}