@digitraffic/common 2023.9.11-1 → 2023.9.14-1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -6,7 +6,7 @@ const aws_cdk_lib_1 = require("aws-cdk-lib");
  class OldStackImports {
  }
  exports.OldStackImports = OldStackImports;
- OldStackImports.AURORAINSTANCE_SG_IMPORT_NAME = "AuroraInstanceSG";
+ OldStackImports.AURORAINSTANCE_SG_IMPORT_NAME = "AuroraSG";
  OldStackImports.RDSPROXY_SG_IMPORT_NAME = "RDSProxySG";
  /**
  * Import VPC from other stack outputs
@@ -0,0 +1,40 @@
+ import { StringParameter } from "aws-cdk-lib/aws-ssm";
+ import { Construct } from "constructs/lib/construct";
+ declare const PARAMETERS: {
+ readonly "topic.alarm": {
+ readonly parameterName: "/digitraffic/monitoring/alarm-topic";
+ };
+ readonly "topic.warning": {
+ readonly parameterName: "/digitraffic/monitoring/warning-topic";
+ };
+ readonly "cluster.reader": {
+ readonly id: "ClusterReaderEndpointParameter";
+ readonly parameterName: "/digitraffic/db/reader-endpoint";
+ readonly description: "Cluster reader endpoint";
+ };
+ readonly "cluster.writer": {
+ readonly id: "ClusterWriterEndpointParameter";
+ readonly parameterName: "/digitraffic/db/writer-endpoint";
+ readonly description: "Cluster writer endpoint";
+ };
+ readonly "cluster.identifier": {
+ readonly id: "ClusterIdentifierParameter";
+ readonly parameterName: "/digitraffic/db/identifier";
+ readonly description: "Cluster identifier";
+ };
+ readonly "proxy.reader": {
+ readonly id: "ProxyReaderEndpointParameter";
+ readonly parameterName: "/digitraffic/db/proxy-reader-endpoint";
+ readonly description: "Proxy reader endpoint";
+ };
+ readonly "proxy.writer": {
+ readonly id: "ProxyWriterEndpointParameter";
+ readonly parameterName: "/digitraffic/db/proxy-writer-endpoint";
+ readonly description: "Proxy writer endpoint";
+ };
+ };
+ export type ReadParameterType = keyof typeof PARAMETERS;
+ export type WriteParameterType = Exclude<Exclude<ReadParameterType, "topic.alarm">, "topic.warning">;
+ export declare function getParameterValue(scope: Construct, parameterType: ReadParameterType): string;
+ export declare function createParameter(scope: Construct, parameterType: WriteParameterType, stringValue: string): StringParameter;
+ export {};
@@ -0,0 +1,55 @@
+ "use strict";
+ Object.defineProperty(exports, "__esModule", { value: true });
+ exports.createParameter = exports.getParameterValue = void 0;
+ const aws_ssm_1 = require("aws-cdk-lib/aws-ssm");
+ const SSM_ROOT = "/digitraffic";
+ const MONITORING_ROOT = "/monitoring";
+ const DB_ROOT = "/db";
+ const PARAMETERS = {
+ "topic.alarm": {
+ parameterName: `${SSM_ROOT}${MONITORING_ROOT}/alarm-topic`,
+ },
+ "topic.warning": {
+ parameterName: `${SSM_ROOT}${MONITORING_ROOT}/warning-topic`,
+ },
+ "cluster.reader": {
+ id: "ClusterReaderEndpointParameter",
+ parameterName: `${SSM_ROOT}${DB_ROOT}/reader-endpoint`,
+ description: "Cluster reader endpoint",
+ },
+ "cluster.writer": {
+ id: "ClusterWriterEndpointParameter",
+ parameterName: `${SSM_ROOT}${DB_ROOT}/writer-endpoint`,
+ description: "Cluster writer endpoint",
+ },
+ "cluster.identifier": {
+ id: "ClusterIdentifierParameter",
+ parameterName: `${SSM_ROOT}${DB_ROOT}/identifier`,
+ description: "Cluster identifier",
+ },
+ "proxy.reader": {
+ id: "ProxyReaderEndpointParameter",
+ parameterName: `${SSM_ROOT}${DB_ROOT}/proxy-reader-endpoint`,
+ description: "Proxy reader endpoint",
+ },
+ "proxy.writer": {
+ id: "ProxyWriterEndpointParameter",
+ parameterName: `${SSM_ROOT}${DB_ROOT}/proxy-writer-endpoint`,
+ description: "Proxy writer endpoint",
+ },
+ };
+ function getParameterValue(scope, parameterType) {
+ const parameterName = PARAMETERS[parameterType].parameterName;
+ return aws_ssm_1.StringParameter.valueForStringParameter(scope, parameterName);
+ }
+ exports.getParameterValue = getParameterValue;
+ function createParameter(scope, parameterType, stringValue) {
+ const { id, parameterName, description } = PARAMETERS[parameterType];
+ return new aws_ssm_1.StringParameter(scope, id, {
+ parameterName,
+ description,
+ stringValue,
+ });
+ }
+ exports.createParameter = createParameter;
+ //# sourceMappingURL=parameters.js.map
@@ -1,5 +1,5 @@
  import { Stack } from "aws-cdk-lib";
- import { Construct } from "constructs";
+ import { Construct } from "constructs/lib/construct";
  import { InfraStackConfiguration } from "./intra-stack-configuration";
  /**
  * Creates a dns local zone and creates records for cluster endpoints and proxy endpoints.
@@ -4,8 +4,7 @@ exports.DbDnsStack = void 0;
  const aws_cdk_lib_1 = require("aws-cdk-lib");
  const aws_route53_1 = require("aws-cdk-lib/aws-route53");
  const import_util_1 = require("../import-util");
- const db_stack_1 = require("./db-stack");
- const db_proxy_stack_1 = require("./db-proxy-stack");
+ const parameters_1 = require("../stack/parameters");
  const DEFAULT_RECORD_TTL = aws_cdk_lib_1.Duration.seconds(30);
  /**
  * Creates a dns local zone and creates records for cluster endpoints and proxy endpoints.
@@ -27,10 +26,10 @@ class DbDnsStack extends aws_cdk_lib_1.Stack {
  vpc,
  });
  zone.applyRemovalPolicy(aws_cdk_lib_1.RemovalPolicy.RETAIN);
- const clusterReaderEndpoint = (0, import_util_1.importValue)(isc.environmentName, db_stack_1.DbStack.CLUSTER_READ_ENDPOINT_EXPORT_NAME);
- const clusterWriterEndpoint = (0, import_util_1.importValue)(isc.environmentName, db_stack_1.DbStack.CLUSTER_WRITE_ENDPOINT_EXPORT_NAME);
- const proxyReaderEndpoint = (0, import_util_1.importValue)(isc.environmentName, db_proxy_stack_1.DbProxyStack.PROXY_READER_EXPORT_NAME);
- const proxyWriterEndpoint = (0, import_util_1.importValue)(isc.environmentName, db_proxy_stack_1.DbProxyStack.PROXY_WRITER_EXPORT_NAME);
+ const clusterReaderEndpoint = (0, parameters_1.getParameterValue)(this, "cluster.reader");
+ const clusterWriterEndpoint = (0, parameters_1.getParameterValue)(this, "cluster.writer");
+ const proxyReaderEndpoint = (0, parameters_1.getParameterValue)(this, "proxy.reader");
+ const proxyWriterEndpoint = (0, parameters_1.getParameterValue)(this, "proxy.writer");
  new aws_route53_1.RecordSet(this, "ReaderRecord", {
  recordType: aws_route53_1.RecordType.CNAME,
  recordName: `db-ro.${isc.environmentName}.local`,
@@ -1,19 +1,24 @@
- import { Stack } from "aws-cdk-lib";
- import { Construct } from "constructs";
  import { CfnDBProxyEndpoint, DatabaseProxy } from "aws-cdk-lib/aws-rds";
  import { ISecret } from "aws-cdk-lib/aws-secretsmanager";
  import { IVpc } from "aws-cdk-lib/aws-ec2";
  import { InfraStackConfiguration } from "./intra-stack-configuration";
- import { DbConfiguration } from "./db-stack";
+ import { Stack } from "aws-cdk-lib/core";
+ import { Construct } from "constructs/lib/construct";
+ export interface ProxyConfiguration {
+ readonly secretArn: string;
+ readonly name?: string;
+ readonly securityGroupId: string;
+ readonly clusterIdentifier: string;
+ }
  /**
  * A stack that creates a Database proxy.
  */
  export declare class DbProxyStack extends Stack {
+ readonly isc: InfraStackConfiguration;
  static PROXY_READER_EXPORT_NAME: string;
  static PROXY_WRITER_EXPORT_NAME: string;
- readonly isc: InfraStackConfiguration;
- constructor(scope: Construct, id: string, isc: InfraStackConfiguration, configuration: DbConfiguration);
- createProxy(vpc: IVpc, secret: ISecret, configuration: DbConfiguration): DatabaseProxy;
+ constructor(scope: Construct, id: string, isc: InfraStackConfiguration, configuration: ProxyConfiguration);
+ setOutputs(proxy: DatabaseProxy): void;
+ createProxy(vpc: IVpc, secret: ISecret, configuration: ProxyConfiguration): DatabaseProxy;
  createProxyEndpoints(vpc: IVpc, proxy: DatabaseProxy, securityGroupId: string): CfnDBProxyEndpoint;
- setOutputs(configuration: DbConfiguration, proxy: DatabaseProxy, proxyEndpoint: CfnDBProxyEndpoint): void;
  }
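
For orientation, DbProxyStack now takes the new ProxyConfiguration instead of DbConfiguration. Below is a minimal, hedged sketch of how a consumer might instantiate it; the isc object, the ARN, the security group id and the cluster identifier are hypothetical placeholders, and the import path is an assumption rather than something stated in this diff:

import { App } from "aws-cdk-lib";
// Assumed import path; within this package the class sits next to db-stack in the stacks directory.
import { DbProxyStack } from "@digitraffic/common/dist/aws/infra/stacks/db-proxy-stack";

const app = new App();
// Hypothetical InfraStackConfiguration values.
const isc = {
    environmentName: "test",
    env: { account: "123456789012", region: "eu-west-1" },
};

new DbProxyStack(app, "DbProxy", isc, {
    secretArn: "arn:aws:secretsmanager:eu-west-1:123456789012:secret:db-secret", // placeholder ARN
    securityGroupId: "sg-0123456789abcdef0", // placeholder security group
    clusterIdentifier: "test-db-cluster", // must be non-empty; the constructor throws on "" (see the .js hunk below)
    // name is optional and defaults to "AuroraProxy"
});
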
@@ -1,34 +1,45 @@
  "use strict";
  Object.defineProperty(exports, "__esModule", { value: true });
  exports.DbProxyStack = void 0;
- const aws_cdk_lib_1 = require("aws-cdk-lib");
  const aws_rds_1 = require("aws-cdk-lib/aws-rds");
  const aws_secretsmanager_1 = require("aws-cdk-lib/aws-secretsmanager");
  const aws_ec2_1 = require("aws-cdk-lib/aws-ec2");
  const db_stack_1 = require("./db-stack");
  const import_util_1 = require("../import-util");
+ const parameters_1 = require("../stack/parameters");
+ const core_1 = require("aws-cdk-lib/core");
  /**
  * A stack that creates a Database proxy.
  */
- class DbProxyStack extends aws_cdk_lib_1.Stack {
+ class DbProxyStack extends core_1.Stack {
  constructor(scope, id, isc, configuration) {
  super(scope, id, {
  env: isc.env,
  });
  this.isc = isc;
+ if (configuration.clusterIdentifier === "") {
+ throw new Error("Empty cluster identifier!");
+ }
  const vpc = (0, import_util_1.importVpc)(this, isc.environmentName);
  const secret = aws_secretsmanager_1.Secret.fromSecretAttributes(this, "proxy-secret", {
  secretCompleteArn: configuration.secretArn,
  });
  const proxy = this.createProxy(vpc, secret, configuration);
- const readerEndpoint = this.createProxyEndpoints(vpc, proxy, configuration.proxy.securityGroupId);
- this.setOutputs(configuration, proxy, readerEndpoint);
+ const readerEndpoint = this.createProxyEndpoints(vpc, proxy, configuration.securityGroupId);
+ (0, parameters_1.createParameter)(this, "proxy.reader", readerEndpoint.attrEndpoint);
+ (0, parameters_1.createParameter)(this, "proxy.writer", proxy.endpoint);
+ this.setOutputs(proxy);
+ }
+ setOutputs(proxy) {
+ // if only one instance, then there is no reader-endpoint
+ (0, import_util_1.exportValue)(this, this.isc.environmentName, DbProxyStack.PROXY_READER_EXPORT_NAME, proxy.endpoint);
+ (0, import_util_1.exportValue)(this, this.isc.environmentName, DbProxyStack.PROXY_WRITER_EXPORT_NAME, proxy.endpoint);
  }
  createProxy(vpc, secret, configuration) {
  const proxyId = `${this.isc.environmentName}-proxy`;
- const securityGroup = aws_ec2_1.SecurityGroup.fromSecurityGroupId(this, "securitygroup", configuration.proxy.securityGroupId);
+ const securityGroup = aws_ec2_1.SecurityGroup.fromSecurityGroupId(this, "securitygroup", configuration.securityGroupId);
  const cluster = aws_rds_1.DatabaseCluster.fromDatabaseClusterAttributes(this, "db-cluster", {
- clusterIdentifier: (0, import_util_1.importValue)(this.isc.environmentName, db_stack_1.DbStack.CLUSTER_IDENTIFIER_EXPORT_NAME),
+ clusterIdentifier: configuration.clusterIdentifier,
  engine: aws_rds_1.DatabaseClusterEngine.AURORA_POSTGRESQL,
  port: db_stack_1.DbStack.CLUSTER_PORT,
  });
@@ -38,13 +49,13 @@ class DbProxyStack extends aws_cdk_lib_1.Stack {
  /* nothing */
  };
  return new aws_rds_1.DatabaseProxy(this, proxyId, {
- dbProxyName: configuration.proxy.name ?? "AuroraProxy",
+ dbProxyName: configuration.name ?? "AuroraProxy",
  securityGroups: [securityGroup],
  proxyTarget: aws_rds_1.ProxyTarget.fromCluster(cluster),
- idleClientTimeout: aws_cdk_lib_1.Duration.seconds(1800),
+ idleClientTimeout: core_1.Duration.seconds(1800),
  maxConnectionsPercent: 50,
  maxIdleConnectionsPercent: 25,
- borrowTimeout: aws_cdk_lib_1.Duration.seconds(120),
+ borrowTimeout: core_1.Duration.seconds(120),
  requireTLS: false,
  secrets: [secret],
  vpc: vpc,
@@ -59,14 +70,6 @@ class DbProxyStack extends aws_cdk_lib_1.Stack {
  targetRole: "READ_ONLY",
  });
  }
- setOutputs(configuration, proxy, proxyEndpoint) {
- const readerEndpoint = configuration.instances > 1
- ? proxyEndpoint.attrEndpoint
- : proxy.endpoint;
- // if only one instance, then there is no reader-endpoint
- (0, import_util_1.exportValue)(this, this.isc.environmentName, DbProxyStack.PROXY_READER_EXPORT_NAME, readerEndpoint);
- (0, import_util_1.exportValue)(this, this.isc.environmentName, DbProxyStack.PROXY_WRITER_EXPORT_NAME, proxy.endpoint);
- }
  }
  exports.DbProxyStack = DbProxyStack;
  DbProxyStack.PROXY_READER_EXPORT_NAME = "db-reader-endpoint";
@@ -1,26 +1,32 @@
- import { Stack } from "aws-cdk-lib";
  import { InstanceType, IVpc } from "aws-cdk-lib/aws-ec2";
  import { ISecurityGroup } from "aws-cdk-lib/aws-ec2/lib/security-group";
  import { AuroraPostgresEngineVersion, DatabaseCluster, DatabaseClusterProps, IParameterGroup } from "aws-cdk-lib/aws-rds";
- import { Construct } from "constructs";
+ import { Construct } from "constructs/lib/construct";
  import { InfraStackConfiguration } from "./intra-stack-configuration";
+ import { Stack } from "aws-cdk-lib/core";
  export interface DbConfiguration {
+ readonly cluster?: ClusterConfiguration;
+ readonly clusterImport?: ClusterImportConfiguration;
+ readonly customParameterGroups: AuroraPostgresEngineVersion[];
+ readonly workmem?: number;
  /** superuser username and password are fetched from this secret, using keys
  * db.superuser and db.superuser.password
  */
  readonly secretArn: string;
- readonly dbVersion: AuroraPostgresEngineVersion;
+ /** If this is not specified, import default vpc */
+ readonly vpc?: IVpc;
+ }
+ export interface ClusterConfiguration {
+ readonly securityGroupId: string;
  readonly dbInstanceType: InstanceType;
  readonly snapshotIdentifier?: string;
  readonly instances: number;
- readonly customParameterGroup: boolean;
- readonly securityGroupId: string;
- /** If this is not specified, import default vpc */
- readonly vpc?: IVpc;
- readonly proxy: {
- readonly name?: string;
- readonly securityGroupId: string;
- };
+ readonly dbVersion: AuroraPostgresEngineVersion;
+ readonly storageEncrypted?: boolean;
+ }
+ export interface ClusterImportConfiguration {
+ readonly clusterReadEndpoint: string;
+ readonly clusterWriteEndpoint: string;
  }
  /**
  * Stack that creates DatabaseCluster.
@@ -28,20 +34,17 @@ export interface DbConfiguration {
  * Please not, that created Cluster has RETAIL removalPolicy, so if you want to delete the stack,
  * you must first deploy without parameter group, then delete stack and manually delete cluster.
  *
- * How to upgrade major version?
- * 0. Set correct SG for db-stack and db-proxy-stack(this step will be removed in the future)
- * 1. Update db-stack WITHOUT parameter group
- * 2. Upgrade extensions by hand
- * 3. Upgrade database from the AWS console
- * 4. Update db-stack with the upgraded version and custom parameter group
+ * You should deploy once with cluster and then without. This way you can create the cluster with this
+ * stack, but cluster is not part of the stack after that.
  */
  export declare class DbStack extends Stack {
+ static CLUSTER_PORT: number;
  static CLUSTER_IDENTIFIER_EXPORT_NAME: string;
  static CLUSTER_READ_ENDPOINT_EXPORT_NAME: string;
  static CLUSTER_WRITE_ENDPOINT_EXPORT_NAME: string;
- static CLUSTER_PORT: number;
+ clusterIdentifier: string;
  constructor(scope: Construct, id: string, isc: InfraStackConfiguration, configuration: DbConfiguration);
- createParamaterGroup(configuration: DbConfiguration): IParameterGroup;
- createClusterParameters(configuration: DbConfiguration, instanceName: string, vpc: IVpc, securityGroup: ISecurityGroup, parameterGroup: IParameterGroup): DatabaseClusterProps;
- createAuroraCluster(isc: InfraStackConfiguration, configuration: DbConfiguration): DatabaseCluster;
+ createParameterGroups(customVersions: AuroraPostgresEngineVersion[], workmem: number): IParameterGroup[];
+ createClusterParameters(secretArn: string, clusterConfiguration: ClusterConfiguration, instanceName: string, vpc: IVpc, securityGroup: ISecurityGroup, parameterGroup: IParameterGroup): DatabaseClusterProps;
+ createAuroraCluster(isc: InfraStackConfiguration, configuration: DbConfiguration, clusterConfiguration: ClusterConfiguration, parameterGroups: IParameterGroup[]): DatabaseCluster;
  }
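
A hedged sketch of the reshaped DbConfiguration: exactly one of cluster or clusterImport is expected (the compiled constructor below throws "Configure either cluster or clusterImport" otherwise), and customParameterGroups now takes a list of Aurora engine versions instead of the old customParameterGroup flag. All concrete values here are illustrative placeholders, and the object is left untyped so the sketch does not depend on the package's import paths:

import { InstanceClass, InstanceSize, InstanceType } from "aws-cdk-lib/aws-ec2";
import { AuroraPostgresEngineVersion } from "aws-cdk-lib/aws-rds";

// Matches the DbConfiguration/ClusterConfiguration interfaces above; values are placeholders.
const dbConfiguration = {
    secretArn: "arn:aws:secretsmanager:eu-west-1:123456789012:secret:db-secret",
    customParameterGroups: [AuroraPostgresEngineVersion.VER_14_6],
    workmem: 524288, // optional; the stack defaults to 524288 (512 MiB)
    cluster: {
        securityGroupId: "sg-0123456789abcdef0",
        dbInstanceType: InstanceType.of(InstanceClass.R6G, InstanceSize.LARGE),
        instances: 2,
        dbVersion: AuroraPostgresEngineVersion.VER_14_6,
    },
    // Alternative to cluster: reference an existing cluster instead of creating one.
    // clusterImport: {
    //     clusterReadEndpoint: "reader.example.rds.amazonaws.com",
    //     clusterWriteEndpoint: "writer.example.rds.amazonaws.com",
    // },
};
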
@@ -1,65 +1,83 @@
  "use strict";
  Object.defineProperty(exports, "__esModule", { value: true });
  exports.DbStack = void 0;
- const aws_cdk_lib_1 = require("aws-cdk-lib");
  const aws_ec2_1 = require("aws-cdk-lib/aws-ec2");
  const aws_rds_1 = require("aws-cdk-lib/aws-rds");
  const aws_secretsmanager_1 = require("aws-cdk-lib/aws-secretsmanager");
  const import_util_1 = require("../import-util");
+ const core_1 = require("aws-cdk-lib/core");
+ const parameters_1 = require("../stack/parameters");
  /**
  * Stack that creates DatabaseCluster.
  *
  * Please not, that created Cluster has RETAIL removalPolicy, so if you want to delete the stack,
  * you must first deploy without parameter group, then delete stack and manually delete cluster.
  *
- * How to upgrade major version?
- * 0. Set correct SG for db-stack and db-proxy-stack(this step will be removed in the future)
- * 1. Update db-stack WITHOUT parameter group
- * 2. Upgrade extensions by hand
- * 3. Upgrade database from the AWS console
- * 4. Update db-stack with the upgraded version and custom parameter group
+ * You should deploy once with cluster and then without. This way you can create the cluster with this
+ * stack, but cluster is not part of the stack after that.
  */
- class DbStack extends aws_cdk_lib_1.Stack {
+ class DbStack extends core_1.Stack {
  constructor(scope, id, isc, configuration) {
  super(scope, id, {
  env: isc.env,
  });
- const cluster = this.createAuroraCluster(isc, configuration);
- (0, import_util_1.exportValue)(this, isc.environmentName, DbStack.CLUSTER_IDENTIFIER_EXPORT_NAME, cluster.clusterIdentifier);
- (0, import_util_1.exportValue)(this, isc.environmentName, DbStack.CLUSTER_WRITE_ENDPOINT_EXPORT_NAME, cluster.clusterEndpoint.hostname);
- (0, import_util_1.exportValue)(this, isc.environmentName, DbStack.CLUSTER_READ_ENDPOINT_EXPORT_NAME, cluster.clusterReadEndpoint.hostname);
+ this.clusterIdentifier = "";
+ const parameterGroups = this.createParameterGroups(configuration.customParameterGroups, configuration.workmem ?? 524288);
+ if ((configuration.cluster && configuration.clusterImport) ||
+ (!configuration.cluster && !configuration.clusterImport)) {
+ throw new Error("Configure either cluster or clusterImport");
+ }
+ // create cluster if this is wanted, should do it only once
+ if (configuration.cluster) {
+ const cluster = this.createAuroraCluster(isc, configuration, configuration.cluster, parameterGroups);
+ (0, import_util_1.exportValue)(this, isc.environmentName, DbStack.CLUSTER_IDENTIFIER_EXPORT_NAME, cluster.clusterIdentifier);
+ (0, import_util_1.exportValue)(this, isc.environmentName, DbStack.CLUSTER_WRITE_ENDPOINT_EXPORT_NAME, cluster.clusterEndpoint.hostname);
+ (0, import_util_1.exportValue)(this, isc.environmentName, DbStack.CLUSTER_READ_ENDPOINT_EXPORT_NAME, cluster.clusterReadEndpoint.hostname);
+ (0, parameters_1.createParameter)(this, "cluster.reader", cluster.clusterReadEndpoint.hostname);
+ (0, parameters_1.createParameter)(this, "cluster.writer", cluster.clusterEndpoint.hostname);
+ (0, parameters_1.createParameter)(this, "cluster.identifier", cluster.clusterIdentifier);
+ this.clusterIdentifier = cluster.clusterIdentifier;
+ }
+ if (configuration.clusterImport) {
+ (0, parameters_1.createParameter)(this, "cluster.reader", configuration.clusterImport.clusterReadEndpoint);
+ (0, parameters_1.createParameter)(this, "cluster.writer", configuration.clusterImport.clusterWriteEndpoint);
+ }
  }
- createParamaterGroup(configuration) {
- return configuration.customParameterGroup
- ? new aws_rds_1.ParameterGroup(this, `parameter-group-${configuration.dbVersion.auroraPostgresMajorVersion}`, {
+ createParameterGroups(customVersions, workmem) {
+ return customVersions.map((version) => {
+ const pg = new aws_rds_1.ParameterGroup(this, `parameter-group-${version.auroraPostgresMajorVersion}`, {
  engine: aws_rds_1.DatabaseClusterEngine.auroraPostgres({
- version: configuration.dbVersion,
+ version,
  }),
  parameters: {
  "pg_stat_statements.track": "ALL",
  random_page_cost: "1",
- work_mem: "524288", // 512 MiB
+ work_mem: workmem.toString(),
  },
- })
- : aws_rds_1.ParameterGroup.fromParameterGroupName(this, "ParameterGroup", `default.aurora-postgresql${configuration.dbVersion.auroraPostgresMajorVersion}`);
+ });
+ // create both cluster parameter group and instance parameter group
+ pg.bindToCluster({});
+ pg.bindToInstance({});
+ return pg;
+ });
  }
- createClusterParameters(configuration, instanceName, vpc, securityGroup, parameterGroup) {
- const secret = aws_secretsmanager_1.Secret.fromSecretCompleteArn(this, "DBSecret", configuration.secretArn);
+ createClusterParameters(secretArn, clusterConfiguration, instanceName, vpc, securityGroup, parameterGroup) {
+ const secret = aws_secretsmanager_1.Secret.fromSecretCompleteArn(this, "DBSecret", secretArn);
  return {
  engine: aws_rds_1.DatabaseClusterEngine.auroraPostgres({
- version: configuration.dbVersion,
+ version: clusterConfiguration.dbVersion,
  }),
- instances: configuration.instances,
+ instances: clusterConfiguration.instances,
  instanceUpdateBehaviour: aws_rds_1.InstanceUpdateBehaviour.ROLLING,
  instanceIdentifierBase: instanceName + "-",
  cloudwatchLogsExports: ["postgresql"],
  backup: {
- retention: aws_cdk_lib_1.Duration.days(35),
+ retention: core_1.Duration.days(35),
  preferredWindow: "01:00-02:00",
  },
  preferredMaintenanceWindow: "mon:03:00-mon:04:00",
  deletionProtection: true,
- removalPolicy: aws_cdk_lib_1.RemovalPolicy.RETAIN,
+ removalPolicy: core_1.RemovalPolicy.RETAIN,
  port: DbStack.CLUSTER_PORT,
  instanceProps: {
  autoMinorVersionUpgrade: true,
@@ -70,28 +88,29 @@ class DbStack extends aws_cdk_lib_1.Stack {
  vpcSubnets: {
  subnetType: aws_ec2_1.SubnetType.PRIVATE_WITH_EGRESS,
  },
- instanceType: configuration.dbInstanceType,
+ instanceType: clusterConfiguration.dbInstanceType,
  parameterGroup,
  },
  credentials: aws_rds_1.Credentials.fromPassword(secret.secretValueFromJson("db.superuser").unsafeUnwrap(), secret.secretValueFromJson("db.superuser.password")),
  parameterGroup,
- storageEncrypted: true,
- monitoringInterval: aws_cdk_lib_1.Duration.seconds(30),
+ // storageEncrypted: clusterConfiguration.storageEncrypted ?? true,
+ monitoringInterval: core_1.Duration.seconds(30),
  };
  }
- createAuroraCluster(isc, configuration) {
+ createAuroraCluster(isc, configuration, clusterConfiguration, parameterGroups) {
  const instanceName = isc.environmentName + "-db";
- const securityGroup = aws_ec2_1.SecurityGroup.fromSecurityGroupId(this, "securitygroup", configuration.securityGroupId);
- const parameterGroup = this.createParamaterGroup(configuration);
+ const securityGroup = aws_ec2_1.SecurityGroup.fromSecurityGroupId(this, "securitygroup", clusterConfiguration.securityGroupId);
  const vpc = configuration.vpc
  ? configuration.vpc
  : (0, import_util_1.importVpc)(this, isc.environmentName);
- const parameters = this.createClusterParameters(configuration, instanceName, vpc, securityGroup, parameterGroup);
+ const parameters = this.createClusterParameters(configuration.secretArn, clusterConfiguration, instanceName, vpc, securityGroup, parameterGroups[0]);
  // create cluster from the snapshot or from the scratch
- const cluster = configuration.snapshotIdentifier
+ const cluster = clusterConfiguration.snapshotIdentifier
  ? new aws_rds_1.DatabaseClusterFromSnapshot(this, instanceName, {
  ...parameters,
- ...{ snapshotIdentifier: configuration.snapshotIdentifier },
+ ...{
+ snapshotIdentifier: clusterConfiguration.snapshotIdentifier,
+ },
  })
  : new aws_rds_1.DatabaseCluster(this, instanceName, parameters);
  // this workaround should prevent stack failing on version upgrade
@@ -100,13 +119,12 @@ class DbStack extends aws_cdk_lib_1.Stack {
  throw new Error("Couldn't pull CfnDBInstances from the L1 constructs!");
  }
  cfnInstances.forEach((cfnInstance) => delete cfnInstance.engineVersion);
- cluster.node.addDependency(parameterGroup);
  return cluster;
  }
  }
  exports.DbStack = DbStack;
+ DbStack.CLUSTER_PORT = 5432;
  DbStack.CLUSTER_IDENTIFIER_EXPORT_NAME = "db-cluster";
  DbStack.CLUSTER_READ_ENDPOINT_EXPORT_NAME = "db-cluster-reader-endpoint";
  DbStack.CLUSTER_WRITE_ENDPOINT_EXPORT_NAME = "db-cluster-writer-endpoint";
- DbStack.CLUSTER_PORT = 5432;
  //# sourceMappingURL=db-stack.js.map
@@ -1,4 +1,4 @@
- import { Environment } from "aws-cdk-lib";
+ import { Environment } from "aws-cdk-lib/core";
  export interface InfraStackConfiguration {
  readonly env: Environment;
  readonly environmentName: string;
@@ -1,7 +1,7 @@
- import { Stack } from "aws-cdk-lib";
- import { Construct } from "constructs";
  import { IVpc, Vpc } from "aws-cdk-lib/aws-ec2";
  import { InfraStackConfiguration } from "./intra-stack-configuration";
+ import { Stack } from "aws-cdk-lib/core";
+ import { Construct } from "constructs/lib/construct";
  export interface NetworkConfiguration {
  readonly vpcName: string;
  readonly cidr: string;
@@ -1,10 +1,10 @@
  "use strict";
  Object.defineProperty(exports, "__esModule", { value: true });
  exports.NetworkStack = void 0;
- const aws_cdk_lib_1 = require("aws-cdk-lib");
  const aws_ec2_1 = require("aws-cdk-lib/aws-ec2");
  const import_util_1 = require("../import-util");
- class NetworkStack extends aws_cdk_lib_1.Stack {
+ const core_1 = require("aws-cdk-lib/core");
+ class NetworkStack extends core_1.Stack {
  constructor(scope, id, isc, configuration) {
  super(scope, id, {
  env: isc.env,
@@ -19,7 +19,7 @@ class NetworkStack extends aws_cdk_lib_1.Stack {
  createVpc(configuration) {
  return new aws_ec2_1.Vpc(this, "DigitrafficVPC", {
  vpcName: configuration.vpcName,
- availabilityZones: aws_cdk_lib_1.Stack.of(this)
+ availabilityZones: core_1.Stack.of(this)
  .availabilityZones.sort()
  .slice(0, 2),
  enableDnsHostnames: true,
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "@digitraffic/common",
- "version": "2023.9.11-1",
+ "version": "2023.9.14-1",
  "description": "",
  "repository": {
  "type": "git",
@@ -3,7 +3,7 @@ import { CfnOutput, Fn, Stack } from "aws-cdk-lib";
  import { Construct } from "constructs";

  export class OldStackImports {
- public static AURORAINSTANCE_SG_IMPORT_NAME = "AuroraInstanceSG";
+ public static AURORAINSTANCE_SG_IMPORT_NAME = "AuroraSG";
  public static RDSPROXY_SG_IMPORT_NAME = "RDSProxySG";
  }

@@ -0,0 +1,74 @@
+ import { StringParameter } from "aws-cdk-lib/aws-ssm";
+ import { Construct } from "constructs/lib/construct";
+
+ const SSM_ROOT = "/digitraffic" as const;
+ const MONITORING_ROOT = "/monitoring" as const;
+ const DB_ROOT = "/db" as const;
+
+ interface Parameter {
+ readonly id?: string;
+ readonly parameterName: string;
+ readonly description?: string;
+ }
+
+ const PARAMETERS = {
+ "topic.alarm": {
+ parameterName: `${SSM_ROOT}${MONITORING_ROOT}/alarm-topic`,
+ },
+ "topic.warning": {
+ parameterName: `${SSM_ROOT}${MONITORING_ROOT}/warning-topic`,
+ },
+ "cluster.reader": {
+ id: "ClusterReaderEndpointParameter",
+ parameterName: `${SSM_ROOT}${DB_ROOT}/reader-endpoint`,
+ description: "Cluster reader endpoint",
+ },
+ "cluster.writer": {
+ id: "ClusterWriterEndpointParameter",
+ parameterName: `${SSM_ROOT}${DB_ROOT}/writer-endpoint`,
+ description: "Cluster writer endpoint",
+ },
+ "cluster.identifier": {
+ id: "ClusterIdentifierParameter",
+ parameterName: `${SSM_ROOT}${DB_ROOT}/identifier`,
+ description: "Cluster identifier",
+ },
+ "proxy.reader": {
+ id: "ProxyReaderEndpointParameter",
+ parameterName: `${SSM_ROOT}${DB_ROOT}/proxy-reader-endpoint`,
+ description: "Proxy reader endpoint",
+ },
+ "proxy.writer": {
+ id: "ProxyWriterEndpointParameter",
+ parameterName: `${SSM_ROOT}${DB_ROOT}/proxy-writer-endpoint`,
+ description: "Proxy writer endpoint",
+ },
+ } as const satisfies Record<string, Parameter>;
+
+ export type ReadParameterType = keyof typeof PARAMETERS;
+ export type WriteParameterType = Exclude<
+ Exclude<ReadParameterType, "topic.alarm">,
+ "topic.warning"
+ >;
+
+ export function getParameterValue(
+ scope: Construct,
+ parameterType: ReadParameterType
+ ) {
+ const parameterName = PARAMETERS[parameterType].parameterName;
+ return StringParameter.valueForStringParameter(scope, parameterName);
+ }
+
+ export function createParameter(
+ scope: Construct,
+ parameterType: WriteParameterType,
+ stringValue: string
+ ): StringParameter {
+ const { id, parameterName, description } = PARAMETERS[parameterType];
+
+ return new StringParameter(scope, id, {
+ parameterName,
+ description,
+ stringValue,
+ });
+ }
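
For context, a short hedged sketch of how these helpers are intended to be used together: the db and proxy stacks write the endpoint values with createParameter, and consumers such as DbDnsStack read them back with getParameterValue (visible in the db-dns-stack hunks elsewhere in this diff). The relative import path and the endpoint string below are illustrative, not taken from the diff:

import { CfnOutput, Stack } from "aws-cdk-lib/core";
import { Construct } from "constructs/lib/construct";
// Illustrative path; inside this package the module is imported as "../stack/parameters".
import { createParameter, getParameterValue } from "./parameters";

class ExampleStack extends Stack {
    constructor(scope: Construct, id: string) {
        super(scope, id);

        // Write one of the db/proxy parameters; "topic.alarm" and "topic.warning" are read-only by type.
        createParameter(this, "cluster.writer", "writer.cluster-abc.eu-west-1.rds.amazonaws.com");

        // Read it back as a deploy-time string (resolved through CloudFormation's SSM parameter support).
        new CfnOutput(this, "WriterEndpoint", {
            value: getParameterValue(this, "cluster.writer"),
        });
    }
}
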
@@ -1,5 +1,5 @@
  import { Duration, RemovalPolicy, Stack } from "aws-cdk-lib";
- import { Construct } from "constructs";
+ import { Construct } from "constructs/lib/construct";
  import {
  PrivateHostedZone,
  RecordSet,
@@ -7,9 +7,8 @@ import {
  RecordType,
  } from "aws-cdk-lib/aws-route53";
  import { InfraStackConfiguration } from "./intra-stack-configuration";
- import { importValue, importVpc } from "../import-util";
- import { DbStack } from "./db-stack";
- import { DbProxyStack } from "./db-proxy-stack";
+ import { importVpc } from "../import-util";
+ import { getParameterValue } from "../stack/parameters";

  const DEFAULT_RECORD_TTL = Duration.seconds(30);

@@ -37,23 +36,11 @@ export class DbDnsStack extends Stack {

  zone.applyRemovalPolicy(RemovalPolicy.RETAIN);

- const clusterReaderEndpoint = importValue(
- isc.environmentName,
- DbStack.CLUSTER_READ_ENDPOINT_EXPORT_NAME
- );
- const clusterWriterEndpoint = importValue(
- isc.environmentName,
- DbStack.CLUSTER_WRITE_ENDPOINT_EXPORT_NAME
- );
+ const clusterReaderEndpoint = getParameterValue(this, "cluster.reader");
+ const clusterWriterEndpoint = getParameterValue(this, "cluster.writer");

- const proxyReaderEndpoint = importValue(
- isc.environmentName,
- DbProxyStack.PROXY_READER_EXPORT_NAME
- );
- const proxyWriterEndpoint = importValue(
- isc.environmentName,
- DbProxyStack.PROXY_WRITER_EXPORT_NAME
- );
+ const proxyReaderEndpoint = getParameterValue(this, "proxy.reader");
+ const proxyWriterEndpoint = getParameterValue(this, "proxy.writer");

  new RecordSet(this, "ReaderRecord", {
  recordType: RecordType.CNAME,
@@ -1,5 +1,3 @@
- import { Duration, Stack } from "aws-cdk-lib";
- import { Construct } from "constructs";
  import {
  CfnDBProxyEndpoint,
  DatabaseCluster,
@@ -10,23 +8,33 @@ import {
  import { ISecret, Secret } from "aws-cdk-lib/aws-secretsmanager";
  import { IVpc, SecurityGroup } from "aws-cdk-lib/aws-ec2";
  import { InfraStackConfiguration } from "./intra-stack-configuration";
- import { DbConfiguration, DbStack } from "./db-stack";
- import { exportValue, importValue, importVpc } from "../import-util";
+ import { DbStack } from "./db-stack";
+ import { exportValue, importVpc } from "../import-util";
+ import { createParameter } from "../stack/parameters";
+ import { Stack, Duration } from "aws-cdk-lib/core";
+ import { Construct } from "constructs/lib/construct";
+
+ export interface ProxyConfiguration {
+ readonly secretArn: string;
+ readonly name?: string;
+ readonly securityGroupId: string;
+ readonly clusterIdentifier: string;
+ }

  /**
  * A stack that creates a Database proxy.
  */
  export class DbProxyStack extends Stack {
+ readonly isc: InfraStackConfiguration;
+
  public static PROXY_READER_EXPORT_NAME = "db-reader-endpoint";
  public static PROXY_WRITER_EXPORT_NAME = "db-writer-endpoint";

- readonly isc: InfraStackConfiguration;
-
  constructor(
  scope: Construct,
  id: string,
  isc: InfraStackConfiguration,
- configuration: DbConfiguration
+ configuration: ProxyConfiguration
  ) {
  super(scope, id, {
  env: isc.env,
@@ -34,6 +42,10 @@ export class DbProxyStack extends Stack {

  this.isc = isc;

+ if (configuration.clusterIdentifier === "") {
+ throw new Error("Empty cluster identifier!");
+ }
+
  const vpc = importVpc(this, isc.environmentName);
  const secret = Secret.fromSecretAttributes(this, "proxy-secret", {
  secretCompleteArn: configuration.secretArn,
@@ -43,27 +55,44 @@ export class DbProxyStack extends Stack {
  const readerEndpoint = this.createProxyEndpoints(
  vpc,
  proxy,
- configuration.proxy.securityGroupId
+ configuration.securityGroupId
  );
- this.setOutputs(configuration, proxy, readerEndpoint);
+
+ createParameter(this, "proxy.reader", readerEndpoint.attrEndpoint);
+ createParameter(this, "proxy.writer", proxy.endpoint);
+
+ this.setOutputs(proxy);
  }

- createProxy(vpc: IVpc, secret: ISecret, configuration: DbConfiguration) {
+ setOutputs(proxy: DatabaseProxy) {
+ // if only one instance, then there is no reader-endpoint
+ exportValue(
+ this,
+ this.isc.environmentName,
+ DbProxyStack.PROXY_READER_EXPORT_NAME,
+ proxy.endpoint
+ );
+ exportValue(
+ this,
+ this.isc.environmentName,
+ DbProxyStack.PROXY_WRITER_EXPORT_NAME,
+ proxy.endpoint
+ );
+ }
+
+ createProxy(vpc: IVpc, secret: ISecret, configuration: ProxyConfiguration) {
  const proxyId = `${this.isc.environmentName}-proxy`;
  const securityGroup = SecurityGroup.fromSecurityGroupId(
  this,
  "securitygroup",
- configuration.proxy.securityGroupId
+ configuration.securityGroupId
  );

  const cluster = DatabaseCluster.fromDatabaseClusterAttributes(
  this,
  "db-cluster",
  {
- clusterIdentifier: importValue(
- this.isc.environmentName,
- DbStack.CLUSTER_IDENTIFIER_EXPORT_NAME
- ),
+ clusterIdentifier: configuration.clusterIdentifier,
  engine: DatabaseClusterEngine.AURORA_POSTGRESQL,
  port: DbStack.CLUSTER_PORT,
  }
@@ -76,7 +105,7 @@ export class DbProxyStack extends Stack {
  };

  return new DatabaseProxy(this, proxyId, {
- dbProxyName: configuration.proxy.name ?? "AuroraProxy",
+ dbProxyName: configuration.name ?? "AuroraProxy",
  securityGroups: [securityGroup],
  proxyTarget: ProxyTarget.fromCluster(cluster),
  idleClientTimeout: Duration.seconds(1800),
@@ -102,29 +131,4 @@ export class DbProxyStack extends Stack {
  targetRole: "READ_ONLY",
  });
  }
-
- setOutputs(
- configuration: DbConfiguration,
- proxy: DatabaseProxy,
- proxyEndpoint: CfnDBProxyEndpoint
- ) {
- const readerEndpoint =
- configuration.instances > 1
- ? proxyEndpoint.attrEndpoint
- : proxy.endpoint;
-
- // if only one instance, then there is no reader-endpoint
- exportValue(
- this,
- this.isc.environmentName,
- DbProxyStack.PROXY_READER_EXPORT_NAME,
- readerEndpoint
- );
- exportValue(
- this,
- this.isc.environmentName,
- DbProxyStack.PROXY_WRITER_EXPORT_NAME,
- proxy.endpoint
- );
- }
  }
@@ -1,10 +1,8 @@
- import { Duration, RemovalPolicy, Stack } from "aws-cdk-lib";
  import {
  InstanceType,
  IVpc,
  SecurityGroup,
  SubnetType,
- Vpc,
  } from "aws-cdk-lib/aws-ec2";
  import { ISecurityGroup } from "aws-cdk-lib/aws-ec2/lib/security-group";
  import {
@@ -19,30 +17,41 @@ import {
  IParameterGroup,
  ParameterGroup,
  } from "aws-cdk-lib/aws-rds";
- import { Construct } from "constructs";
+ import { Construct } from "constructs/lib/construct";
  import { Secret } from "aws-cdk-lib/aws-secretsmanager";
  import { InfraStackConfiguration } from "./intra-stack-configuration";
  import { exportValue, importVpc } from "../import-util";
+ import { Duration, RemovalPolicy, Stack } from "aws-cdk-lib/core";
+ import { createParameter } from "../stack/parameters";

  export interface DbConfiguration {
+ readonly cluster?: ClusterConfiguration;
+ readonly clusterImport?: ClusterImportConfiguration;
+
+ readonly customParameterGroups: AuroraPostgresEngineVersion[];
+ readonly workmem?: number; // default 524288, 512MiB
+
  /** superuser username and password are fetched from this secret, using keys
  * db.superuser and db.superuser.password
  */
  readonly secretArn: string;

- readonly dbVersion: AuroraPostgresEngineVersion;
+ /** If this is not specified, import default vpc */
+ readonly vpc?: IVpc;
+ }
+
+ export interface ClusterConfiguration {
+ readonly securityGroupId: string;
  readonly dbInstanceType: InstanceType;
  readonly snapshotIdentifier?: string;
  readonly instances: number;
- readonly customParameterGroup: boolean;
- readonly securityGroupId: string;
- /** If this is not specified, import default vpc */
- readonly vpc?: IVpc;
+ readonly dbVersion: AuroraPostgresEngineVersion;
+ readonly storageEncrypted?: boolean; /// default true
+ }

- readonly proxy: {
- readonly name?: string;
- readonly securityGroupId: string;
- };
+ export interface ClusterImportConfiguration {
+ readonly clusterReadEndpoint: string;
+ readonly clusterWriteEndpoint: string;
  }

  /**
@@ -51,22 +60,20 @@ export interface DbConfiguration {
  * Please not, that created Cluster has RETAIL removalPolicy, so if you want to delete the stack,
  * you must first deploy without parameter group, then delete stack and manually delete cluster.
  *
- * How to upgrade major version?
- * 0. Set correct SG for db-stack and db-proxy-stack(this step will be removed in the future)
- * 1. Update db-stack WITHOUT parameter group
- * 2. Upgrade extensions by hand
- * 3. Upgrade database from the AWS console
- * 4. Update db-stack with the upgraded version and custom parameter group
+ * You should deploy once with cluster and then without. This way you can create the cluster with this
+ * stack, but cluster is not part of the stack after that.
  */

  export class DbStack extends Stack {
+ public static CLUSTER_PORT = 5432;
+
  public static CLUSTER_IDENTIFIER_EXPORT_NAME = "db-cluster";
  public static CLUSTER_READ_ENDPOINT_EXPORT_NAME =
  "db-cluster-reader-endpoint";
  public static CLUSTER_WRITE_ENDPOINT_EXPORT_NAME =
  "db-cluster-writer-endpoint";

- public static CLUSTER_PORT = 5432;
+ public clusterIdentifier = "";

  constructor(
  scope: Construct,
@@ -78,53 +85,112 @@ export class DbStack extends Stack {
  env: isc.env,
  });

- const cluster = this.createAuroraCluster(isc, configuration);
-
- exportValue(
- this,
- isc.environmentName,
- DbStack.CLUSTER_IDENTIFIER_EXPORT_NAME,
- cluster.clusterIdentifier
- );
- exportValue(
- this,
- isc.environmentName,
- DbStack.CLUSTER_WRITE_ENDPOINT_EXPORT_NAME,
- cluster.clusterEndpoint.hostname
- );
- exportValue(
- this,
- isc.environmentName,
- DbStack.CLUSTER_READ_ENDPOINT_EXPORT_NAME,
- cluster.clusterReadEndpoint.hostname
+ const parameterGroups = this.createParameterGroups(
+ configuration.customParameterGroups,
+ configuration.workmem ?? 524288
  );
+
+ if (
+ (configuration.cluster && configuration.clusterImport) ||
+ (!configuration.cluster && !configuration.clusterImport)
+ ) {
+ throw new Error("Configure either cluster or clusterImport");
+ }
+
+ // create cluster if this is wanted, should do it only once
+ if (configuration.cluster) {
+ const cluster = this.createAuroraCluster(
+ isc,
+ configuration,
+ configuration.cluster,
+ parameterGroups
+ );
+
+ exportValue(
+ this,
+ isc.environmentName,
+ DbStack.CLUSTER_IDENTIFIER_EXPORT_NAME,
+ cluster.clusterIdentifier
+ );
+
+ exportValue(
+ this,
+ isc.environmentName,
+ DbStack.CLUSTER_WRITE_ENDPOINT_EXPORT_NAME,
+ cluster.clusterEndpoint.hostname
+ );
+
+ exportValue(
+ this,
+ isc.environmentName,
+ DbStack.CLUSTER_READ_ENDPOINT_EXPORT_NAME,
+ cluster.clusterReadEndpoint.hostname
+ );
+
+ createParameter(
+ this,
+ "cluster.reader",
+ cluster.clusterReadEndpoint.hostname
+ );
+ createParameter(
+ this,
+ "cluster.writer",
+ cluster.clusterEndpoint.hostname
+ );
+ createParameter(
+ this,
+ "cluster.identifier",
+ cluster.clusterIdentifier
+ );
+
+ this.clusterIdentifier = cluster.clusterIdentifier;
+ }
+
+ if (configuration.clusterImport) {
+ createParameter(
+ this,
+ "cluster.reader",
+ configuration.clusterImport.clusterReadEndpoint
+ );
+ createParameter(
+ this,
+ "cluster.writer",
+ configuration.clusterImport.clusterWriteEndpoint
+ );
+ }
  }

- createParamaterGroup(configuration: DbConfiguration) {
- return configuration.customParameterGroup
- ? new ParameterGroup(
- this,
- `parameter-group-${configuration.dbVersion.auroraPostgresMajorVersion}`,
- {
- engine: DatabaseClusterEngine.auroraPostgres({
- version: configuration.dbVersion,
- }),
- parameters: {
- "pg_stat_statements.track": "ALL",
- random_page_cost: "1",
- work_mem: "524288", // 512 MiB
- },
- }
- )
- : ParameterGroup.fromParameterGroupName(
- this,
- "ParameterGroup",
- `default.aurora-postgresql${configuration.dbVersion.auroraPostgresMajorVersion}`
- );
+ createParameterGroups(
+ customVersions: AuroraPostgresEngineVersion[],
+ workmem: number
+ ): IParameterGroup[] {
+ return customVersions.map((version: AuroraPostgresEngineVersion) => {
+ const pg = new ParameterGroup(
+ this,
+ `parameter-group-${version.auroraPostgresMajorVersion}`,
+ {
+ engine: DatabaseClusterEngine.auroraPostgres({
+ version,
+ }),
+ parameters: {
+ "pg_stat_statements.track": "ALL",
+ random_page_cost: "1",
+ work_mem: workmem.toString(),
+ },
+ }
+ );
+
+ // create both cluster parameter group and instance parameter group
+ pg.bindToCluster({});
+ pg.bindToInstance({});
+
+ return pg;
+ });
  }

  createClusterParameters(
- configuration: DbConfiguration,
+ secretArn: string,
+ clusterConfiguration: ClusterConfiguration,
  instanceName: string,
  vpc: IVpc,
  securityGroup: ISecurityGroup,
@@ -133,14 +199,14 @@ export class DbStack extends Stack {
  const secret = Secret.fromSecretCompleteArn(
  this,
  "DBSecret",
- configuration.secretArn
+ secretArn
  );

  return {
  engine: DatabaseClusterEngine.auroraPostgres({
- version: configuration.dbVersion,
+ version: clusterConfiguration.dbVersion,
  }),
- instances: configuration.instances,
+ instances: clusterConfiguration.instances,
  instanceUpdateBehaviour: InstanceUpdateBehaviour.ROLLING,
  instanceIdentifierBase: instanceName + "-",
  cloudwatchLogsExports: ["postgresql"],
@@ -161,7 +227,7 @@ export class DbStack extends Stack {
  vpcSubnets: {
  subnetType: SubnetType.PRIVATE_WITH_EGRESS,
  },
- instanceType: configuration.dbInstanceType,
+ instanceType: clusterConfiguration.dbInstanceType,
  parameterGroup,
  },
  credentials: Credentials.fromPassword(
@@ -169,39 +235,44 @@ export class DbStack extends Stack {
  secret.secretValueFromJson("db.superuser.password")
  ),
  parameterGroup,
- storageEncrypted: true,
+ // storageEncrypted: clusterConfiguration.storageEncrypted ?? true,
  monitoringInterval: Duration.seconds(30),
  };
  }

  createAuroraCluster(
  isc: InfraStackConfiguration,
- configuration: DbConfiguration
+ configuration: DbConfiguration,
+ clusterConfiguration: ClusterConfiguration,
+ parameterGroups: IParameterGroup[]
  ): DatabaseCluster {
  const instanceName = isc.environmentName + "-db";
  const securityGroup = SecurityGroup.fromSecurityGroupId(
  this,
  "securitygroup",
- configuration.securityGroupId
+ clusterConfiguration.securityGroupId
  );
- const parameterGroup = this.createParamaterGroup(configuration);
  const vpc = configuration.vpc
  ? configuration.vpc
  : importVpc(this, isc.environmentName);

  const parameters = this.createClusterParameters(
- configuration,
+ configuration.secretArn,
+ clusterConfiguration,
  instanceName,
  vpc,
  securityGroup,
- parameterGroup
+ parameterGroups[0]
  );

  // create cluster from the snapshot or from the scratch
- const cluster = configuration.snapshotIdentifier
+ const cluster = clusterConfiguration.snapshotIdentifier
  ? new DatabaseClusterFromSnapshot(this, instanceName, {
  ...parameters,
- ...{ snapshotIdentifier: configuration.snapshotIdentifier },
+ ...{
+ snapshotIdentifier:
+ clusterConfiguration.snapshotIdentifier,
+ },
  })
  : new DatabaseCluster(this, instanceName, parameters);

@@ -216,8 +287,6 @@ export class DbStack extends Stack {
  }
  cfnInstances.forEach((cfnInstance) => delete cfnInstance.engineVersion);

- cluster.node.addDependency(parameterGroup);
-
  return cluster;
  }
  }
@@ -1,4 +1,4 @@
- import { Environment } from "aws-cdk-lib";
+ import { Environment } from "aws-cdk-lib/core";

  export interface InfraStackConfiguration {
  readonly env: Environment;
@@ -1,8 +1,8 @@
- import { Stack } from "aws-cdk-lib";
- import { Construct } from "constructs";
  import { IpAddresses, IVpc, SubnetType, Vpc } from "aws-cdk-lib/aws-ec2";
  import { InfraStackConfiguration } from "./intra-stack-configuration";
  import { exportValue } from "../import-util";
+ import { Stack } from "aws-cdk-lib/core";
+ import { Construct } from "constructs/lib/construct";

  export interface NetworkConfiguration {
  readonly vpcName: string;