@digitraffic/common 2023.9.13-1 → 2023.9.14-1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -15,7 +15,10 @@ export interface ProxyConfiguration {
  */
 export declare class DbProxyStack extends Stack {
     readonly isc: InfraStackConfiguration;
+    static PROXY_READER_EXPORT_NAME: string;
+    static PROXY_WRITER_EXPORT_NAME: string;
     constructor(scope: Construct, id: string, isc: InfraStackConfiguration, configuration: ProxyConfiguration);
+    setOutputs(proxy: DatabaseProxy): void;
     createProxy(vpc: IVpc, secret: ISecret, configuration: ProxyConfiguration): DatabaseProxy;
     createProxyEndpoints(vpc: IVpc, proxy: DatabaseProxy, securityGroupId: string): CfnDBProxyEndpoint;
 }
@@ -28,6 +28,12 @@ class DbProxyStack extends core_1.Stack {
         const readerEndpoint = this.createProxyEndpoints(vpc, proxy, configuration.securityGroupId);
         (0, parameters_1.createParameter)(this, "proxy.reader", readerEndpoint.attrEndpoint);
         (0, parameters_1.createParameter)(this, "proxy.writer", proxy.endpoint);
+        this.setOutputs(proxy);
+    }
+    setOutputs(proxy) {
+        // if only one instance, then there is no reader-endpoint
+        (0, import_util_1.exportValue)(this, this.isc.environmentName, DbProxyStack.PROXY_READER_EXPORT_NAME, proxy.endpoint);
+        (0, import_util_1.exportValue)(this, this.isc.environmentName, DbProxyStack.PROXY_WRITER_EXPORT_NAME, proxy.endpoint);
     }
     createProxy(vpc, secret, configuration) {
         const proxyId = `${this.isc.environmentName}-proxy`;
@@ -66,4 +72,6 @@ class DbProxyStack extends core_1.Stack {
     }
 }
 exports.DbProxyStack = DbProxyStack;
+DbProxyStack.PROXY_READER_EXPORT_NAME = "db-reader-endpoint";
+DbProxyStack.PROXY_WRITER_EXPORT_NAME = "db-writer-endpoint";
 //# sourceMappingURL=db-proxy-stack.js.map
@@ -6,6 +6,7 @@ import { InfraStackConfiguration } from "./intra-stack-configuration";
 import { Stack } from "aws-cdk-lib/core";
 export interface DbConfiguration {
     readonly cluster?: ClusterConfiguration;
+    readonly clusterImport?: ClusterImportConfiguration;
     readonly customParameterGroups: AuroraPostgresEngineVersion[];
     readonly workmem?: number;
     /** superuser username and password are fetched from this secret, using keys
@@ -23,6 +24,10 @@ export interface ClusterConfiguration {
     readonly dbVersion: AuroraPostgresEngineVersion;
     readonly storageEncrypted?: boolean;
 }
+export interface ClusterImportConfiguration {
+    readonly clusterReadEndpoint: string;
+    readonly clusterWriteEndpoint: string;
+}
 /**
  * Stack that creates DatabaseCluster.
  *
@@ -39,7 +44,7 @@ export declare class DbStack extends Stack {
     static CLUSTER_WRITE_ENDPOINT_EXPORT_NAME: string;
     clusterIdentifier: string;
     constructor(scope: Construct, id: string, isc: InfraStackConfiguration, configuration: DbConfiguration);
-    createParamaterGroups(customVersions: AuroraPostgresEngineVersion[], workmem: number): IParameterGroup[];
+    createParameterGroups(customVersions: AuroraPostgresEngineVersion[], workmem: number): IParameterGroup[];
     createClusterParameters(secretArn: string, clusterConfiguration: ClusterConfiguration, instanceName: string, vpc: IVpc, securityGroup: ISecurityGroup, parameterGroup: IParameterGroup): DatabaseClusterProps;
     createAuroraCluster(isc: InfraStackConfiguration, configuration: DbConfiguration, clusterConfiguration: ClusterConfiguration, parameterGroups: IParameterGroup[]): DatabaseCluster;
 }
@@ -22,7 +22,11 @@ class DbStack extends core_1.Stack {
             env: isc.env,
         });
         this.clusterIdentifier = "";
-        const parameterGroups = this.createParamaterGroups(configuration.customParameterGroups, configuration.workmem ?? 524288);
+        const parameterGroups = this.createParameterGroups(configuration.customParameterGroups, configuration.workmem ?? 524288);
+        if ((configuration.cluster && configuration.clusterImport) ||
+            (!configuration.cluster && !configuration.clusterImport)) {
+            throw new Error("Configure either cluster or clusterImport");
+        }
         // create cluster if this is wanted, should do it only once
         if (configuration.cluster) {
             const cluster = this.createAuroraCluster(isc, configuration, configuration.cluster, parameterGroups);
@@ -34,18 +38,28 @@ class DbStack extends core_1.Stack {
             (0, parameters_1.createParameter)(this, "cluster.identifier", cluster.clusterIdentifier);
             this.clusterIdentifier = cluster.clusterIdentifier;
         }
+        if (configuration.clusterImport) {
+            (0, parameters_1.createParameter)(this, "cluster.reader", configuration.clusterImport.clusterReadEndpoint);
+            (0, parameters_1.createParameter)(this, "cluster.writer", configuration.clusterImport.clusterWriteEndpoint);
+        }
     }
-    createParamaterGroups(customVersions, workmem) {
-        return customVersions.map((version) => new aws_rds_1.ParameterGroup(this, `parameter-group-${version.auroraPostgresMajorVersion}`, {
-            engine: aws_rds_1.DatabaseClusterEngine.auroraPostgres({
-                version,
-            }),
-            parameters: {
-                "pg_stat_statements.track": "ALL",
-                random_page_cost: "1",
-                work_mem: workmem.toString(),
-            },
-        }));
+    createParameterGroups(customVersions, workmem) {
+        return customVersions.map((version) => {
+            const pg = new aws_rds_1.ParameterGroup(this, `parameter-group-${version.auroraPostgresMajorVersion}`, {
+                engine: aws_rds_1.DatabaseClusterEngine.auroraPostgres({
+                    version,
+                }),
+                parameters: {
+                    "pg_stat_statements.track": "ALL",
+                    random_page_cost: "1",
+                    work_mem: workmem.toString(),
+                },
+            });
+            // create both cluster parameter group and instance parameter group
+            pg.bindToCluster({});
+            pg.bindToInstance({});
+            return pg;
+        });
     }
     createClusterParameters(secretArn, clusterConfiguration, instanceName, vpc, securityGroup, parameterGroup) {
         const secret = aws_secretsmanager_1.Secret.fromSecretCompleteArn(this, "DBSecret", secretArn);
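Note for readers: the hunks above add a clusterImport option to DbConfiguration, register its endpoints as "cluster.reader"/"cluster.writer" parameters, and throw "Configure either cluster or clusterImport" when both or neither of cluster and clusterImport are set. For illustration only, a minimal sketch of a configuration object that takes the new import path; the endpoint values are placeholders, and the remaining DbConfiguration fields (secret settings and so on, not shown in this diff) are omitted:

import { AuroraPostgresEngineVersion } from "aws-cdk-lib/aws-rds";

// Hypothetical configuration pointing DbStack at an existing Aurora cluster:
// only clusterImport is set, so the either/or validation passes and no
// cluster is created by the stack.
const dbConfiguration = {
    customParameterGroups: [AuroraPostgresEngineVersion.VER_14_6],
    clusterImport: {
        clusterReadEndpoint: "example.cluster-ro-xxxx.eu-west-1.rds.amazonaws.com",
        clusterWriteEndpoint: "example.cluster-xxxx.eu-west-1.rds.amazonaws.com",
    },
    // ...other required DbConfiguration fields omitted for brevity
};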
package/package.json CHANGED
@@ -1,6 +1,6 @@
 {
     "name": "@digitraffic/common",
-    "version": "2023.9.13-1",
+    "version": "2023.9.14-1",
     "description": "",
     "repository": {
         "type": "git",
@@ -9,7 +9,7 @@ import { ISecret, Secret } from "aws-cdk-lib/aws-secretsmanager";
 import { IVpc, SecurityGroup } from "aws-cdk-lib/aws-ec2";
 import { InfraStackConfiguration } from "./intra-stack-configuration";
 import { DbStack } from "./db-stack";
-import { importVpc } from "../import-util";
+import { exportValue, importVpc } from "../import-util";
 import { createParameter } from "../stack/parameters";
 import { Stack, Duration } from "aws-cdk-lib/core";
 import { Construct } from "constructs/lib/construct";
@@ -27,6 +27,9 @@ export interface ProxyConfiguration {
 export class DbProxyStack extends Stack {
     readonly isc: InfraStackConfiguration;
 
+    public static PROXY_READER_EXPORT_NAME = "db-reader-endpoint";
+    public static PROXY_WRITER_EXPORT_NAME = "db-writer-endpoint";
+
     constructor(
         scope: Construct,
         id: string,
@@ -57,6 +60,24 @@ export class DbProxyStack extends Stack {
 
         createParameter(this, "proxy.reader", readerEndpoint.attrEndpoint);
         createParameter(this, "proxy.writer", proxy.endpoint);
+
+        this.setOutputs(proxy);
+    }
+
+    setOutputs(proxy: DatabaseProxy) {
+        // if only one instance, then there is no reader-endpoint
+        exportValue(
+            this,
+            this.isc.environmentName,
+            DbProxyStack.PROXY_READER_EXPORT_NAME,
+            proxy.endpoint
+        );
+        exportValue(
+            this,
+            this.isc.environmentName,
+            DbProxyStack.PROXY_WRITER_EXPORT_NAME,
+            proxy.endpoint
+        );
     }
 
     createProxy(vpc: IVpc, secret: ISecret, configuration: ProxyConfiguration) {
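For illustration only: a rough sketch of how a downstream stack might consume the proxy endpoints that setOutputs() now exports. The export-name constants resolve to "db-reader-endpoint" and "db-writer-endpoint" in this release; the "<environmentName>-" prefix used below is an assumption about how the exportValue() helper composes the CloudFormation export name, since that helper's implementation is not part of this diff.

import { CfnOutput, Fn, Stack } from "aws-cdk-lib/core";
import { Construct } from "constructs";

// Hypothetical consumer stack, not part of the package.
export class ProxyEndpointConsumerStack extends Stack {
    constructor(scope: Construct, id: string, environmentName: string) {
        super(scope, id);

        // Assumed export-name format: "<environmentName>-db-reader-endpoint".
        const readerEndpoint = Fn.importValue(`${environmentName}-db-reader-endpoint`);
        const writerEndpoint = Fn.importValue(`${environmentName}-db-writer-endpoint`);

        // e.g. surface the imported endpoints to application constructs
        new CfnOutput(this, "ImportedReaderEndpoint", { value: readerEndpoint });
        new CfnOutput(this, "ImportedWriterEndpoint", { value: writerEndpoint });
    }
}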
@@ -26,6 +26,8 @@ import { createParameter } from "../stack/parameters";
 
 export interface DbConfiguration {
     readonly cluster?: ClusterConfiguration;
+    readonly clusterImport?: ClusterImportConfiguration;
+
     readonly customParameterGroups: AuroraPostgresEngineVersion[];
     readonly workmem?: number; // default 524288, 512MiB
 
@@ -47,6 +49,11 @@ export interface ClusterConfiguration {
     readonly storageEncrypted?: boolean; /// default true
 }
 
+export interface ClusterImportConfiguration {
+    readonly clusterReadEndpoint: string;
+    readonly clusterWriteEndpoint: string;
+}
+
 /**
  * Stack that creates DatabaseCluster.
  *
@@ -78,11 +85,18 @@ export class DbStack extends Stack {
             env: isc.env,
         });
 
-        const parameterGroups = this.createParamaterGroups(
+        const parameterGroups = this.createParameterGroups(
            configuration.customParameterGroups,
            configuration.workmem ?? 524288
         );
 
+        if (
+            (configuration.cluster && configuration.clusterImport) ||
+            (!configuration.cluster && !configuration.clusterImport)
+        ) {
+            throw new Error("Configure either cluster or clusterImport");
+        }
+
         // create cluster if this is wanted, should do it only once
         if (configuration.cluster) {
             const cluster = this.createAuroraCluster(
@@ -131,29 +145,47 @@ export class DbStack extends Stack {
 
             this.clusterIdentifier = cluster.clusterIdentifier;
         }
+
+        if (configuration.clusterImport) {
+            createParameter(
+                this,
+                "cluster.reader",
+                configuration.clusterImport.clusterReadEndpoint
+            );
+            createParameter(
+                this,
+                "cluster.writer",
+                configuration.clusterImport.clusterWriteEndpoint
+            );
+        }
     }
 
-    createParamaterGroups(
+    createParameterGroups(
         customVersions: AuroraPostgresEngineVersion[],
         workmem: number
     ): IParameterGroup[] {
-        return customVersions.map(
-            (version: AuroraPostgresEngineVersion) =>
-                new ParameterGroup(
-                    this,
-                    `parameter-group-${version.auroraPostgresMajorVersion}`,
-                    {
-                        engine: DatabaseClusterEngine.auroraPostgres({
-                            version,
-                        }),
-                        parameters: {
-                            "pg_stat_statements.track": "ALL",
-                            random_page_cost: "1",
-                            work_mem: workmem.toString(),
-                        },
-                    }
-                )
-        );
+        return customVersions.map((version: AuroraPostgresEngineVersion) => {
+            const pg = new ParameterGroup(
+                this,
+                `parameter-group-${version.auroraPostgresMajorVersion}`,
+                {
+                    engine: DatabaseClusterEngine.auroraPostgres({
+                        version,
+                    }),
+                    parameters: {
+                        "pg_stat_statements.track": "ALL",
+                        random_page_cost: "1",
+                        work_mem: workmem.toString(),
+                    },
+                }
+            );
+
+            // create both cluster parameter group and instance parameter group
+            pg.bindToCluster({});
+            pg.bindToInstance({});
+
+            return pg;
+        });
     }
 
     createClusterParameters(
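The reworked createParameterGroups() now calls bindToCluster({}) and bindToInstance({}) on each group. In aws-cdk-lib, a ParameterGroup creates its cluster-level and instance-level CloudFormation resources lazily when it is bound, so binding both targets up front should produce both a DB cluster parameter group and a DB instance parameter group regardless of how the group is referenced later. A minimal standalone sketch of that behaviour, with made-up stack and construct IDs:

import { App, Stack } from "aws-cdk-lib";
import {
    AuroraPostgresEngineVersion,
    DatabaseClusterEngine,
    ParameterGroup,
} from "aws-cdk-lib/aws-rds";

const app = new App();
const stack = new Stack(app, "ParameterGroupDemo");

const pg = new ParameterGroup(stack, "parameter-group-14", {
    engine: DatabaseClusterEngine.auroraPostgres({
        version: AuroraPostgresEngineVersion.VER_14_6,
    }),
    parameters: {
        "pg_stat_statements.track": "ALL",
        random_page_cost: "1",
        work_mem: "524288",
    },
});

// Force creation of both parameter group variants; without these calls only
// the variant actually bound by a cluster or instance gets synthesized.
pg.bindToCluster({});
pg.bindToInstance({});

app.synth();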