@openhi/platform 0.0.0 → 0.0.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/lib/{openhi.d.ts → index.d.mts} +42 -6
- package/lib/index.d.ts +205 -2
- package/lib/index.js +847 -17
- package/lib/index.js.map +1 -0
- package/lib/index.mjs +840 -0
- package/lib/index.mjs.map +1 -0
- package/package.json +33 -24
- package/lib/openhi.js +0 -108
- package/lib/service.d.ts +0 -36
- package/lib/service.js +0 -203
- package/lib/templates/generate-templates.d.ts +0 -5
- package/lib/templates/generate-templates.js +0 -45
- package/lib/templates/service-template.d.ts +0 -33
- package/lib/templates/service-template.js +0 -42
- package/lib/templates/src/README.md.d.ts +0 -5
- package/lib/templates/src/README.md.js +0 -19
- package/lib/templates/src/app-test.d.ts +0 -5
- package/lib/templates/src/app-test.js +0 -61
- package/lib/templates/src/app.d.ts +0 -5
- package/lib/templates/src/app.js +0 -27
- package/lib/templates/src/config.d.ts +0 -5
- package/lib/templates/src/config.js +0 -23
- package/lib/templates/src/data/README.md.d.ts +0 -5
- package/lib/templates/src/data/README.md.js +0 -19
- package/lib/templates/src/data/models/README.md.d.ts +0 -5
- package/lib/templates/src/data/models/README.md.js +0 -19
- package/lib/templates/src/infrastructure/README.md.d.ts +0 -5
- package/lib/templates/src/infrastructure/README.md.js +0 -19
- package/lib/templates/src/integrations/README.md.d.ts +0 -5
- package/lib/templates/src/integrations/README.md.js +0 -19
- package/lib/templates/src/main.d.ts +0 -5
- package/lib/templates/src/main.js +0 -15
- package/lib/templates/src/workflows/README.md.d.ts +0 -5
- package/lib/templates/src/workflows/README.md.js +0 -19
- package/lib/workflows/aws-teardown-workflow.d.ts +0 -13
- package/lib/workflows/aws-teardown-workflow.js +0 -222
- package/lib/workflows/build-dev-workflow.d.ts +0 -12
- package/lib/workflows/build-dev-workflow.js +0 -48
- package/lib/workflows/build-stage-workflow.d.ts +0 -12
- package/lib/workflows/build-stage-workflow.js +0 -60
package/lib/index.mjs
ADDED
@@ -0,0 +1,840 @@
var __create = Object.create;
var __defProp = Object.defineProperty;
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
var __getOwnPropNames = Object.getOwnPropertyNames;
var __getProtoOf = Object.getPrototypeOf;
var __hasOwnProp = Object.prototype.hasOwnProperty;
var __commonJS = (cb, mod) => function __require() {
  return mod || (0, cb[__getOwnPropNames(cb)[0]])((mod = { exports: {} }).exports, mod), mod.exports;
};
var __copyProps = (to, from, except, desc) => {
  if (from && typeof from === "object" || typeof from === "function") {
    for (let key of __getOwnPropNames(from))
      if (!__hasOwnProp.call(to, key) && key !== except)
        __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
  }
  return to;
};
var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(
  // If the importer is in node compatibility mode or this is not an ESM
  // file that has been converted to a CommonJS file using a Babel-
  // compatible transform (i.e. "__esModule" has not been set), then set
  // "default" to the CommonJS "module.exports" for node compatibility.
  isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target,
  mod
));

// ../config/lib/open-hi-config.js
var require_open_hi_config = __commonJS({
  "../config/lib/open-hi-config.js"(exports) {
    "use strict";
    Object.defineProperty(exports, "__esModule", { value: true });
    exports.OPEN_HI_DEPLOYMENT_TARGET_ROLE = exports.OPEN_HI_STAGE = void 0;
    exports.OPEN_HI_STAGE = {
      /**
       * Development environment, typically used for testing and development.
       */
      DEV: "dev",
      /**
       * Staging environment, used for pre-production testing.
       */
      STAGE: "stage",
      /**
       * Production environment, used for live deployments.
       */
      PROD: "prod"
    };
    exports.OPEN_HI_DEPLOYMENT_TARGET_ROLE = {
      /**
       * The primary deployment target for this stage (main account/region).
       * For example, the base DynamoDB region for global tables.
       */
      PRIMARY: "primary",
      /**
       * A secondary deployment target for this stage (additional account/region).
       * For example, a replica region for a global DynamoDB table, or another cell in the same region.
       */
      SECONDARY: "secondary"
    };
  }
});

// ../config/lib/index.js
var require_lib = __commonJS({
  "../config/lib/index.js"(exports) {
    "use strict";
    var __createBinding = exports && exports.__createBinding || (Object.create ? (function(o, m, k, k2) {
      if (k2 === void 0) k2 = k;
      var desc = Object.getOwnPropertyDescriptor(m, k);
      if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
        desc = { enumerable: true, get: function() {
          return m[k];
        } };
      }
      Object.defineProperty(o, k2, desc);
    }) : (function(o, m, k, k2) {
      if (k2 === void 0) k2 = k;
      o[k2] = m[k];
    }));
    var __exportStar = exports && exports.__exportStar || function(m, exports2) {
      for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports2, p)) __createBinding(exports2, m, p);
    };
    Object.defineProperty(exports, "__esModule", { value: true });
    __exportStar(require_open_hi_config(), exports);
  }
});

// src/openhi.ts
import { merge } from "ts-deepmerge";

// src/service.ts
var import_config2 = __toESM(require_lib());
import { sep as sep2 } from "path";
import {
  AwsDeploymentTarget,
  TurboRepo,
  VERSION
} from "@codedrifters/configulator";
import { paramCase, pascalCase } from "change-case";
import { awscdk } from "projen";
import { NodePackageManager, Transform } from "projen/lib/javascript";

// src/templates/service-template.ts
import { existsSync, unlinkSync } from "fs";
import { sep } from "path";
import { SampleFile } from "projen";
var ServiceTemplate = class {
  constructor(service, options) {
    this.service = service;
    this.options = options;
    this.fullFilePath = [
      this.service.project.outdir,
      this.options.filePath
    ].join(sep);
    this.createTemplate();
  }
  createTemplate() {
    if (this.options.overwrite) {
      this.overwriteTemplate();
    }
    this.file = new SampleFile(this.service.project, this.options.filePath, {
      contents: this.options.contents.join("\n")
    });
  }
  overwriteTemplate() {
    if (existsSync(this.fullFilePath)) {
      unlinkSync(this.fullFilePath);
    }
    this.file = void 0;
  }
};

// src/templates/src/app.ts
var SrcApp = class extends ServiceTemplate {
  constructor(service, options) {
    super(service, {
      ...options,
      filePath: "src/app.ts",
      contents: [
        `import { OpenHiApp, ${service.constructName} } from "@openhi/constructs";`,
        `import { CONFIG } from "./config";`,
        ``,
        `const app = new OpenHiApp({ config: CONFIG });`,
        ``,
        `const stacks = app.environments.map((e) => {`,
        ` return new ${service.constructName}(e, { config: e.config });`,
        `});`,
        ``,
        `export { app, stacks };`,
        ``
      ]
    });
  }
};

// src/templates/src/app-test.ts
var SrcAppTest = class extends ServiceTemplate {
  constructor(service, options) {
    super(service, {
      ...options,
      filePath: "src/app.test.ts",
      contents: [
        `import { Template } from "aws-cdk-lib/assertions";`,
        `import { Code, type CodeConfig } from "aws-cdk-lib/aws-lambda";`,
        `import { stacks } from "./app"`,
        ``,
        `/** Replace volatile CDK asset S3 keys/buckets with static values for snapshot stability. */`,
        `function normalizeTemplateForSnapshot(template: Record<string, unknown>): Record<string, unknown> {`,
        ` const staticBucket = "mock-assets-bucket";`,
        ` const staticKey = "mock-asset-key.zip";`,
        ` const json = JSON.stringify(template);`,
        ` const normalized = json`,
        ` .replace(/"S3Key":\\s*"[^"]+\\.zip"/g, \\\`"S3Key": "\${staticKey}"\\\`)`,
        ` .replace(/"S3Bucket":\\s*"[^"]+"/g, \\\`"S3Bucket": "\${staticBucket}"\\\`);`,
        ` return JSON.parse(normalized) as Record<string, unknown>;`,
        `}`,
        ``,
        `let fromAssetMock: jest.SpyInstance;`,
        ``,
        `beforeAll(() => {`,
        ` fromAssetMock = jest.spyOn(Code, "fromAsset").mockReturnValue({`,
        ` isInline: false,`,
        ` bind: (): CodeConfig => ({`,
        ` s3Location: {`,
        ` bucketName: "mock-assets-bucket",`,
        ` objectKey: "mock-asset-key.zip",`,
        ` },`,
        ` }),`,
        ` bindToResource: () => {`,
        ` return;`,
        ` },`,
        ` } as any);`,
        `});`,
        ``,
        `afterAll(() => {`,
        ` fromAssetMock?.mockRestore();`,
        `});`,
        ``,
        `describe("Smoke Test", () => {`,
        ` it("should match previous snapshots", () => {`,
        ` stacks.forEach((s) => {`,
        ` const template = Template.fromStack(s).toJSON();`,
        ` expect(normalizeTemplateForSnapshot(template)).toMatchSnapshot();`,
        ` });`,
        ` });`,
        `});`,
        ``
      ]
    });
  }
};

// src/templates/src/config.ts
import { SourceCode } from "projen";
var SrcConfig = class extends SourceCode {
  constructor(service) {
    super(service.project, "src/config.ts");
    this.line(`import { OpenHiConfig } from "@openhi/config";`);
    this.line("");
    const start = "export const CONFIG: OpenHiConfig = ";
    const mid = JSON.stringify(service.options?.config, null, 2);
    const end = " as const;";
    this.line(`${start}${mid}${end}`);
  }
};

// src/templates/src/data/models/README.md.ts
var DataModelsReadMe = class extends ServiceTemplate {
  constructor(service, options) {
    super(service, {
      ...options,
      filePath: "src/data/models/README.md",
      contents: [
        `# ${service.serviceName}: Data Models`,
        ``,
        `This directory contains data model definitions and other related files for the ${service.serviceName} service.`
      ]
    });
  }
};

// src/templates/src/data/README.md.ts
var DataReadMe = class extends ServiceTemplate {
  constructor(service, options) {
    super(service, {
      ...options,
      filePath: "src/data/README.md",
      contents: [
        `# ${service.serviceName}: Data`,
        ``,
        `This directory contains data models and other related files for the ${service.serviceName} service.`
      ]
    });
  }
};

// src/templates/src/infrastructure/README.md.ts
var InfrastructureReadMe = class extends ServiceTemplate {
  constructor(service, options) {
    super(service, {
      ...options,
      filePath: "src/infrastructure/README.md",
      contents: [
        `# ${service.serviceName}: Infrastructure`,
        ``,
        `This directory contains infrastructure definitions and other related files for the ${service.serviceName} service.`
      ]
    });
  }
};

// src/templates/src/integrations/README.md.ts
var IntegrationsReadMe = class extends ServiceTemplate {
  constructor(service, options) {
    super(service, {
      ...options,
      filePath: "src/integrations/README.md",
      contents: [
        `# ${service.serviceName}: Integrations`,
        ``,
        `This directory contains integration definitions and other related files for the ${service.serviceName} service.`
      ]
    });
  }
};

// src/templates/src/main.ts
var SrcMain = class extends ServiceTemplate {
  constructor(service, options) {
    super(service, {
      ...options,
      filePath: "src/main.ts",
      contents: [`import { app } from "./app";`, ``, `app.synth();`, ``]
    });
  }
};

// src/templates/src/README.md.ts
var SrcReadMe = class extends ServiceTemplate {
  constructor(service, options) {
    super(service, {
      ...options,
      filePath: "src/README.md",
      contents: [
        `# ${service.serviceName}: Service Overview`,
        ``,
        `This directory contains the main entry point for the ${service.serviceName} service.`
      ]
    });
  }
};

// src/templates/src/workflows/README.md.ts
var WorkflowsReadMe = class extends ServiceTemplate {
  constructor(service, options) {
    super(service, {
      ...options,
      filePath: "src/workflows/README.md",
      contents: [
        `# ${service.serviceName}: Workflows`,
        ``,
        `This directory contains workflow definitions and other related files for the ${service.serviceName} service.`
      ]
    });
  }
};

// src/templates/generate-templates.ts
var GenerateTemplates = class {
  constructor(service) {
    this.service = service;
    new SrcApp(this.service);
    new SrcAppTest(this.service);
    new SrcConfig(this.service);
    new SrcMain(this.service);
    new SrcReadMe(this.service);
    new DataReadMe(this.service);
    new DataModelsReadMe(this.service);
    new InfrastructureReadMe(this.service);
    new IntegrationsReadMe(this.service);
    new WorkflowsReadMe(this.service);
  }
};

// src/service.ts
var OPEN_HI_SERVICE_TYPE = {
  AUTH: "auth",
  CORE: "core",
  DATA_SERVICE: "data",
  GLOBAL: "global",
  INTEGRATION: "integration",
  REST_API: "rest-api"
};
var OpenHiService = class {
  constructor(openHi, id, options) {
    this.openHi = openHi;
    this.id = id;
    this.options = options;
    this.addDeploymentTarget = (awsStageType, awsEnvironmentType, envConfig) => {
      new AwsDeploymentTarget(this.project, {
        account: envConfig.account,
        region: envConfig.region,
        awsStageType,
        awsEnvironmentType,
        localDeployment: true,
        localDeploymentConfig: {
          stackPattern: `${awsStageType}/${awsEnvironmentType}/*-${envConfig.account}-${envConfig.region}`,
          roleName: "poweruseraccess"
        },
        ciDeployment: true,
        ciDeploymentConfig: {
          roleArn: `arn:aws:iam::${envConfig.account}:role/GitHubOpenHiDeployer`,
          stackPattern: `${awsStageType}/${awsEnvironmentType}/*-${envConfig.account}-${envConfig.region}`
        },
        branches: awsStageType === import_config2.OPEN_HI_STAGE.DEV ? [
          {
            branch: "feat/*",
            description: [
              "Feature branches for OpenHI (short form).",
              "These branches are used for developing new features."
            ]
          },
          {
            branch: "feature/*",
            description: [
              "Feature branches for OpenHI.",
              "These branches are used for developing new features."
            ]
          },
          {
            branch: "fix/*",
            description: [
              "Fix branches for OpenHI.",
              "This branch pattern is used when a developer is working on a fix on the project."
            ]
          }
        ] : [
          {
            branch: "main",
            description: [
              "Main branch for OpenHI.",
              "This branch is used for the main branch."
            ]
          }
        ]
      });
    };
    this.project = new awscdk.AwsCdkTypeScriptApp({
      /**
       * Top level project config options
       */
      defaultReleaseBranch: "main",
      name: this.serviceName,
      outdir: this.outDir,
      parent: this.openHi.rootProject,
      cdkVersion: this.options?.config?.versions?.cdk?.cdkLibVersion ?? VERSION.AWS_CDK_LIB_VERSION,
      cdkCliVersion: this.options?.config?.versions?.cdk?.cdkCliVersion ?? VERSION.AWS_CDK_CLI_VERSION,
      /**
       * Packaging options
       *
       * TODO: Figure out license for monorepo vs per package?
       */
      licensed: false,
      /**
       * Node configs
       */
      packageManager: NodePackageManager.PNPM,
      /**
       * Turn on prettier formatting
       */
      prettier: true,
      /**
       * Don't generate sample code.
       */
      sampleCode: false,
      /**
       * Make sure jest config is stored outside of package.json and use SWC
       * for faster tests.
       */
      jestOptions: {
        configFilePath: "jest.config.json",
        jestConfig: {
          transform: {
            ["^.+\\.[t]sx?$"]: new Transform("@swc/jest")
          }
        }
      },
      /**
       * SWC for faster testing
       */
      devDeps: ["@swc/jest", "@swc/core"],
      /**
       * Don't package test files.
       */
      npmIgnoreOptions: {
        ignorePatterns: ["*.spec.*", "*.test.*"]
      }
    });
    this.project.deps.removeDependency("ts-jest");
    this.project.addDeps(
      "@openhi/config@workspace:*",
      "@openhi/constructs@workspace:*"
    );
    new TurboRepo(this.project);
    new GenerateTemplates(this);
    [import_config2.OPEN_HI_STAGE.DEV, import_config2.OPEN_HI_STAGE.STAGE, import_config2.OPEN_HI_STAGE.PROD].forEach(
      (stage) => {
        const targets = this.options?.config?.deploymentTargets?.[stage];
        if (targets?.primary) {
          this.addDeploymentTarget(
            stage,
            import_config2.OPEN_HI_DEPLOYMENT_TARGET_ROLE.PRIMARY,
            targets.primary
          );
        }
        targets?.secondary?.forEach((env) => {
          this.addDeploymentTarget(
            stage,
            import_config2.OPEN_HI_DEPLOYMENT_TARGET_ROLE.SECONDARY,
            env
          );
        });
      }
    );
  }
  get constructName() {
    return pascalCase(["open", "hi", this.id, "service"].join("-"));
  }
  get outDir() {
    return [
      paramCase(this.openHi.id),
      ...this.typeDir.map((x) => paramCase(x)),
      paramCase(this.id)
    ].join(sep2);
  }
  get serviceName() {
    return [paramCase(this.openHi.id), paramCase(this.id)].join("-");
  }
  get typeDir() {
    return [];
  }
};

// src/workflows/aws-teardown-workflow.ts
var import_config3 = __toESM(require_lib());
import {
  AwsDeploymentConfig,
  MonorepoProject
} from "@codedrifters/configulator";
import { Component } from "projen";
import { GitHub, GithubWorkflow } from "projen/lib/github";
import { JobPermission } from "projen/lib/github/workflows-model";
var AwsTeardownWorkflow = class extends Component {
  constructor(rootProject, options) {
    super(rootProject);
    this.rootProject = rootProject;
    const { openhi } = options;
    if (!(rootProject instanceof MonorepoProject)) {
      throw new Error(
        "AwsTeardownWorkflow requires the root project to be a MonorepoProject"
      );
    }
    const github = GitHub.of(this.rootProject);
    if (!github) {
      throw new Error(
        "AwsTeardownWorkflow requires a GitHub component in the root project"
      );
    }
    const devTargetsFor = (service) => AwsDeploymentConfig.of(service.project)?.awsDeploymentTargets.filter(
      (target) => target.awsStageType === import_config3.OPEN_HI_STAGE.DEV && target.ciDeployment
    ) ?? [];
    const coreTargets = devTargetsFor(openhi.core);
    const authTargets = devTargetsFor(openhi.auth);
    const byAccountRegion = /* @__PURE__ */ new Map();
    [...authTargets, ...coreTargets].forEach((t) => {
      const key = `${t.account}-${t.region}`;
      if (!byAccountRegion.has(key)) byAccountRegion.set(key, t);
    });
    const awsDestructionTargets = Array.from(byAccountRegion.values());
    const workflow = new GithubWorkflow(github, "teardown-dev");
    workflow.on({
      workflowDispatch: {},
      schedule: [
        {
          cron: "32 6 * * *"
          // Every day at 6:32 AM UTC
        }
      ],
      delete: {
        branches: ["feature/*", "feat/*", "fix/*"]
      }
      /* for debugging
      push: {
        branches: ["feature/*"],
      },
      */
    });
    awsDestructionTargets.forEach((target) => {
      const {
        awsStageType,
        awsEnvironmentType,
        account,
        region,
        ciDeploymentConfig
      } = target;
      const { roleArn } = ciDeploymentConfig ?? {};
      workflow.addJob(`teardown-${account}-${region}`.toLowerCase(), {
        name: `Teardown Stacks in ${target.account}/${target.region}`,
        //if: "github.event.ref_type == 'branch'",
        runsOn: ["ubuntu-latest"],
        permissions: {
          contents: JobPermission.READ,
          idToken: JobPermission.WRITE
        },
        env: {
          REPO: "${{ github.repository }}",
          REGIONS: [region].join(" ")
        },
        steps: [
          /**
           * Configure AWS creds.
           */
          {
            name: `AWS Creds ${awsStageType}/${awsEnvironmentType}/${account}/${region}`,
            uses: "aws-actions/configure-aws-credentials@v4",
            with: {
              "role-to-assume": roleArn,
              "aws-region": region,
              "role-duration-seconds": 900
              // 15 minutes
            }
          },
          /**
           * Fetch all branch names in the repo
           */
          {
            name: "Fetch All Branches",
            id: "fetch_branches",
            uses: "actions/github-script@v7",
            with: {
              script: [
                "const all = await github.paginate(github.rest.repos.listBranches, {",
                " owner: context.repo.owner,",
                " repo: context.repo.repo,",
                " per_page: 100",
                "});",
                "const names = all.map(b => b.name);",
                "console.log(`Found branches: ${names}`);",
                'core.setOutput("json", JSON.stringify(names));'
              ].join("\n")
            }
          },
          /**
           * Save branches to a file
           */
          {
            name: "Save Branches to File",
            run: [
              'echo "Saving branches to file"',
              "echo '${{ steps.fetch_branches.outputs.json }}' | jq -r '.[]' | sort -u > branches.txt",
              'echo "Branches:"',
              "cat branches.txt"
            ].join("\n")
          },
          /**
           * Find all stacks tagged with a stage of dev. for this repo. return
           * tag and resource arn.
           */
          {
            name: "Find Stacks by Tag",
            id: "find_stacks",
            run: [
              "set -euo pipefail",
              ": > candidates.txt # columns: arn region branchTag",
              "# Build tag filters",
              'TAG_FILTERS=( "Key=openhi:repo-name,Values=$REPO" )',
              `TAG_FILTERS+=( "Key=openhi:stage-type,Values=${import_config3.OPEN_HI_STAGE.DEV}" )`,
              "for r in $REGIONS; do",
              ` echo "Scanning region: $r"`,
              " aws resourcegroupstaggingapi get-resources \\",
              ' --region "$r" \\',
              ' --resource-type-filters "cloudformation:stack" \\',
              ' --tag-filters "${TAG_FILTERS[@]}" \\',
              ` | jq -r --arg r "$r" '`,
              " .ResourceTagMappingList[]",
              " | . as $res",
              ' | ($res.Tags[] | select(.Key=="openhi:branch-name") | .Value) as $branch',
              ' | [$res.ResourceARN, $r, ($branch // "")]',
              " | @tsv",
              " ' >> candidates.txt",
              "done",
              "echo 'Tagged stacks:'",
              `(echo -e "ARN\\tREGION\\tBRANCH"; cat candidates.txt) | column -t -s $'\\t'`
            ].join("\n")
          },
          /**
           * Determine which stacks are orphans that no longer have a matching branch.
           * Save those to a file for the next step.
           */
          {
            name: "Determine Orphan Stacks (No Matching Branch)",
            run: [
              "set -euo pipefail",
              ": > orphans.txt # arn region branch",
              "while IFS=$'\\t' read -r arn region branch; do",
              ' [ -z "$arn" ] && continue',
              ' if [ -z "$branch" ]; then',
              " # If no Branch tag, treat as not-a-preview; skip (or flip to delete if you want)",
              " continue",
              " fi",
              ' if ! grep -Fxq "$branch" branches.txt; then',
              ' echo -e "$arn\\t$region\\t$branch" >> orphans.txt',
              " fi",
              "done < candidates.txt",
              "",
              "if [ -s orphans.txt ]; then",
              ' echo "Orphan stacks (no matching branch):"',
              ` (echo -e "ARN\\tREGION\\tBRANCH"; cat orphans.txt) | column -t -s $'\\t'`,
              "else",
              ' echo "No orphan stacks found."',
              "fi"
            ].join("\n")
          },
          /**
           * Delete orphan stacks.
           */
          {
            name: "Delete Orphan Stacks",
            if: "hashFiles('orphans.txt') != ''",
            run: [
              "set -euo pipefail",
              "while IFS=$'\\t' read -r arn region branch; do",
              ' [ -z "$arn" ] && continue',
              ` stack_name=$(cut -d'/' -f2 <<<"$arn")`,
              ' echo "Deleting $stack_name (branch=$branch) in $region"',
              ' aws cloudformation delete-stack --region "$region" --stack-name "$stack_name" || true',
              "done < orphans.txt"
            ].join("\n")
          }
        ]
      });
    });
  }
};

// src/workflows/build-dev-workflow.ts
import { AwsDeployWorkflow } from "@codedrifters/configulator";
import { Component as Component2 } from "projen";
var BuildDevelopmentWorkflow = class extends Component2 {
  constructor(parent, options) {
    super(parent, "dev-workflow");
    const { openhi } = options;
    const { awsDeploymentTargets: globalTargets } = new AwsDeployWorkflow(
      openhi.global.project,
      {
        buildWorkflow: parent.buildWorkflow
      }
    );
    const { awsDeploymentTargets: authTargets } = new AwsDeployWorkflow(
      openhi.auth.project,
      {
        buildWorkflow: parent.buildWorkflow,
        deployAfterTargets: [...globalTargets]
      }
    );
    const { awsDeploymentTargets: dataTargets } = new AwsDeployWorkflow(
      openhi.data.project,
      {
        buildWorkflow: parent.buildWorkflow,
        deployAfterTargets: [...globalTargets, ...authTargets]
      }
    );
    new AwsDeployWorkflow(openhi.restApi.project, {
      buildWorkflow: parent.buildWorkflow,
      deployAfterTargets: [...globalTargets, ...authTargets, ...dataTargets]
    });
  }
};

// src/workflows/build-stage-workflow.ts
var import_config4 = __toESM(require_lib());
import { AwsDeployWorkflow as AwsDeployWorkflow2 } from "@codedrifters/configulator";
import { Component as Component3 } from "projen";
var BuildStageWorkflow = class extends Component3 {
  constructor(parent, options) {
    super(parent, "stage-workflow");
    const { openhi } = options;
    const { awsDeploymentTargets: globalTargets, buildWorkflow } = new AwsDeployWorkflow2(openhi.global.project, {
      awsStageType: import_config4.OPEN_HI_STAGE.STAGE,
      buildWorkflowOptions: {
        name: "deploy-stage",
        workflowTriggers: {
          push: {
            branches: ["main"]
          },
          workflowDispatch: {}
        }
      }
    });
    const { awsDeploymentTargets: authTargets } = new AwsDeployWorkflow2(
      openhi.auth.project,
      {
        awsStageType: import_config4.OPEN_HI_STAGE.STAGE,
        buildWorkflow,
        deployAfterTargets: [...globalTargets]
      }
    );
    const { awsDeploymentTargets: dataTargets } = new AwsDeployWorkflow2(
      openhi.data.project,
      {
        awsStageType: import_config4.OPEN_HI_STAGE.STAGE,
        buildWorkflow,
        deployAfterTargets: [...globalTargets, ...authTargets]
      }
    );
    new AwsDeployWorkflow2(openhi.restApi.project, {
      awsStageType: import_config4.OPEN_HI_STAGE.STAGE,
      buildWorkflow,
      deployAfterTargets: [...globalTargets, ...authTargets, ...dataTargets]
    });
  }
};

// src/openhi.ts
var OpenHi = class {
  constructor(options = {}) {
    this.options = merge({ name: "openhi" }, options);
    this.rootProject = this.options.rootProject;
    this.id = this.options.rootProject?.name ?? "openhi";
    if (!this.rootProject) {
      throw new Error(
        "OpenHi requires a MonorepoProject to be passed in via the rootProject option"
      );
    }
    const rootProject = this.rootProject;
    this.global = new OpenHiService(this, "global", {
      type: OPEN_HI_SERVICE_TYPE.GLOBAL,
      config: merge(
        this.options?.defaultConfig ?? {},
        this.options?.globalServiceConfig ?? {}
      )
    });
    this.auth = new OpenHiService(this, "auth", {
      type: OPEN_HI_SERVICE_TYPE.AUTH,
      config: merge(
        this.options?.defaultConfig ?? {},
        this.options?.authServiceConfig ?? {}
      )
    });
    this.restApi = new OpenHiService(this, "rest-api", {
      type: OPEN_HI_SERVICE_TYPE.REST_API,
      config: merge(
        this.options?.defaultConfig ?? {},
        this.options?.restApiServiceConfig ?? {}
      )
    });
    this.core = new OpenHiService(this, "core", {
      type: OPEN_HI_SERVICE_TYPE.CORE,
      config: merge(
        this.options?.defaultConfig ?? {},
        this.options?.coreServiceConfig ?? {}
      )
    });
    this.data = new OpenHiService(this, "data", {
      type: OPEN_HI_SERVICE_TYPE.DATA_SERVICE,
      config: merge(
        this.options?.defaultConfig ?? {},
        this.options?.dataServiceConfig ?? {}
      )
    });
    new BuildDevelopmentWorkflow(rootProject, { openhi: this });
    new BuildStageWorkflow(rootProject, { openhi: this });
    new AwsTeardownWorkflow(rootProject, { openhi: this });
  }
};
export {
  OPEN_HI_SERVICE_TYPE,
  OpenHi,
  OpenHiService
};
//# sourceMappingURL=index.mjs.map
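
For orientation, the sketch below shows how the exported `OpenHi` entry point could be consumed from a projen `.projenrc.ts`. It is inferred from the constructor and the `deploymentTargets` lookups in the bundle above; the `MonorepoProject` options and the `synth()` call are assumptions about `@codedrifters/configulator` and projen, not something this diff documents.

```ts
// Hypothetical consumer sketch, inferred from the OpenHi/OpenHiService code above.
// The MonorepoProject options are assumptions; check @codedrifters/configulator for its actual API.
import { MonorepoProject } from "@codedrifters/configulator";
import { OpenHi } from "@openhi/platform";

const rootProject = new MonorepoProject({
  name: "openhi",
  defaultReleaseBranch: "main",
});

// OpenHi wires up the global, auth, rest-api, core, and data services,
// plus the dev/stage deploy workflows and the teardown workflow.
new OpenHi({
  rootProject,
  defaultConfig: {
    deploymentTargets: {
      // Shape follows options?.config?.deploymentTargets?.[stage] in OpenHiService;
      // the account and region values are placeholders.
      dev: { primary: { account: "111111111111", region: "us-east-1" } },
    },
  },
});

rootProject.synth(); // projen projects expose synth(); assumed to apply here as well.
```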