carlin 1.36.10 → 1.36.12
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.js +4497 -0
- package/package.json +5 -5
package/dist/index.js
ADDED
|
@@ -0,0 +1,4497 @@
|
|
|
1
|
+
import 'dotenv/config';
|
|
2
|
+
import AWS from 'aws-sdk';
|
|
3
|
+
import log5 from 'npmlog';
|
|
4
|
+
import yaml4 from 'js-yaml';
|
|
5
|
+
import 'uglify-js';
|
|
6
|
+
import 'prettier';
|
|
7
|
+
import git from 'simple-git';
|
|
8
|
+
import findUpSync from 'findup-sync';
|
|
9
|
+
import * as fs3 from 'fs';
|
|
10
|
+
import fs3__default from 'fs';
|
|
11
|
+
import { pascalCase, constantCase, kebabCase, camelCase } from 'change-case';
|
|
12
|
+
import { stdout } from 'process';
|
|
13
|
+
import childProcess from 'child_process';
|
|
14
|
+
import * as path from 'path';
|
|
15
|
+
import path__default from 'path';
|
|
16
|
+
import { ValidateTemplateCommand, CloudFormationClient, DeleteStackCommand, CreateStackCommand, UpdateStackCommand, UpdateTerminationProtectionCommand, DescribeStacksCommand, DescribeStackEventsCommand, DescribeStackResourceCommand } from '@aws-sdk/client-cloudformation';
|
|
17
|
+
import { glob } from 'glob';
|
|
18
|
+
import mime from 'mime-types';
|
|
19
|
+
import deepmerge from 'deepmerge';
|
|
20
|
+
import { hideBin } from 'yargs/helpers';
|
|
21
|
+
import yargs from 'yargs';
|
|
22
|
+
import * as esbuild from 'esbuild';
|
|
23
|
+
import { builtinModules } from 'node:module';
|
|
24
|
+
import { typescriptConfig } from '@ttoss/config';
|
|
25
|
+
import * as fs4 from 'node:fs';
|
|
26
|
+
import fs4__default from 'node:fs';
|
|
27
|
+
import * as path3 from 'node:path';
|
|
28
|
+
import path3__default from 'node:path';
|
|
29
|
+
import AdmZip from 'adm-zip';
|
|
30
|
+
import importSync from 'import-sync';
|
|
31
|
+
import semver from 'semver';
|
|
32
|
+
import deepEqual from 'deep-equal';
|
|
33
|
+
|
|
34
|
+
var __require = /* @__PURE__ */ ((x) => typeof require !== "undefined" ? require : typeof Proxy !== "undefined" ? new Proxy(x, {
|
|
35
|
+
get: (a, b) => (typeof require !== "undefined" ? require : a)[b]
|
|
36
|
+
}) : x)(function(x) {
|
|
37
|
+
if (typeof require !== "undefined") return require.apply(this, arguments);
|
|
38
|
+
throw Error('Dynamic require of "' + x + '" is not supported');
|
|
39
|
+
});
|
|
40
|
+
|
|
41
|
+
// src/config.ts
|
|
42
|
+
var NAME = "carlin";
|
|
43
|
+
var AWS_DEFAULT_REGION = "us-east-1";
|
|
44
|
+
var CLOUDFRONT_REGION = "us-east-1";
|
|
45
|
+
var NODE_VERSION = "20";
|
|
46
|
+
var NODE_RUNTIME = `nodejs${NODE_VERSION}.x`;
|
|
47
|
+
|
|
48
|
+
// src/utils/addGroupToOptions.ts
|
|
49
|
+
var addGroupToOptions = (options9, group) => {
  // Tags every yargs option in `options9` with the given group label
  // (mutating each option in place) and returns the same object so the
  // call can be used inline when building a command's options.
  for (const option of Object.values(options9)) {
    option.group = group;
  }
  return options9;
};
|
|
55
|
+
var logPrefix = "codebuild";
// Interval between CodeBuild status polls, in milliseconds (10s).
var WAIT_TIME = 10 * 1e3;
// Polls CodeBuild until the build identified by `buildId` finishes.
// Resolves with the build object on SUCCEEDED, rejects on FAILED/FAILURE,
// and resolves with `undefined` for any non-terminal state so the
// while-loop below polls again. Note: after resolve6(executedBuild) the
// trailing resolve6(void 0) is a harmless no-op — a promise settles only
// once.
var waitCodeBuildFinish = async ({
  buildId,
  name
}) => {
  const codeBuild2 = new AWS.CodeBuild();
  let result;
  const checkIfBuildIsFinished = async () => {
    const { builds } = await codeBuild2.batchGetBuilds({ ids: [buildId] }).promise();
    return new Promise((resolve6, reject) => {
      // Wait WAIT_TIME before reporting, throttling the polling loop.
      setTimeout(() => {
        const executedBuild = builds?.find(({ id }) => {
          return id === buildId;
        });
        log5.info(
          logPrefix,
          `Build status of ${name || buildId}: ${executedBuild?.buildStatus}`
        );
        if (executedBuild && executedBuild.currentPhase === "COMPLETED") {
          if (executedBuild.buildStatus === "SUCCEEDED") {
            resolve6(executedBuild);
          } else if (["FAILED", "FAILURE"].includes(executedBuild.buildStatus || "")) {
            reject(new Error(`Cannot execute build ${buildId}.`));
          }
        }
        // Non-terminal (or already-settled) case: yields undefined so the
        // caller's while-loop issues another poll.
        resolve6(void 0);
      }, WAIT_TIME);
    });
  };
  while (!result) {
    result = await checkIfBuildIsFinished();
  }
  return result;
};
|
|
90
|
+
var startCodeBuildBuild = async ({
  projectName
}) => {
  // Kicks off a CodeBuild run for `projectName` and returns the build
  // descriptor; throws when the API response carries no build.
  const client = new AWS.CodeBuild();
  const response = await client.startBuild({ projectName }).promise();
  if (!response.build) {
    throw new Error(`Cannot start ${projectName} build`);
  }
  return response.build;
};
|
|
100
|
+
|
|
101
|
+
// src/utils/environmentVariables.ts
|
|
102
|
+
// In-process store for carlin-level variables (BRANCH, PROJECT, REGION,
// ENVIRONMENT, ...), set from CLI/config and read all over the tool.
var cache = /* @__PURE__ */ new Map();
var getEnvVar = (key) => {
  // Only truthy stored values are exposed; a missing or falsy entry reads
  // as undefined.
  if (!cache.has(key)) {
    return void 0;
  }
  const value = cache.get(key);
  return value ? value : void 0;
};
var setEnvVar = (key, value) => {
  // Setting a falsy value clears the entry instead of storing it.
  if (!value) {
    return cache.delete(key);
  }
  return cache.set(key, value);
};
|
|
112
|
+
// npmlog heading shown before every log line of this CLI.
log5.heading = "exec";
// Returns the AWS account id of the current credentials via STS
// GetCallerIdentity.
var getAwsAccountId = async () => {
  const sts = new AWS.STS();
  const { Account } = await sts.getCallerIdentity().promise();
  return Account;
};
|
|
118
|
+
// Sentinel returned when the branch cannot be determined.
var BRANCH_UNDEFINED = "";
var getCurrentBranch = async () => {
  // An explicit BRANCH variable wins over the local git repository; any
  // git failure (e.g. not inside a repo) degrades to the sentinel.
  const override = getEnvVar("BRANCH");
  if (override) {
    return override;
  }
  try {
    const { current } = await git().branch();
    return current || BRANCH_UNDEFINED;
  } catch (err) {
    return BRANCH_UNDEFINED;
  }
};
|
|
130
|
+
|
|
131
|
+
// src/utils/getEnvironment.ts
|
|
132
|
+
// Returns the configured deployment environment (set via setEnvVar), or
// undefined when none was set.
var getEnvironment = () => {
  return getEnvVar("ENVIRONMENT");
};
|
|
135
|
+
|
|
136
|
+
// src/utils/getIamPath.ts
|
|
137
|
+
// IAM path under which carlin-created IAM entities are namespaced.
var getIamPath = () => `/${NAME}/`;
var readPackageJson = () => {
  // Walks up from the cwd to the nearest package.json; returns an empty
  // object when none is found.
  const packageJsonPath = findUpSync("package.json");
  if (!packageJsonPath) {
    return {};
  }
  const raw = fs3__default.readFileSync(packageJsonPath).toString();
  return JSON.parse(raw);
};
|
|
145
|
+
var getPackageJsonProperty = ({ property }) => {
  // Reads a single property from the nearest package.json, degrading to
  // an empty string when the file is missing or unparsable.
  try {
    const packageJson = readPackageJson();
    return packageJson[property];
  } catch {
    return "";
  }
};
// Convenience accessors for the two properties carlin relies on.
var getPackageName = () => {
  return getPackageJsonProperty({ property: "name" });
};
var getPackageVersion = () => {
  return getPackageJsonProperty({ property: "version" });
};
|
|
158
|
+
var getProjectName = () => {
  // Resolution order: explicit PROJECT variable, then the package.json
  // name, else empty string.
  const fromEnv = getEnvVar("PROJECT");
  if (fromEnv) {
    return fromEnv;
  }
  const name = getPackageName();
  if (!name) {
    return "";
  }
  // For a scoped name ("@scope/pkg"), splitting on [@/] yields
  // ["", "scope", "pkg"], so index 1 is the scope segment (kept as-is for
  // backward compatibility). Unscoped names have no index 1 and fall back
  // to the full name. Previously that fallback was reached by letting
  // pascalCase(undefined) throw and catching it — i.e. exceptions used as
  // control flow; this handles the case explicitly instead.
  const segment = name.split(/[@/]/)[1];
  return pascalCase(segment !== void 0 ? segment : name);
};
|
|
172
|
+
log5.heading = "exec";
|
|
173
|
+
var spawn = (cmd) => {
  // Runs `cmd` as a child process, piping both its stdout and stderr to
  // our stdout. Resolves with {} on exit code 0; rejects with the raw
  // exit code otherwise, or with the spawn error when the process cannot
  // start. NOTE: arguments are split on single spaces, so quoted
  // arguments containing spaces are not supported.
  return new Promise((resolve6, reject) => {
    const [executable, ...args] = cmd.split(" ");
    const child = childProcess.spawn(executable, args);
    const forward = (data) => {
      stdout.write(data);
    };
    child.stdout.on("data", forward);
    child.stderr.on("data", forward);
    child.on("error", (error) => {
      reject(error);
    });
    child.on("close", (code) => {
      if (code === 0) {
        resolve6({});
      } else {
        reject(code);
      }
    });
  });
};
|
|
195
|
+
// Logical names and CloudFormation export names for the carlin base
// stack, all derived from the tool name.
var pascalCaseName = pascalCase(NAME);
var BASE_STACK_NAME = `${pascalCaseName}BaseStack`;
// S3 prefix inside the base-stack bucket where oversized CloudFormation
// templates are uploaded (see uploadTemplateToBaseStackBucket).
var BASE_STACK_BUCKET_TEMPLATES_FOLDER = "cloudformation-templates";
var BASE_STACK_BUCKET_LOGICAL_NAME = `${pascalCaseName}Bucket`;
var BASE_STACK_BUCKET_NAME_EXPORTED_NAME = `${pascalCaseName}BucketNameExportedName`;
var BASE_STACK_LAMBDA_IMAGE_BUILDER_LOGICAL_NAME = `${pascalCaseName}LambdaImageBuilder`;
var BASE_STACK_LAMBDA_IMAGE_BUILDER_EXPORTED_NAME = `${pascalCaseName}LambdaImageBuilderExportedName`;
var BASE_STACK_LAMBDA_LAYER_BUILDER_LOGICAL_NAME = `${pascalCaseName}LambdaLayerBuilder`;
var BASE_STACK_VPC_ID_EXPORTED_NAME = `${pascalCaseName}VPCIDExportedName`;
var BASE_STACK_VPC_DEFAULT_SECURITY_GROUP_EXPORTED_NAME = `${pascalCaseName}DefaultSecurityGroupExportedName`;
var BASE_STACK_VPC_PUBLIC_SUBNET_0_EXPORTED_NAME = `${pascalCaseName}VPCPublicSubnet0ExportedName`;
var BASE_STACK_VPC_PUBLIC_SUBNET_1_EXPORTED_NAME = `${pascalCaseName}VPCPublicSubnet1ExportedName`;
var BASE_STACK_VPC_PUBLIC_SUBNET_2_EXPORTED_NAME = `${pascalCaseName}VPCPublicSubnet2ExportedName`;
|
|
208
|
+
|
|
209
|
+
// src/deploy/config.ts
|
|
210
|
+
var LATEST_DEPLOY_OUTPUTS_FILENAME = "latest-deploy.json";
|
|
211
|
+
|
|
212
|
+
// src/deploy/addDefaults.cloudformation.ts
|
|
213
|
+
// Appends carlin's default CloudFormation Parameters (Environment when
// set, Project) and Tags (Branch, Environment, Package, Project, Version)
// to the given stack params. Tags with falsy values are dropped and tag
// values are stripped of characters outside CloudFormation's allowed set.
var addDefaultsParametersAndTagsToParams = async (params) => {
  const branchName = await getCurrentBranch();
  // NOTE: the four getters below are synchronous; awaiting them is
  // harmless.
  const environment = await getEnvironment();
  const packageName = await getPackageName();
  const packageVersion = await getPackageVersion();
  const projectName = await getProjectName();
  // Characters allowed in tag values: letters, digits and _.:/=+-@.
  const tagValuePattern = /[^a-zA-Z0-9_.:/=+\-@]/g;
  return {
    ...params,
    Parameters: [
      ...params.Parameters || [],
      ...environment ? [{ ParameterKey: "Environment", ParameterValue: environment }] : [],
      { ParameterKey: "Project", ParameterValue: projectName }
    ],
    Tags: [
      ...params.Tags || [],
      { Key: "Branch", Value: branchName },
      ...environment ? [{ Key: "Environment", Value: environment }] : [],
      { Key: "Package", Value: packageName },
      { Key: "Project", Value: projectName },
      { Key: "Version", Value: packageVersion }
    ].filter(({ Value }) => {
      // Drop tags whose value is empty/undefined.
      return !!Value;
    }).map(({ Key, Value }) => {
      return {
        Key,
        Value: Value.replace(tagValuePattern, "")
      };
    })
  };
};
|
|
244
|
+
var addDefaultParametersToTemplate = async (template) => {
  // Injects Project (and, when configured, Environment) as template
  // Parameters with defaults. Author-defined parameters of the same name
  // win because they are spread last.
  const [environment, projectName] = await Promise.all([
    getEnvironment(),
    getProjectName()
  ]);
  const defaults = {
    Project: { Default: projectName, Type: "String" }
  };
  if (environment) {
    defaults.Environment = { Default: environment, Type: "String" };
  }
  template.Parameters = { ...defaults, ...template.Parameters };
};
|
|
257
|
+
var addLogGroupToResources = (template) => {
  // For every Lambda (plain or SAM) function without an explicitly
  // associated log group, appends an AWS::Logs::LogGroup resource named
  // `<key>LogsLogGroup` pointing at /aws/lambda/<function ref>.
  const { Resources } = template;
  const entries = Object.entries(Resources);
  const lambdaTypes = ["AWS::Lambda::Function", "AWS::Serverless::Function"];
  // A log group "belongs" to a function when the function's logical id
  // appears inside the Fn::Join expression of its LogGroupName.
  const hasLogGroupFor = (key) => {
    return entries.some(([, candidate]) => {
      const joined = candidate.Properties?.LogGroupName?.["Fn::Join"] || "";
      return JSON.stringify(joined).includes(key);
    });
  };
  for (const [key, resource] of entries) {
    if (!lambdaTypes.includes(resource.Type)) {
      continue;
    }
    if (hasLogGroupFor(key)) {
      continue;
    }
    Resources[`${key}LogsLogGroup`] = {
      Type: "AWS::Logs::LogGroup",
      DeletionPolicy: "Delete",
      Properties: {
        LogGroupName: { "Fn::Join": ["/", ["/aws/lambda", { Ref: key }]] }
      }
    };
  }
};
|
|
282
|
+
var addEnvironmentsToLambdaResources = async (template) => {
  // Sets the ENVIRONMENT variable on every plain Lambda function in the
  // template; no-op when no environment is configured.
  const environment = getEnvironment();
  for (const [, resource] of Object.entries(template.Resources)) {
    if (resource.Type !== "AWS::Lambda::Function") {
      continue;
    }
    const { Properties } = resource;
    // Lambda@Edge functions are skipped (matched via their description).
    if ((Properties.Description || "").includes("Lambda@Edge")) {
      continue;
    }
    if (!environment) {
      continue;
    }
    Properties.Environment = Properties.Environment || {};
    Properties.Environment.Variables = Properties.Environment.Variables || {};
    Properties.Environment.Variables.ENVIRONMENT = environment;
  }
};
|
|
305
|
+
// Resource types whose data must survive accidental stack deletion.
var CRITICAL_RESOURCES_TYPES = [
  "AWS::Cognito::UserPool",
  "AWS::DynamoDB::Table"
];
var addRetainToCriticalResources = async (template) => {
  // In a named environment, default critical resources to DeletionPolicy
  // "Retain" unless the template author already set a policy.
  const environment = getEnvironment();
  if (!environment) {
    return;
  }
  for (const resource of Object.values(template.Resources)) {
    if (CRITICAL_RESOURCES_TYPES.includes(resource.Type) && !resource.DeletionPolicy) {
      resource.DeletionPolicy = "Retain";
    }
  }
};
|
|
319
|
+
var addAppSyncApiOutputs = async (template) => {
  // For each AppSync GraphQL API, prepends an Output exporting its URL as
  // "<stack-name>:GraphQLApiUrl". Author-defined outputs win on key
  // collision because they are spread last.
  for (const [key, resource] of Object.entries(template.Resources)) {
    if (resource.Type !== "AWS::AppSync::GraphQLApi") {
      continue;
    }
    const urlOutput = {
      Description: `Automatically added by ${NAME}`,
      Value: { "Fn::GetAtt": [key, "GraphQLUrl"] },
      Export: {
        Name: {
          "Fn::Join": [":", [{ Ref: "AWS::StackName" }, "GraphQLApiUrl"]]
        }
      }
    };
    template.Outputs = { [key]: urlOutput, ...template.Outputs };
  }
};
|
|
337
|
+
var addDefaults = async ({
  params,
  template
}) => {
  // Deep-copies the template (JSON round-trip, as before) so the caller's
  // object is never mutated, then layers on all carlin defaults.
  const newTemplate = JSON.parse(JSON.stringify(template));
  await addDefaultParametersToTemplate(newTemplate);
  await addLogGroupToResources(newTemplate);
  await addEnvironmentsToLambdaResources(newTemplate);
  await addAppSyncApiOutputs(newTemplate);
  await addRetainToCriticalResources(newTemplate);
  const newParams = await addDefaultsParametersAndTagsToParams(params);
  return {
    params: newParams,
    template: newTemplate
  };
};
|
|
353
|
+
var logPrefix2 = "s3";
// Single shared S3 client for the upload/empty/delete helpers below.
var s3 = new AWS.S3({ apiVersion: "2006-03-01" });
// Builds the https URL for an object (path-style s3.amazonaws.com
// endpoint).
var getBucketKeyUrl = ({
  bucket,
  key
}) => {
  return `https://s3.amazonaws.com/${bucket}/${key}`;
};
|
|
361
|
+
// Uploads a single object to S3 either from an in-memory `file` buffer
// (with an explicit contentType) or from a local `filePath` (contentType
// inferred from the file extension when not given). Returns the bucket,
// key, version id, and public URL of the uploaded object.
var uploadFileToS3 = async ({
  bucket,
  contentType,
  file,
  filePath,
  key
}) => {
  if (!file && !filePath) {
    throw new Error("file or filePath must be defined");
  }
  let params = {
    Bucket: bucket,
    // Normalize OS-specific path separators in the key to "/".
    Key: key.split(path__default.sep).join("/")
  };
  if (file) {
    params.ContentType = contentType;
    params.Body = file;
  } else if (filePath) {
    const readFile = await fs3__default.promises.readFile(filePath);
    params = {
      ...params,
      ContentType: contentType || mime.contentType(path__default.extname(filePath)) || void 0
    };
    params.Body = Buffer.from(readFile);
  }
  const { Bucket, Key, VersionId } = await s3.upload(params).promise();
  return {
    bucket: Bucket,
    key: Key,
    versionId: VersionId,
    url: getBucketKeyUrl({ bucket: Bucket, key: Key })
  };
};
|
|
394
|
+
var getAllFilesInsideADirectory = async ({
  directory
}) => {
  // Globs everything under `directory` recursively and keeps only regular
  // files (directories are dropped).
  const matches = await glob(`${directory}/**/*`);
  return matches.filter((entry) => {
    return fs3__default.lstatSync(entry).isFile();
  });
};
|
|
403
|
+
// If the bucket has a root-level 404.html, copies it to 404/index.html so
// directory-style error routing also finds it. A missing 404.html is not
// an error: the headObject rejection is mapped to `false` and the copy is
// skipped.
var copyRoot404To404Index = async ({ bucket }) => {
  try {
    const root404Exists = await s3.headObject({
      Bucket: bucket,
      Key: "404.html"
    }).promise().catch(() => {
      return false;
    });
    if (root404Exists) {
      await s3.copyObject({
        Bucket: bucket,
        CopySource: `${bucket}/404.html`,
        Key: "404/index.html"
      }).promise();
    }
  } catch (err) {
    log5.error(logPrefix2, `Cannot copy 404.html to 404/index.html`);
    throw err;
  }
};
|
|
423
|
+
var uploadDirectoryToS3 = async ({
  bucket,
  bucketKey = "",
  directory
}) => {
  // Uploads every file under `directory` to bucket/bucketKey, batching
  // the uploads into round-robin groups of at most ~GROUP_MAX_LENGTH
  // files so the number of concurrent S3 requests stays bounded.
  log5.info(
    logPrefix2,
    `Uploading directory ${directory}/ to ${bucket}/${bucketKey}...`
  );
  const allFiles = await getAllFilesInsideADirectory({ directory });
  if (allFiles.length === 0) {
    throw new Error(`Directory ${directory}/ has no files.`);
  }
  const GROUP_MAX_LENGTH = 63;
  const numberOfGroups = Math.ceil(allFiles.length / GROUP_MAX_LENGTH);
  const aoaOfFiles = allFiles.reduce((acc, file, index) => {
    // Fix: reuse the already-computed groupIndex — the original
    // recomputed `index % numberOfGroups` on the push line.
    const groupIndex = index % numberOfGroups;
    if (!acc[groupIndex]) {
      acc[groupIndex] = [];
    }
    acc[groupIndex].push(file);
    return acc;
  }, []);
  for (const [index, groupOfFiles] of aoaOfFiles.entries()) {
    log5.info(logPrefix2, `Uploading group ${index + 1}/${aoaOfFiles.length}...`);
    // Files within a group are uploaded concurrently; groups run serially.
    await Promise.all(
      groupOfFiles.map((file) => {
        return uploadFileToS3({
          bucket,
          key: path__default.join(bucketKey, path__default.relative(directory, file)),
          filePath: file
        });
      })
    );
  }
};
|
|
459
|
+
// Deletes every object under the `directory` prefix in `bucket`,
// including every object version (the base-stack bucket has versioning
// enabled — see getBucketTemplate). Recurses while listObjectsV2 reports
// a truncated listing.
var emptyS3Directory = async ({
  bucket,
  directory = ""
}) => {
  log5.info(logPrefix2, `${bucket}/${directory} will be empty`);
  try {
    const { Contents, IsTruncated } = await s3.listObjectsV2({
      Bucket: bucket,
      Prefix: directory
    }).promise();
    if (Contents && Contents.length > 0) {
      // For every key, gather all of its version ids.
      const objectsPromises = Contents.filter(({ Key }) => {
        return !!Key;
      }).map(async ({ Key }) => {
        const { Versions = [] } = await s3.listObjectVersions({
          Bucket: bucket,
          Prefix: Key
        }).promise();
        return {
          Key,
          Versions: Versions.map(({ VersionId }) => {
            return VersionId || void 0;
          })
        };
      });
      const objects = await Promise.all(objectsPromises);
      // Flatten into one { Key, VersionId } entry per version, the shape
      // deleteObjects expects.
      const objectsWithVersionsIds = objects.reduce((acc, { Key, Versions }) => {
        const objectWithVersionsIds = Versions.map((VersionId) => {
          return {
            Key,
            VersionId
          };
        });
        return [...acc, ...objectWithVersionsIds];
      }, []);
      await s3.deleteObjects({
        Bucket: bucket,
        Delete: { Objects: objectsWithVersionsIds }
      }).promise();
    }
    if (IsTruncated) {
      // More pages remain: recurse until the prefix is fully empty.
      await emptyS3Directory({ bucket, directory });
    }
    log5.info(logPrefix2, `${bucket}/${directory} is empty.`);
  } catch (err) {
    log5.error(logPrefix2, `Cannot empty ${bucket}/${directory}.`);
    throw err;
  }
};
|
|
508
|
+
// Empties every object (and version) under the prefix, then deletes the
// prefix object itself. Logs and re-throws on failure.
var deleteS3Directory = async ({
  bucket,
  directory = ""
}) => {
  try {
    log5.info(logPrefix2, `${bucket}/${directory} is being deleted...`);
    await emptyS3Directory({ bucket, directory });
    await s3.deleteObject({ Bucket: bucket, Key: directory }).promise();
    log5.info(logPrefix2, `${bucket}/${directory} was deleted.`);
  } catch (error) {
    log5.error(logPrefix2, `Cannot delete ${bucket}/${directory}.`);
    throw error;
  }
};
|
|
522
|
+
|
|
523
|
+
// src/deploy/baseStack/getBaseStackResource.ts
|
|
524
|
+
var getBaseStackOutput = async (outputKey) => {
  // Reads one output value from the deployed carlin base stack.
  const { OutputValue } = await getStackOutput({
    stackName: BASE_STACK_NAME,
    outputKey
  });
  return OutputValue;
};
// Maps the resource alias used by callers to the base stack's output key.
var resourcesKeys = {
  BASE_STACK_BUCKET_LOGICAL_NAME,
  BASE_STACK_LAMBDA_IMAGE_BUILDER_LOGICAL_NAME,
  BASE_STACK_LAMBDA_LAYER_BUILDER_LOGICAL_NAME
};
// Per-process memo of already-resolved base-stack outputs.
var resources = {};
var getBaseStackResource = async (resource) => {
  if (!resources[resource]) {
    resources[resource] = await getBaseStackOutput(resourcesKeys[resource]);
  }
  return resources[resource];
};
|
|
543
|
+
var logPrefix3 = "cloudformation";
// Custom npmlog levels used below to surface stack events and outputs.
log5.addLevel("event", 1e4, { fg: "yellow" });
log5.addLevel("output", 1e4, { fg: "blue" });
|
|
546
|
+
// CloudFormation's limit for an inline TemplateBody; larger templates
// must be uploaded to S3 and passed by TemplateURL instead.
var TEMPLATE_BODY_MAX_SIZE = 51200;
var isTemplateBodyGreaterThanMaxSize = (template) => {
  const serialized = JSON.stringify(template);
  const sizeInBytes = Buffer.byteLength(serialized, "utf8");
  return sizeInBytes >= TEMPLATE_BODY_MAX_SIZE;
};
|
|
550
|
+
var uploadTemplateToBaseStackBucket = async ({
  stackName,
  template
}) => {
  // Stores the template JSON in the base-stack bucket (used when the body
  // exceeds CloudFormation's inline size limit) and returns its URL.
  const bucketName = await getBaseStackResource(
    "BASE_STACK_BUCKET_LOGICAL_NAME"
  );
  const body = Buffer.from(JSON.stringify(template, null, 2));
  const { url } = await uploadFileToS3({
    bucket: bucketName,
    contentType: "application/json",
    key: `${BASE_STACK_BUCKET_TEMPLATES_FOLDER}/${stackName}.json`,
    file: body
  });
  return { url };
};
|
|
565
|
+
// Memo of CloudFormationClient instances keyed by their serialized
// configuration, so changing REGION between calls yields the right client
// instead of a stale cached one.
var cloudFormationClients = {};
var cloudformation = () => {
  const cloudFormationClientConfig = {
    apiVersion: "2010-05-15",
    region: getEnvVar("REGION")
  };
  const key = JSON.stringify(cloudFormationClientConfig);
  if (!cloudFormationClients[key]) {
    // Reuse the exact config object the cache key was derived from — the
    // original duplicated the literals here, letting key and client
    // configuration drift apart on future edits.
    cloudFormationClients[key] = new CloudFormationClient(
      cloudFormationClientConfig
    );
  }
  return cloudFormationClients[key];
};
|
|
580
|
+
// AWS SDK v2 client; in this file it is only used for waitFor() calls
// (stackCreateComplete / stackUpdateComplete / stackDeleteComplete),
// which the v3 client used elsewhere does not provide in the same shape.
var cloudFormationV2 = () => {
  return new AWS.CloudFormation({ apiVersion: "2010-05-15" });
};
|
|
583
|
+
var describeStacks = async ({
  stackName
} = {}) => {
  // Thin wrapper over DescribeStacks; with no stackName it describes
  // every stack in the region.
  const command = new DescribeStacksCommand({ StackName: stackName });
  const response = await cloudformation().send(command);
  return response.Stacks;
};
|
|
591
|
+
// Pass-through to the DescribeStackResource API.
var describeStackResource = async (input) => {
  return cloudformation().send(new DescribeStackResourceCommand(input));
};
|
|
594
|
+
// Probes for the stack by describing it; a ValidationError means "does
// not exist", anything else is re-thrown.
var doesStackExist = async ({ stackName }) => {
  log5.info(logPrefix3, `Checking if stack ${stackName} already exists...`);
  try {
    await describeStacks({ stackName });
    log5.info(logPrefix3, `Stack ${stackName} already exists.`);
    return true;
  } catch (error) {
    // NOTE(review): AWS SDK v3 service exceptions normally expose the
    // error type on `error.name`; confirm `error.Code` is actually
    // populated here — if it is not, a missing stack would re-throw
    // instead of returning false.
    if (error.Code === "ValidationError") {
      log5.info(logPrefix3, `Stack ${stackName} does not exist.`);
      return false;
    }
    throw error;
  }
};
|
|
608
|
+
// Logs (and returns) recent stack events that carry a status reason —
// used to explain why a create/update/delete failed. Only events from
// the last 10 minutes are kept, oldest first.
var describeStackEvents = async ({
  stackName
}) => {
  log5.error(logPrefix3, "Stack events:");
  const { StackEvents } = await cloudformation().send(
    new DescribeStackEventsCommand({ StackName: stackName })
  );
  const events = (StackEvents || []).filter(({ Timestamp }) => {
    // Keep only events from the last 10 minutes.
    return Date.now() - Number(Timestamp) < 10 * 60 * 1e3;
  }).filter(({ ResourceStatusReason }) => {
    // Events without a reason add no diagnostic value.
    return ResourceStatusReason;
  }).reverse();
  events.forEach(({ LogicalResourceId, ResourceStatusReason }) => {
    return log5.event(LogicalResourceId, ResourceStatusReason);
  });
  return events;
};
|
|
625
|
+
var describeStack = async ({ stackName }) => {
  // Returns the single stack description for `stackName`; DescribeStacks
  // with an explicit StackName either errors or returns at least one
  // stack.
  const stacks = await describeStacks({ stackName });
  if (!stacks || stacks.length === 0) {
    // Robustness fix: an empty Stacks array previously slipped through
    // and returned undefined, crashing callers that destructure the
    // result. Fail loudly with context instead.
    throw new Error(`Stack ${stackName} not found and cannot be described.`);
  }
  return stacks[0];
};
|
|
632
|
+
var getStackOutput = async ({
  stackName,
  outputKey
}) => {
  // Finds one named output on the stack; throws when absent so callers
  // never silently read undefined.
  const { Outputs = [] } = await describeStack({ stackName });
  const match = Outputs?.find((candidate) => {
    return candidate.OutputKey === outputKey;
  });
  if (!match) {
    throw new Error(`Output ${outputKey} doesn't exist on ${stackName} stack`);
  }
  return match;
};
|
|
645
|
+
var saveEnvironmentOutput = async ({
  outputs,
  stackName
}) => {
  // Persists the deployed stack's outputs to .carlin/<stackName>.json and
  // to .carlin/latest-deploy.json so other tooling can read them later.
  const envFile = {
    stackName,
    environment: getEnvironment(),
    projectName: getProjectName(),
    packageName: getPackageName()
  };
  envFile.outputs = outputs.reduce((acc, output) => {
    // Fix: null-check `output` BEFORE dereferencing OutputKey — the
    // original read output.OutputKey first, which would throw on a null
    // entry. Outputs without a key are skipped either way.
    if (!output || !output.OutputKey) {
      return acc;
    }
    return {
      ...acc,
      [output.OutputKey]: output
    };
  }, {});
  const dotCarlinFolderPath = path.join(process.cwd(), ".carlin");
  if (!fs3.existsSync(dotCarlinFolderPath)) {
    await fs3.promises.mkdir(dotCarlinFolderPath);
  }
  const filePath = path.join(dotCarlinFolderPath, `${stackName}.json`);
  await fs3.promises.writeFile(filePath, JSON.stringify(envFile, null, 2));
  const latestFilePath = path.join(
    dotCarlinFolderPath,
    LATEST_DEPLOY_OUTPUTS_FILENAME
  );
  await fs3.promises.writeFile(latestFilePath, JSON.stringify(envFile, null, 2));
};
|
|
676
|
+
var printStackOutputsAfterDeploy = async ({
  stackName
}) => {
  // Saves the stack outputs to the .carlin folder and pretty-prints them
  // through the custom npmlog "output" level.
  const {
    EnableTerminationProtection,
    StackName,
    Outputs = []
  } = await describeStack({ stackName });
  await saveEnvironmentOutput({ stackName, outputs: Outputs });
  log5.output("Describe Stack");
  log5.output("StackName", StackName);
  log5.output("EnableTerminationProtection", EnableTerminationProtection);
  for (const { OutputKey, OutputValue, Description, ExportName } of Outputs) {
    const details = [
      "",
      `OutputKey: ${OutputKey}`,
      `OutputValue: ${OutputValue}`,
      `Description: ${Description}`,
      `ExportName: ${ExportName}`,
      ""
    ].join("\n");
    log5.output(`${OutputKey}`, details);
  }
};
|
|
702
|
+
// Issues DeleteStack via the v3 client, then blocks on the v2 waiter; on
// failure, recent stack events are dumped before re-throwing.
var deleteStack = async ({ stackName }) => {
  log5.info(logPrefix3, `Deleting stack ${stackName}...`);
  await cloudformation().send(new DeleteStackCommand({ StackName: stackName }));
  try {
    await cloudFormationV2().waitFor("stackDeleteComplete", { StackName: stackName }).promise();
  } catch (err) {
    log5.error(logPrefix3, `An error occurred when deleting stack ${stackName}.`);
    await describeStackEvents({ stackName });
    throw err;
  }
  log5.info(logPrefix3, `Stack ${stackName} deleted.`);
};
|
|
714
|
+
// Creates the stack via the v3 client and waits for completion with the
// v2 waiter. On failure it dumps recent events and deletes the
// half-created stack before re-throwing, so a later deploy can recreate
// it.
var createStack = async ({
  params
}) => {
  const { StackName: stackName = "" } = params;
  log5.info(logPrefix3, `Creating stack ${stackName}...`);
  await cloudformation().send(new CreateStackCommand(params));
  try {
    await cloudFormationV2().waitFor("stackCreateComplete", { StackName: stackName }).promise();
  } catch (err) {
    log5.error(logPrefix3, `An error occurred when creating stack ${stackName}.`);
    await describeStackEvents({ stackName });
    await deleteStack({ stackName });
    throw err;
  }
  log5.info(logPrefix3, `Stack ${stackName} was created.`);
};
|
|
730
|
+
// Updates the stack and waits for completion. "No updates are to be
// performed." is treated as success; other failures dump recent stack
// events and re-throw.
var updateStack = async ({
  params
}) => {
  const { StackName: stackName = "" } = params;
  log5.info(logPrefix3, `Updating stack ${stackName}...`);
  try {
    await cloudformation().send(new UpdateStackCommand(params));
    await cloudFormationV2().waitFor("stackUpdateComplete", { StackName: stackName }).promise();
  } catch (error) {
    if (error.message === "No updates are to be performed.") {
      log5.info(logPrefix3, error.message);
      return;
    }
    log5.error(logPrefix3, "An error occurred when updating stack.");
    await describeStackEvents({ stackName });
    throw error;
  }
  log5.info(logPrefix3, `Stack ${stackName} was updated.`);
};
|
|
749
|
+
// Turns on CloudFormation termination protection for the stack, logging
// and re-throwing on failure.
var enableTerminationProtection = async ({
  stackName
}) => {
  log5.info(logPrefix3, `Enabling termination protection...`);
  try {
    await cloudformation().send(
      new UpdateTerminationProtectionCommand({
        EnableTerminationProtection: true,
        StackName: stackName
      })
    );
  } catch (err) {
    log5.error(
      logPrefix3,
      "An error occurred when enabling termination protection"
    );
    throw err;
  }
};
|
|
768
|
+
// Orchestrates a full deploy: applies carlin defaults, chooses inline
// TemplateBody vs S3 TemplateURL by template size, creates or updates the
// stack, optionally enables termination protection, then prints and
// returns the final stack description.
var deploy = async ({
  terminationProtection = false,
  ...paramsAndTemplate
}) => {
  const { params, template } = await addDefaults(paramsAndTemplate);
  const stackName = params.StackName;
  if (!stackName) {
    throw new Error("StackName is required");
  }
  // Drop any stale template references before choosing how to pass it.
  delete params.TemplateBody;
  delete params.TemplateURL;
  if (isTemplateBodyGreaterThanMaxSize(template)) {
    const { url } = await uploadTemplateToBaseStackBucket({
      stackName,
      template
    });
    params.TemplateURL = url;
  } else {
    params.TemplateBody = JSON.stringify(template);
  }
  params.Capabilities = [
    "CAPABILITY_AUTO_EXPAND",
    "CAPABILITY_IAM",
    "CAPABILITY_NAMED_IAM"
  ];
  if (await doesStackExist({ stackName })) {
    await updateStack({ params });
  } else {
    await createStack({ params });
  }
  // Protection is forced on whenever an environment is configured, even
  // if the caller did not request it.
  if (terminationProtection || !!getEnvironment()) {
    await enableTerminationProtection({ stackName });
  }
  await printStackOutputsAfterDeploy({ stackName });
  return describeStack({ stackName });
};
|
|
804
|
+
/**
 * A stack may only be destroyed when CloudFormation termination protection
 * is disabled on it.
 *
 * @param {{ stackName: string }} input
 * @returns {Promise<boolean>} true when the stack is not protected.
 */
var canDestroyStack = async ({ stackName }) => {
  const { EnableTerminationProtection } = await describeStack({ stackName });
  return !EnableTerminationProtection;
};
|
|
811
|
+
/**
 * Validate a CloudFormation template via the ValidateTemplate API.
 *
 * Mirrors `deploy`'s size handling: oversized templates are uploaded to the
 * base-stack bucket and validated by URL, otherwise validated inline.
 *
 * @param {{ stackName: string, template: object }} input
 * @returns {Promise<void>} Resolves when validation succeeds; AWS SDK errors
 *   propagate to the caller.
 */
var validateTemplate = async ({
  stackName,
  template
}) => {
  const input = {};
  if (isTemplateBodyGreaterThanMaxSize(template)) {
    const { url } = await uploadTemplateToBaseStackBucket({
      stackName,
      template
    });
    input.TemplateURL = url;
  } else {
    input.TemplateBody = JSON.stringify(template);
  }
  await cloudformation().send(new ValidateTemplateCommand(input));
};
|
|
829
|
+
|
|
830
|
+
// src/deploy/baseStack/getBucketTemplate.ts
|
|
831
|
+
// src/deploy/baseStack/getBucketTemplate.ts
/**
 * CloudFormation template fragment for the base-stack S3 bucket.
 *
 * The bucket keeps uploaded templates only briefly (1-day expiration under
 * the templates folder, 3-day noncurrent-version expiration) and is
 * versioned so Lambda code updates are picked up even when the template
 * itself does not change.
 *
 * @returns {object} A CloudFormation template object.
 */
var getBucketTemplate = () => {
  const lifecycleRules = [
    {
      ExpirationInDays: 1,
      Prefix: BASE_STACK_BUCKET_TEMPLATES_FOLDER,
      Status: "Enabled"
    },
    {
      NoncurrentVersionExpirationInDays: 3,
      Status: "Enabled"
    }
  ];
  const bucketResource = {
    Type: "AWS::S3::Bucket",
    // Retain the bucket on stack deletion so stored artifacts survive.
    DeletionPolicy: "Retain",
    Properties: {
      LifecycleConfiguration: { Rules: lifecycleRules },
      /**
       * This is necessary because if we update Lambda code without change
       * CloudFormation template, the Lambda will not be updated.
       */
      VersioningConfiguration: { Status: "Enabled" }
    }
  };
  return {
    AWSTemplateFormatVersion: "2010-09-09",
    Resources: {
      [BASE_STACK_BUCKET_LOGICAL_NAME]: bucketResource
    },
    Outputs: {
      [BASE_STACK_BUCKET_LOGICAL_NAME]: {
        Value: { Ref: BASE_STACK_BUCKET_LOGICAL_NAME },
        Export: {
          Name: BASE_STACK_BUCKET_NAME_EXPORTED_NAME
        }
      }
    }
  };
};
|
|
872
|
+
/**
 * CloudFormation template fragment for the CodeBuild project that builds
 * and pushes the Lambda Docker image to ECR.
 *
 * Declares the project's CloudWatch log group, its IAM service role
 * (CloudWatch Logs + ECR push + read access to the base-stack bucket) and
 * the CodeBuild project itself, whose buildspec builds, tags and pushes the
 * image.
 *
 * @returns {object} A CloudFormation template object.
 */
var getLambdaImageBuilderTemplate = () => {
  const logsLogicalId = "CodeBuildProjectLogsLogGroup";
  const roleLogicalId = "ImageCodeBuildProjectIAMRole";
  const buildSpec = yaml4.dump({
    version: "0.2",
    phases: {
      install: {
        commands: [
          "echo install started on `date`",
          "npm init -y",
          /**
           * https://stackoverflow.com/a/51433146/8786986
           */
          "npm install --save --package-lock-only --no-package-lock $LAMBDA_EXTERNALS",
          "ls"
        ]
      },
      pre_build: {
        commands: [
          "echo pre_build started on `date`",
          "$(aws ecr get-login --no-include-email --region $AWS_REGION)"
        ]
      },
      build: {
        commands: [
          "echo build started on `date`",
          "echo Building the repository image...",
          'echo "$DOCKERFILE" > Dockerfile',
          "docker build -t $REPOSITORY_ECR_REPOSITORY:$IMAGE_TAG -f Dockerfile .",
          "docker tag $REPOSITORY_ECR_REPOSITORY:$IMAGE_TAG $AWS_ACCOUNT_ID.dkr.ecr.$AWS_REGION.amazonaws.com/$REPOSITORY_ECR_REPOSITORY:$IMAGE_TAG"
        ]
      },
      post_build: {
        commands: [
          "echo post_build completed on `date`",
          "echo Pushing the repository image...",
          "docker push $AWS_ACCOUNT_ID.dkr.ecr.$AWS_REGION.amazonaws.com/$REPOSITORY_ECR_REPOSITORY:$IMAGE_TAG"
        ]
      }
    }
  });
  const roleResource = {
    Type: "AWS::IAM::Role",
    Properties: {
      AssumeRolePolicyDocument: {
        Version: "2012-10-17",
        Statement: [
          {
            Effect: "Allow",
            Principal: {
              Service: "codebuild.amazonaws.com"
            },
            Action: "sts:AssumeRole"
          }
        ]
      },
      Path: getIamPath(),
      Policies: [
        {
          PolicyName: `${roleLogicalId}Policy`,
          PolicyDocument: {
            Version: "2012-10-17",
            Statement: [
              {
                Effect: "Allow",
                Action: ["logs:CreateLogStream", "logs:PutLogEvents"],
                Resource: "*"
              },
              {
                Effect: "Allow",
                Action: ["ecr:GetAuthorizationToken"],
                Resource: "*"
              },
              {
                Effect: "Allow",
                Action: [
                  "ecr:BatchCheckLayerAvailability",
                  "ecr:CompleteLayerUpload",
                  "ecr:InitiateLayerUpload",
                  "ecr:PutImage",
                  "ecr:UploadLayerPart"
                ],
                Resource: "*"
              },
              {
                Effect: "Allow",
                Action: "s3:GetObject",
                Resource: [
                  {
                    "Fn::Sub": [
                      // eslint-disable-next-line no-template-curly-in-string
                      "arn:aws:s3:::${BucketName}/*",
                      {
                        BucketName: {
                          Ref: BASE_STACK_BUCKET_LOGICAL_NAME
                        }
                      }
                    ]
                  }
                ]
              }
            ]
          }
        }
      ]
    }
  };
  const projectResource = {
    Type: "AWS::CodeBuild::Project",
    Properties: {
      Artifacts: {
        Type: "NO_ARTIFACTS"
      },
      Cache: {
        Location: "LOCAL",
        Modes: ["LOCAL_DOCKER_LAYER_CACHE"],
        Type: "LOCAL"
      },
      Description: "Create Lambda image.",
      Environment: {
        ComputeType: "BUILD_GENERAL1_SMALL",
        EnvironmentVariables: [
          {
            Name: "AWS_ACCOUNT_ID",
            Value: { Ref: "AWS::AccountId" }
          },
          {
            Name: "AWS_REGION",
            Value: { Ref: "AWS::Region" }
          },
          {
            Name: "IMAGE_TAG",
            Value: "latest"
          },
          {
            Name: "LAMBDA_EXTERNALS",
            Value: ""
          }
        ],
        Image: "aws/codebuild/standard:3.0",
        ImagePullCredentialsType: "CODEBUILD",
        // Required so the build can talk to the Docker daemon.
        PrivilegedMode: true,
        Type: "LINUX_CONTAINER"
      },
      LogsConfig: {
        CloudWatchLogs: {
          Status: "ENABLED",
          GroupName: { Ref: logsLogicalId }
        }
      },
      ServiceRole: {
        "Fn::GetAtt": [roleLogicalId, "Arn"]
      },
      Source: {
        BuildSpec: buildSpec,
        Type: "NO_SOURCE"
      },
      TimeoutInMinutes: 60
    }
  };
  return {
    AWSTemplateFormatVersion: "2010-09-09",
    Resources: {
      [logsLogicalId]: {
        Type: "AWS::Logs::LogGroup",
        DeletionPolicy: "Delete",
        Properties: {}
      },
      [roleLogicalId]: roleResource,
      [BASE_STACK_LAMBDA_IMAGE_BUILDER_LOGICAL_NAME]: projectResource
    },
    Outputs: {
      [BASE_STACK_LAMBDA_IMAGE_BUILDER_LOGICAL_NAME]: {
        Value: { Ref: BASE_STACK_LAMBDA_IMAGE_BUILDER_LOGICAL_NAME },
        Export: {
          Name: BASE_STACK_LAMBDA_IMAGE_BUILDER_EXPORTED_NAME
        }
      }
    }
  };
};
|
|
1050
|
+
|
|
1051
|
+
// src/deploy/baseStack/getLambdaLayerBuilderTemplate.ts
|
|
1052
|
+
// CloudWatch Logs log-group logical ID for the Lambda-layer builder CodeBuild project.
var CODE_BUILD_PROJECT_LOGS_GROUP_LOGICAL_ID = `${BASE_STACK_LAMBDA_LAYER_BUILDER_LOGICAL_NAME}LogsLogGroup`;
|
|
1053
|
+
// IAM service-role logical ID for the Lambda-layer builder CodeBuild project.
var CODE_BUILD_PROJECT_IAM_ROLE_LOGICAL_ID = `${BASE_STACK_LAMBDA_LAYER_BUILDER_LOGICAL_NAME}Role`;
|
|
1054
|
+
/**
 * Returns the CodeBuild buildspec (a YAML string) that installs
 * $PACKAGE_NAME into a `nodejs/` folder and zips it as a Lambda-layer
 * artifact named $PACKAGE_NAME.zip.
 *
 * NOTE(review): the YAML's meaning depends on the template literal's exact
 * whitespace — do not reformat this literal.
 */
var getBuildSpec = () => {
|
|
1055
|
+
return `
|
|
1056
|
+
version: 0.2
|
|
1057
|
+
phases:
|
|
1058
|
+
install:
|
|
1059
|
+
runtime-versions:
|
|
1060
|
+
nodejs: ${NODE_VERSION}
|
|
1061
|
+
commands:
|
|
1062
|
+
- npm i --no-bin-links --no-optional --no-package-lock --no-save --no-shrinkwrap $PACKAGE_NAME
|
|
1063
|
+
- mkdir nodejs
|
|
1064
|
+
- mv node_modules nodejs/node_modules
|
|
1065
|
+
artifacts:
|
|
1066
|
+
files:
|
|
1067
|
+
- nodejs/**/*
|
|
1068
|
+
name: $PACKAGE_NAME.zip
|
|
1069
|
+
`.trim();
|
|
1070
|
+
};
|
|
1071
|
+
/**
 * CloudFormation template fragment for the CodeBuild project that builds
 * Lambda-layer zip artifacts into the base-stack bucket.
 *
 * Declares the project's IAM service role (CloudWatch Logs + S3 access to
 * the base-stack bucket), its log group, and the CodeBuild project that
 * runs `getBuildSpec()` and stores zipped artifacts under
 * `lambda-layers/packages`.
 *
 * @returns {object} A CloudFormation template object (Resources + Outputs).
 */
var getLambdaLayerBuilderTemplate = () => {
  const bucketArnSub = (suffix) => ({
    "Fn::Sub": [
      // eslint-disable-next-line no-template-curly-in-string
      "arn:aws:s3:::${BucketName}" + suffix,
      {
        BucketName: {
          Ref: BASE_STACK_BUCKET_LOGICAL_NAME
        }
      }
    ]
  });
  const roleResource = {
    Type: "AWS::IAM::Role",
    Properties: {
      AssumeRolePolicyDocument: {
        Version: "2012-10-17",
        Statement: [
          {
            Effect: "Allow",
            Principal: {
              Service: ["codebuild.amazonaws.com"]
            },
            Action: ["sts:AssumeRole"]
          }
        ]
      },
      Path: getIamPath(),
      Policies: [
        {
          PolicyName: `${CODE_BUILD_PROJECT_IAM_ROLE_LOGICAL_ID}Policy`,
          PolicyDocument: {
            Version: "2012-10-17",
            Statement: [
              {
                Effect: "Allow",
                Action: ["logs:CreateLogStream", "logs:PutLogEvents"],
                Resource: "*"
              },
              {
                Effect: "Allow",
                Action: ["s3:*"],
                // Bucket itself plus every object inside it.
                Resource: [bucketArnSub(""), bucketArnSub("/*")]
              }
            ]
          }
        }
      ]
    }
  };
  const projectResource = {
    Type: "AWS::CodeBuild::Project",
    Properties: {
      Artifacts: {
        Location: { Ref: BASE_STACK_BUCKET_LOGICAL_NAME },
        NamespaceType: "NONE",
        OverrideArtifactName: true,
        Packaging: "ZIP",
        Path: "lambda-layers/packages",
        Type: "S3"
      },
      Environment: {
        ComputeType: "BUILD_GENERAL1_SMALL",
        /**
         * Image should match the runtime of the buildspec.
         * https://docs.aws.amazon.com/codebuild/latest/userguide/build-env-ref-available.html
         */
        Image: "aws/codebuild/standard:7.0",
        Type: "LINUX_CONTAINER"
      },
      LogsConfig: {
        CloudWatchLogs: {
          GroupName: {
            Ref: CODE_BUILD_PROJECT_LOGS_GROUP_LOGICAL_ID
          },
          Status: "ENABLED"
        }
      },
      ServiceRole: {
        "Fn::GetAtt": `${CODE_BUILD_PROJECT_IAM_ROLE_LOGICAL_ID}.Arn`
      },
      Source: {
        BuildSpec: getBuildSpec(),
        Type: "NO_SOURCE"
      }
    }
  };
  return {
    Resources: {
      [CODE_BUILD_PROJECT_IAM_ROLE_LOGICAL_ID]: roleResource,
      [CODE_BUILD_PROJECT_LOGS_GROUP_LOGICAL_ID]: {
        Type: "AWS::Logs::LogGroup",
        DeletionPolicy: "Delete",
        Properties: {}
      },
      [BASE_STACK_LAMBDA_LAYER_BUILDER_LOGICAL_NAME]: projectResource
    },
    Outputs: {
      [BASE_STACK_LAMBDA_LAYER_BUILDER_LOGICAL_NAME]: {
        Value: { Ref: BASE_STACK_LAMBDA_LAYER_BUILDER_LOGICAL_NAME }
      }
    }
  };
};
|
|
1185
|
+
/**
 * CloudFormation template fragment for the base-stack VPC.
 *
 * Builds a 10.0.0.0/16 VPC with an internet gateway, a public route table
 * and three public subnets (10.0.0.0/24 .. 10.0.2.0/24), one per
 * availability zone, each exported under its corresponding
 * BASE_STACK_VPC_PUBLIC_SUBNET_n exported name.
 *
 * @returns {object} A CloudFormation template object.
 */
var getVpcTemplate = () => {
  const vpcName = `${pascalCase(NAME)}VPC`;
  const EC2_INTERNET_GATEWAY_LOGICAL_ID = "EC2InternetGateway";
  const EC2_ROUTE_TABLE_LOGICAL_ID = "EC2RouteTable";
  const EC2_VPC_LOGICAL_ID = "EC2VCP";
  const template = {
    AWSTemplateFormatVersion: "2010-09-09",
    Mappings: {
      CidrMappings: {
        VPC: {
          CIDR: "10.0.0.0/16"
        }
      }
    },
    Resources: {
      [EC2_VPC_LOGICAL_ID]: {
        Type: "AWS::EC2::VPC",
        Properties: {
          CidrBlock: {
            "Fn::FindInMap": ["CidrMappings", "VPC", "CIDR"]
          },
          EnableDnsHostnames: true,
          EnableDnsSupport: true,
          Tags: [
            {
              Key: "Name",
              Value: vpcName
            }
          ]
        }
      },
      [EC2_INTERNET_GATEWAY_LOGICAL_ID]: {
        Type: "AWS::EC2::InternetGateway",
        Properties: {}
      },
      EC2VPCGatewayAttachment: {
        Type: "AWS::EC2::VPCGatewayAttachment",
        Properties: {
          InternetGatewayId: {
            Ref: EC2_INTERNET_GATEWAY_LOGICAL_ID
          },
          VpcId: {
            Ref: EC2_VPC_LOGICAL_ID
          }
        }
      },
      [EC2_ROUTE_TABLE_LOGICAL_ID]: {
        Type: "AWS::EC2::RouteTable",
        Properties: {
          Tags: [
            {
              Key: "Name",
              Value: {
                "Fn::Join": [" ", [vpcName, "-", EC2_ROUTE_TABLE_LOGICAL_ID]]
              }
            }
          ],
          VpcId: {
            Ref: EC2_VPC_LOGICAL_ID
          }
        }
      },
      EC2Route: {
        Type: "AWS::EC2::Route",
        Properties: {
          // Default route: all outbound traffic goes through the IGW.
          DestinationCidrBlock: "0.0.0.0/0",
          GatewayId: {
            Ref: EC2_INTERNET_GATEWAY_LOGICAL_ID
          },
          RouteTableId: {
            Ref: EC2_ROUTE_TABLE_LOGICAL_ID
          }
        }
      }
    },
    Outputs: {
      VPCId: {
        Value: {
          Ref: EC2_VPC_LOGICAL_ID
        },
        Export: {
          Name: BASE_STACK_VPC_ID_EXPORTED_NAME
        }
      },
      VPCDefaultSecurityGroup: {
        Value: {
          "Fn::GetAtt": [EC2_VPC_LOGICAL_ID, "DefaultSecurityGroup"]
        },
        Export: {
          Name: BASE_STACK_VPC_DEFAULT_SECURITY_GROUP_EXPORTED_NAME
        }
      }
    }
  };
  const publicSubnetExports = [
    BASE_STACK_VPC_PUBLIC_SUBNET_0_EXPORTED_NAME,
    BASE_STACK_VPC_PUBLIC_SUBNET_1_EXPORTED_NAME,
    BASE_STACK_VPC_PUBLIC_SUBNET_2_EXPORTED_NAME
  ];
  for (let index = 0; index < publicSubnetExports.length; index += 1) {
    const publicSubnetExportedName = publicSubnetExports[index];
    const publicSubnetLogicalId = `PublicSubnet${index}EC2Subnet`;
    const publicSubnetCidrMappings = `PublicSubnet${index}`;
    template.Mappings.CidrMappings[publicSubnetCidrMappings] = {
      CIDR: `10.0.${index}.0/24`
    };
    template.Resources[publicSubnetLogicalId] = {
      Type: "AWS::EC2::Subnet",
      Properties: {
        // Spread subnets across AZs: subnet index n -> nth AZ of the region.
        AvailabilityZone: {
          "Fn::Select": [
            index,
            {
              "Fn::GetAZs": {
                Ref: "AWS::Region"
              }
            }
          ]
        },
        CidrBlock: {
          "Fn::FindInMap": ["CidrMappings", publicSubnetCidrMappings, "CIDR"]
        },
        MapPublicIpOnLaunch: true,
        Tags: [
          {
            Key: "Name",
            Value: {
              "Fn::Join": [
                " ",
                [EC2_VPC_LOGICAL_ID, "-", publicSubnetLogicalId]
              ]
            }
          }
        ],
        VpcId: {
          Ref: EC2_VPC_LOGICAL_ID
        }
      }
    };
    template.Resources[`PublicSubnet${index}EC2SubnetRouteTableAssociation`] = {
      Type: "AWS::EC2::SubnetRouteTableAssociation",
      Properties: {
        RouteTableId: {
          Ref: EC2_ROUTE_TABLE_LOGICAL_ID
        },
        SubnetId: {
          Ref: publicSubnetLogicalId
        }
      }
    };
    // Defensive: Outputs is always defined above, kept for safety.
    if (!template.Outputs) {
      template.Outputs = {};
    }
    template.Outputs[publicSubnetLogicalId] = {
      Value: {
        Ref: publicSubnetLogicalId
      },
      Export: {
        Name: publicSubnetExportedName
      }
    };
  }
  return template;
};
|
|
1347
|
+
/**
 * Pin the stack name for the current process by storing it in the
 * STACK_NAME environment-variable slot (read back by `getStackName`).
 *
 * @param {string} stackName - The name to pin.
 */
var setPreDefinedStackName = (stackName) => {
  setEnvVar("STACK_NAME", stackName);
};
|
|
1350
|
+
// Hard cap on generated CloudFormation stack names.
var STACK_NAME_MAX_LENGTH = 100;
/**
 * Coerce a value to string and truncate it to STACK_NAME_MAX_LENGTH chars.
 *
 * @param {string} stackName - Candidate stack name (coerced to string).
 * @returns {string} The name, at most STACK_NAME_MAX_LENGTH characters long.
 */
var limitStackName = (stackName) => {
  return `${stackName}`.slice(0, STACK_NAME_MAX_LENGTH);
};
|
|
1354
|
+
/**
 * Resolve the CloudFormation stack name for the current deploy.
 *
 * Precedence:
 *  1. An explicit STACK_NAME env var wins outright.
 *  2. Otherwise `<PascalCasePackageName>-<environment | kebab-case-branch>`,
 *     with a random `Stack-NNNNN` fallback when no package name exists.
 * The result is always truncated via `limitStackName`.
 *
 * @returns {Promise<string>} The resolved stack name.
 */
var getStackName = async () => {
  if (getEnvVar("STACK_NAME")) {
    return getEnvVar("STACK_NAME");
  }
  const [currentBranch, environment, packageName] = await Promise.all([
    getCurrentBranch(),
    getEnvironment(),
    getPackageName()
  ]);
  const firstName = packageName
    ? pascalCase(packageName)
    : `Stack-${Math.round(Math.random() * 1e5)}`;
  // Environment takes precedence over the branch name; both may be absent.
  const secondName =
    environment || (currentBranch ? kebabCase(currentBranch) : void 0);
  const joined = [firstName, secondName].filter(Boolean).join("-");
  return limitStackName(joined);
};
|
|
1378
|
+
/**
 * Log a deploy failure: a headline with the log prefix plus the error
 * message (if any) in %j format.
 *
 * @param {{ error?: Error, logPrefix: string }} input
 */
var deployErrorLogs = ({
  error,
  logPrefix: prefix
}) => {
  log5.error(prefix, `An error occurred. Cannot deploy ${prefix}.`);
  log5.error(prefix, "Error message: %j", error?.message);
};
|
|
1385
|
+
/**
 * Log a deploy failure and terminate the CLI process with exit code 1.
 *
 * @param {{ error?: Error, logPrefix: string }} input
 * @returns {never} Does not return — calls process.exit(1).
 */
var handleDeployError = ({
  error,
  logPrefix: prefix
}) => {
  deployErrorLogs({ error, logPrefix: prefix });
  process.exit(1);
};
|
|
1392
|
+
/**
 * Common deploy bootstrap: log the start banner, pin a pre-defined stack
 * name when one is given, and resolve the final stack name.
 *
 * @param {{ logPrefix: string, stackName?: string }} input
 * @returns {Promise<{ stackName: string }>} The resolved stack name.
 */
var handleDeployInitialization = async ({
  logPrefix: prefix,
  stackName: preDefinedStackName
}) => {
  log5.info(prefix, `Starting deploy ${prefix}...`);
  if (preDefinedStackName) {
    setPreDefinedStackName(preDefinedStackName);
  }
  const stackName = await getStackName();
  log5.info(prefix, `stackName: ${stackName}`);
  return { stackName };
};
|
|
1404
|
+
// Log prefix used by all base-stack deploy messages.
var logPrefix4 = "base-stack";
// Full base-stack template: bucket + Lambda image builder + Lambda layer
// builder + VPC fragments deep-merged into a single CloudFormation template.
var baseStackTemplate = deepmerge.all([
  getBucketTemplate(),
  getLambdaImageBuilderTemplate(),
  getLambdaLayerBuilderTemplate(),
  getVpcTemplate()
]);
|
|
1411
|
+
/**
 * Deploy the base stack (bucket, image/layer builders, VPC) under the fixed
 * BASE_STACK_NAME with termination protection enabled. Any failure is
 * routed through `handleDeployError`, which exits the process.
 *
 * @returns {Promise<void>}
 */
var deployBaseStack = async () => {
  try {
    const { stackName } = await handleDeployInitialization({
      logPrefix: logPrefix4,
      stackName: BASE_STACK_NAME
    });
    const deployInput = {
      template: baseStackTemplate,
      params: { StackName: stackName },
      terminationProtection: true
    };
    await deploy(deployInput);
  } catch (error) {
    handleDeployError({ error, logPrefix: logPrefix4 });
  }
};
|
|
1426
|
+
|
|
1427
|
+
// src/deploy/baseStack/command.ts
|
|
1428
|
+
// src/deploy/baseStack/command.ts
/**
 * yargs command module for `deploy base-stack`: provisions the shared base
 * resources via `deployBaseStack`.
 */
var deployBaseStackCommand = {
  command: "base-stack",
  describe: "Create base resources.",
  handler: deployBaseStack
};
|
|
1433
|
+
|
|
1434
|
+
// src/deploy/cicd/config.ts
|
|
1435
|
+
// src/deploy/cicd/config.ts
// Default ECS task sizing used by the CICD stack.
var ECS_TASK_DEFAULT_CPU = "2048";
var ECS_TASK_DEFAULT_MEMORY = "4096";
// CodePipeline stage/action names for the manual ECS-task execution step.
var PIPELINE_ECS_TASK_EXECUTION_STAGE_NAME = "PipelineRunECSTasksStage";
var PIPELINE_ECS_TASK_EXECUTION_MANUAL_APPROVAL_ACTION_NAME = "PipelineRunECSTasksApproval";
|
|
1439
|
+
|
|
1440
|
+
// src/deploy/cicd/pipelines.ts
|
|
1441
|
+
// Pipeline names accepted by the `pipelines` CLI option (see `options`).
var pipelines = ["pr", "main", "tag"];
|
|
1442
|
+
/**
 * yargs option definitions shared by the CICD deploy command and
 * `getCicdConfig`.
 */
var options = {
  cpu: {
    type: "string"
  },
  memory: {
    type: "string"
  },
  pipelines: {
    choices: pipelines,
    // Normalize CLI values to camelCase keys.
    coerce: (values) => {
      return values.map((value) => {
        return camelCase(value);
      });
    },
    default: [],
    description: "Pipelines that will be implemented with the CICD stack.",
    type: "array"
  },
  "update-repository": {
    alias: ["ur"],
    description: "Determine if the repository image will be updated.",
    default: true,
    type: "boolean"
  },
  "ssh-key": {
    demandOption: true,
    type: "string"
  },
  "ssh-url": {
    demandOption: true,
    type: "string"
  },
  "slack-webhook-url": {
    type: "string"
  },
  /**
   * This option has the format:
   *
   * ```ts
   * Array<{
   *  name: string,
   *  value: string,
   * }>
   * ```
   */
  "task-environment": {
    alias: ["te"],
    default: [],
    describe: "A list of environment variables that will be passed to the ECS container task.",
    type: "array"
  }
};
|
|
1494
|
+
/**
 * Extract the CICD-related options from the yargs config file passed on the
 * command line (`--config`).
 *
 * @returns {object|false} A map of option-name -> value limited to the keys
 *   declared in `options`, or `false` when no config file was parsed.
 */
var getCicdConfig = () => {
  const { parsed } = yargs(hideBin(process.argv)).config();
  if (!parsed) {
    return false;
  }
  const { argv } = parsed;
  const config = Object.keys(options).reduce((acc, key) => {
    const value = argv[key];
    /**
     * Fix: the previous `if (value)` check dropped explicitly falsy values
     * (e.g. `update-repository: false` / `--no-update-repository`), so they
     * silently fell back to the option default. Only `undefined` means
     * "not provided".
     */
    if (value !== undefined) {
      acc[key] = value;
    }
    return acc;
  }, {});
  return config;
};
|
|
1509
|
+
|
|
1510
|
+
// src/deploy/cicd/getTriggerPipelineObjectKey.ts
|
|
1511
|
+
// src/deploy/cicd/getTriggerPipelineObjectKey.ts
/**
 * Build the S3 object key of a pipeline trigger zip: `<prefix>/<pipeline>.zip`.
 *
 * @param {{ prefix: string, pipeline: string }} input
 * @returns {string} The S3 object key.
 */
var getTriggerPipelinesObjectKey = ({ prefix, pipeline }) => `${prefix}/${pipeline}.zip`;
|
|
1517
|
+
// CloudFormation logical IDs and well-known names used by the CICD template.
// API + Lambda handlers.
var API_LOGICAL_ID = "ApiV1ServerlessApi";
var FUNCTION_IAM_ROLE_LOGICAL_ID = "ApiV1ServerlessFunctionIAMRole";
var ECS_TASK_REPORT_HANDLER_LAMBDA_FUNCTION_LOGICAL_ID = "EcsTaskReportHandler";
// Repository-image CodeBuild project and its supporting resources.
var CODE_BUILD_PROJECT_LOGS_LOGICAL_ID = "RepositoryImageCodeBuildProjectLogsLogGroup";
var CODE_BUILD_PROJECT_SERVICE_ROLE_LOGICAL_ID = "RepositoryImageCodeBuildProjectIAMRole";
var ECR_REPOSITORY_LOGICAL_ID = "RepositoryECRRepository";
var PROCESS_ENV_REPOSITORY_IMAGE_CODE_BUILD_PROJECT_NAME = "REPOSITORY_IMAGE_CODE_BUILD_PROJECT_NAME";
var REPOSITORY_IMAGE_CODE_BUILD_PROJECT_LOGICAL_ID = "RepositoryImageCodeBuildProject";
// ECS cluster / task definition resources.
var REPOSITORY_ECS_TASK_CONTAINER_NAME = "RepositoryECSTaskContainerName";
var REPOSITORY_ECS_TASK_DEFINITION_LOGICAL_ID = "RepositoryECSTaskDefinition";
var REPOSITORY_TASKS_ECS_CLUSTER_LOGICAL_ID = "RepositoryTasksECSCluster";
var REPOSITORY_TASKS_ECS_CLUSTER_LOGS_LOG_GROUP_LOGICAL_ID = "RepositoryTasksECSClusterLogsLogGroup";
var REPOSITORY_TASKS_ECS_TASK_DEFINITION_EXECUTION_ROLE_LOGICAL_ID = "RepositoryTasksECSTaskDefinitionExecutionRoleIAMRole";
var REPOSITORY_TASKS_ECS_TASK_DEFINITION_TASK_ROLE_LOGICAL_ID = "RepositoryTasksECSTaskDefinitionTaskRoleIAMRole";
// CodePipeline resources.
var PIPELINES_ARTIFACT_STORE_S3_BUCKET_LOGICAL_ID = "PipelinesArtifactStoreS3Bucket";
var PIPELINES_ROLE_LOGICAL_ID = "PipelinesMainIAMRole";
var PIPELINES_MAIN_LOGICAL_ID = "PipelinesMainCodePipeline";
var PIPELINES_TAG_LOGICAL_ID = "PipelinesTagCodePipeline";
var PIPELINES_HANDLER_LAMBDA_FUNCTION_LOGICAL_ID = "PipelinesHandlerLambdaFunction";
var IMAGE_UPDATER_SCHEDULE_SERVERLESS_FUNCTION_LOGICAL_ID = "ImageUpdaterScheduleServerlessFunction";
|
|
1537
|
+
/**
 * CloudFormation resource for the CodeBuild project that clones the
 * repository over SSH, bakes it into a Docker image (Ubuntu + Node.js +
 * Yarn + carlin) and pushes the image to the project's ECR repository.
 *
 * @returns {object} An AWS::CodeBuild::Project resource definition.
 */
var getRepositoryImageBuilder = () => {
  // "nodejs18.x" -> "18", used to pick the NodeSource setup script.
  const nodeMajorVersion = NODE_RUNTIME.replace("nodejs", "").replace(
    ".x",
    ""
  );
  const dockerfile = [
    "FROM public.ecr.aws/ubuntu/ubuntu:20.04_stable",
    // https://stackoverflow.com/a/59693182/8786986
    "ENV DEBIAN_FRONTEND noninteractive",
    // Make sure apt is up to date
    "RUN apt-get update --fix-missing",
    "RUN apt-get install -y curl",
    "RUN apt-get install -y git",
    "RUN apt-get install -y jq",
    // Install Node.js
    `RUN curl -fsSL https://deb.nodesource.com/setup_${nodeMajorVersion}.x | bash -`,
    "RUN apt-get install -y nodejs",
    // Clean cache
    "RUN apt-get clean",
    // Install Yarn
    "RUN npm install -g yarn",
    // Install carlin CLI
    "RUN yarn global add carlin",
    // Configure git
    "RUN git config --global user.name carlin",
    "RUN git config --global user.email carlin@ttoss.dev",
    "RUN mkdir /root/.ssh/",
    "COPY ./id_rsa /root/.ssh/id_rsa",
    "RUN chmod 600 /root/.ssh/id_rsa",
    // Make sure your domain is accepted
    "RUN touch /root/.ssh/known_hosts",
    "RUN ssh-keyscan github.com >> /root/.ssh/known_hosts",
    // Copy repository
    "COPY . /home",
    // Go to repository directory
    "WORKDIR /home/repository",
    // Set Yarn cache
    "RUN mkdir -p /home/yarn-cache",
    "RUN yarn config set cache-folder /home/yarn-cache",
    "RUN yarn install",
    // Used in case of yarn.lock is modified.
    "RUN git checkout -- yarn.lock"
  ].join("\n");
  const buildSpec = yaml4.dump({
    version: "0.2",
    phases: {
      install: {
        commands: [
          "echo install started on `date`",
          `echo "$SSH_KEY" > ~/.ssh/id_rsa`,
          "chmod 600 ~/.ssh/id_rsa",
          "rm -rf repository",
          "git clone $SSH_URL repository",
          "cd repository",
          "ls"
        ]
      },
      pre_build: {
        commands: ["echo pre_build started on `date`"]
      },
      build: {
        commands: [
          "echo build started on `date`",
          "$(aws ecr get-login --no-include-email --region $AWS_REGION)",
          "echo Building the repository image...",
          "cd ../",
          "cp ~/.ssh/id_rsa .",
          'echo "$DOCKERFILE" > Dockerfile',
          "cat Dockerfile",
          "docker build -t $REPOSITORY_ECR_REPOSITORY:$IMAGE_TAG -f Dockerfile .",
          "docker tag $REPOSITORY_ECR_REPOSITORY:$IMAGE_TAG $AWS_ACCOUNT_ID.dkr.ecr.$AWS_REGION.amazonaws.com/$REPOSITORY_ECR_REPOSITORY:$IMAGE_TAG",
          "echo Pushing the repository image...",
          "docker push $AWS_ACCOUNT_ID.dkr.ecr.$AWS_REGION.amazonaws.com/$REPOSITORY_ECR_REPOSITORY:$IMAGE_TAG"
        ]
      },
      post_build: {
        commands: ["echo post_build completed on `date`"]
      }
    }
  });
  return {
    Type: "AWS::CodeBuild::Project",
    Properties: {
      Artifacts: {
        Type: "NO_ARTIFACTS"
      },
      Cache: {
        Location: "LOCAL",
        Modes: ["LOCAL_DOCKER_LAYER_CACHE"],
        Type: "LOCAL"
      },
      Description: "Create repository image.",
      Environment: {
        ComputeType: "BUILD_GENERAL1_SMALL",
        EnvironmentVariables: [
          {
            Name: "AWS_ACCOUNT_ID",
            Value: { Ref: "AWS::AccountId" }
          },
          {
            Name: "AWS_REGION",
            Value: { Ref: "AWS::Region" }
          },
          {
            Name: "DOCKERFILE",
            Value: {
              "Fn::Sub": dockerfile
            }
          },
          {
            Name: "IMAGE_TAG",
            Value: "latest"
          },
          {
            Name: "REPOSITORY_ECR_REPOSITORY",
            Value: { Ref: ECR_REPOSITORY_LOGICAL_ID }
          },
          {
            Name: "SSH_KEY",
            Value: { Ref: "SSHKey" }
          },
          {
            Name: "SSH_URL",
            Value: { Ref: "SSHUrl" }
          }
        ],
        Image: "aws/codebuild/standard:3.0",
        ImagePullCredentialsType: "CODEBUILD",
        /**
         * Enables running the Docker daemon inside a Docker container. Set to
         * true only if the build project is used to build Docker images.
         * Otherwise, a build that attempts to interact with the Docker daemon
         * fails. The default setting is false."
         * https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-codebuild-project-environment.html#cfn-codebuild-project-environment-privilegedmode
         */
        PrivilegedMode: true,
        Type: "LINUX_CONTAINER"
      },
      LogsConfig: {
        CloudWatchLogs: {
          Status: "ENABLED",
          GroupName: { Ref: CODE_BUILD_PROJECT_LOGS_LOGICAL_ID }
        }
      },
      ServiceRole: {
        "Fn::GetAtt": [CODE_BUILD_PROJECT_SERVICE_ROLE_LOGICAL_ID, "Arn"]
      },
      Source: {
        BuildSpec: buildSpec,
        Type: "NO_SOURCE"
      },
      TimeoutInMinutes: 15
    }
  };
};
|
|
1690
|
+
// S3 key prefix under which pipeline trigger objects are stored:
// cicd/pipelines/triggers/<project-name>
var triggerPipelinesObjectKeyPrefix = [
  "cicd",
  "pipelines",
  "triggers",
  getProjectName()
].join("/");
|
|
1696
|
+
/**
 * Builds the CloudFormation/SAM template for a project's CICD stack.
 *
 * The generated template contains:
 *  - an ECR repository with a lifecycle policy that keeps only the latest image;
 *  - a CodeBuild log group, service role, and the repository-image CodeBuild
 *    project (from `getRepositoryImageBuilder`);
 *  - a scheduled (rate(7 days)) "image updater" serverless function;
 *  - a REST API (stage "v1") with POST /cicd and POST /github/webhooks handlers,
 *    plus the shared Lambda execution role and an ECS task report handler;
 *  - an ECS Fargate cluster, log group, execution/task roles, and task definition
 *    running the repository image;
 *  - optionally (when `pipelines` includes "main" and/or "tag") an artifact
 *    bucket, pipelines Lambda handler, pipeline role, and the CodePipeline
 *    pipelines themselves, triggered by S3 objects under
 *    `triggerPipelinesObjectKeyPrefix`.
 *
 * @param {object} options
 * @param {string[]} [options.pipelines=[]] - which pipelines to create ("main", "tag").
 * @param {*} [options.cpu=ECS_TASK_DEFAULT_CPU] - Fargate task CPU.
 * @param {*} [options.memory=ECS_TASK_DEFAULT_MEMORY] - Fargate task memory.
 * @param {{bucket: string, key: string, versionId: string}} options.s3 - location of the deployed Lambda code.
 * @param {string} options.slackWebhookUrl - forwarded to the ECS task report handler env.
 * @param {{name: string, value: string}[]} [options.taskEnvironment=[]] - extra container env vars.
 * @returns {object} a SAM template object (Resources, Parameters, Outputs).
 */
var getCicdTemplate = ({
  pipelines: pipelines2 = [],
  cpu = ECS_TASK_DEFAULT_CPU,
  memory = ECS_TASK_DEFAULT_MEMORY,
  s3: s32,
  slackWebhookUrl,
  taskEnvironment = []
}) => {
  const resources2 = {};
  /**
   * Environment variables shared by every Lambda that starts or reports on
   * ECS tasks (cluster/task-definition refs and VPC wiring imported from the
   * base stack).
   */
  const executeEcsTaskVariables = {
    ECS_CLUSTER_ARN: {
      "Fn::GetAtt": [REPOSITORY_TASKS_ECS_CLUSTER_LOGICAL_ID, "Arn"]
    },
    ECS_CONTAINER_NAME: REPOSITORY_ECS_TASK_CONTAINER_NAME,
    ECS_TASK_DEFINITION: {
      Ref: REPOSITORY_ECS_TASK_DEFINITION_LOGICAL_ID
    },
    VPC_SECURITY_GROUP: {
      "Fn::ImportValue": BASE_STACK_VPC_DEFAULT_SECURITY_GROUP_EXPORTED_NAME
    },
    VPC_PUBLIC_SUBNET_0: {
      "Fn::ImportValue": BASE_STACK_VPC_PUBLIC_SUBNET_0_EXPORTED_NAME
    },
    VPC_PUBLIC_SUBNET_1: {
      "Fn::ImportValue": BASE_STACK_VPC_PUBLIC_SUBNET_1_EXPORTED_NAME
    },
    VPC_PUBLIC_SUBNET_2: {
      "Fn::ImportValue": BASE_STACK_VPC_PUBLIC_SUBNET_2_EXPORTED_NAME
    },
    ECS_TASK_REPORT_HANDLER_NAME: {
      Ref: ECS_TASK_REPORT_HANDLER_LAMBDA_FUNCTION_LOGICAL_ID
    }
  };
  /**
   * ECR repository whose lifecycle policy expires everything beyond the
   * single most recent image.
   */
  const getEcrRepositoryResource = () => {
    return {
      Type: "AWS::ECR::Repository",
      Properties: {
        LifecyclePolicy: {
          LifecyclePolicyText: JSON.stringify(
            {
              rules: [
                {
                  rulePriority: 1,
                  description: "Only keep the latest image",
                  selection: {
                    tagStatus: "any",
                    countType: "imageCountMoreThan",
                    countNumber: 1
                  },
                  action: {
                    type: "expire"
                  }
                }
              ]
            },
            null,
            2
          )
        }
      }
    };
  };
  resources2[ECR_REPOSITORY_LOGICAL_ID] = getEcrRepositoryResource();
  // Properties shared by all AWS::Serverless::Function resources below:
  // same code bundle (from S3), role, runtime, and timeout.
  const commonFunctionProperties = {
    CodeUri: {
      Bucket: s32.bucket,
      Key: s32.key,
      Version: s32.versionId
    },
    Role: {
      "Fn::GetAtt": [FUNCTION_IAM_ROLE_LOGICAL_ID, "Arn"]
    },
    Runtime: NODE_RUNTIME,
    Timeout: 60
  };
  // --- CodeBuild project (repository image) + scheduled image updater ---
  (() => {
    resources2[CODE_BUILD_PROJECT_LOGS_LOGICAL_ID] = {
      Type: "AWS::Logs::LogGroup",
      DeletionPolicy: "Delete",
      Properties: {}
    };
    resources2[CODE_BUILD_PROJECT_SERVICE_ROLE_LOGICAL_ID] = {
      Type: "AWS::IAM::Role",
      Properties: {
        AssumeRolePolicyDocument: {
          Version: "2012-10-17",
          Statement: [
            {
              Effect: "Allow",
              Principal: {
                Service: "codebuild.amazonaws.com"
              },
              Action: "sts:AssumeRole"
            }
          ]
        },
        Path: getIamPath(),
        Policies: [
          {
            PolicyName: `${CODE_BUILD_PROJECT_SERVICE_ROLE_LOGICAL_ID}Policy`,
            PolicyDocument: {
              Version: "2012-10-17",
              Statement: [
                {
                  Effect: "Allow",
                  Action: ["logs:CreateLogStream", "logs:PutLogEvents"],
                  Resource: "*"
                },
                {
                  // GetAuthorizationToken does not support resource-level
                  // restrictions, hence "*".
                  Effect: "Allow",
                  Action: ["ecr:GetAuthorizationToken"],
                  Resource: "*"
                },
                {
                  Effect: "Allow",
                  Action: [
                    "ecr:BatchCheckLayerAvailability",
                    "ecr:CompleteLayerUpload",
                    "ecr:InitiateLayerUpload",
                    "ecr:PutImage",
                    "ecr:UploadLayerPart"
                  ],
                  Resource: {
                    "Fn::GetAtt": [ECR_REPOSITORY_LOGICAL_ID, "Arn"]
                  }
                }
              ]
            }
          }
        ]
      }
    };
    resources2[REPOSITORY_IMAGE_CODE_BUILD_PROJECT_LOGICAL_ID] = getRepositoryImageBuilder();
    const cicdConfig = {
      ...getCicdConfig(),
      "ssh-key": "/root/.ssh/id_rsa",
      environment: getEnvironment()
    };
    // Lambda that periodically (every 7 days) triggers a rebuild of the
    // repository image via the CodeBuild project above.
    resources2[IMAGE_UPDATER_SCHEDULE_SERVERLESS_FUNCTION_LOGICAL_ID] = {
      Type: "AWS::Serverless::Function",
      Properties: {
        ...commonFunctionProperties,
        Events: {
          Schedule: {
            Type: "Schedule",
            Properties: {
              Schedule: "rate(7 days)"
            }
          }
        },
        Environment: {
          Variables: {
            [PROCESS_ENV_REPOSITORY_IMAGE_CODE_BUILD_PROJECT_NAME]: {
              Ref: REPOSITORY_IMAGE_CODE_BUILD_PROJECT_LOGICAL_ID
            },
            CICD_CONFIG: JSON.stringify(cicdConfig),
            ...executeEcsTaskVariables
          }
        },
        Handler: "index.imageUpdaterScheduleHandler"
      }
    };
  })();
  // --- REST API, shared Lambda role, and API-backed handlers ---
  const createApiResources = () => {
    resources2[API_LOGICAL_ID] = {
      Type: "AWS::Serverless::Api",
      Properties: {
        Auth: {
          ApiKeyRequired: false
        },
        StageName: "v1"
      }
    };
    // Execution role shared by every Lambda in this template (see
    // commonFunctionProperties.Role above).
    resources2[FUNCTION_IAM_ROLE_LOGICAL_ID] = {
      Type: "AWS::IAM::Role",
      Properties: {
        AssumeRolePolicyDocument: {
          Version: "2012-10-17",
          Statement: [
            {
              Effect: "Allow",
              Principal: {
                Service: "lambda.amazonaws.com"
              },
              Action: ["sts:AssumeRole"]
            }
          ]
        },
        ManagedPolicyArns: [
          "arn:aws:iam::aws:policy/service-role/AWSLambdaBasicExecutionRole"
        ],
        Path: getIamPath(),
        Policies: [
          {
            PolicyName: `${FUNCTION_IAM_ROLE_LOGICAL_ID}Policy`,
            PolicyDocument: {
              Version: "2012-10-17",
              Statement: [
                {
                  Effect: "Allow",
                  Action: ["codebuild:StartBuild"],
                  Resource: {
                    "Fn::GetAtt": [
                      REPOSITORY_IMAGE_CODE_BUILD_PROJECT_LOGICAL_ID,
                      "Arn"
                    ]
                  }
                },
                {
                  // ecs:RunTask requires passing both task-definition roles.
                  Effect: "Allow",
                  Action: ["iam:PassRole"],
                  Resource: [
                    {
                      "Fn::GetAtt": [
                        REPOSITORY_TASKS_ECS_TASK_DEFINITION_EXECUTION_ROLE_LOGICAL_ID,
                        "Arn"
                      ]
                    },
                    {
                      "Fn::GetAtt": [
                        REPOSITORY_TASKS_ECS_TASK_DEFINITION_TASK_ROLE_LOGICAL_ID,
                        "Arn"
                      ]
                    }
                  ]
                },
                {
                  Effect: "Allow",
                  Action: ["ecs:DescribeTasks"],
                  Resource: "*"
                },
                {
                  Effect: "Allow",
                  Action: ["ecs:RunTask"],
                  Resource: [
                    {
                      Ref: REPOSITORY_ECS_TASK_DEFINITION_LOGICAL_ID
                    }
                  ]
                },
                {
                  Action: [
                    "codepipeline:PutApprovalResult",
                    "codepipeline:GetJobDetails",
                    "codepipeline:GetPipelineState",
                    "codepipeline:PutJobSuccessResult",
                    "codepipeline:PutJobFailureResult"
                  ],
                  Effect: "Allow",
                  Resource: "*"
                },
                {
                  // Read/write the S3 objects that trigger the pipelines.
                  Action: "s3:*",
                  Effect: "Allow",
                  Resource: {
                    "Fn::Sub": [
                      `arn:aws:s3:::\${BucketName}/${triggerPipelinesObjectKeyPrefix}*`,
                      {
                        BucketName: {
                          "Fn::ImportValue": BASE_STACK_BUCKET_NAME_EXPORTED_NAME
                        }
                      }
                    ]
                  }
                }
              ]
            }
          }
        ]
      }
    };
    // Lambda that reports ECS task status (reads the task log group,
    // posts to Slack via SLACK_WEBHOOK_URL).
    resources2[ECS_TASK_REPORT_HANDLER_LAMBDA_FUNCTION_LOGICAL_ID] = {
      Type: "AWS::Serverless::Function",
      Properties: {
        ...commonFunctionProperties,
        Environment: {
          Variables: {
            ECS_TASK_LOGS_LOG_GROUP: {
              Ref: REPOSITORY_TASKS_ECS_CLUSTER_LOGS_LOG_GROUP_LOGICAL_ID
            },
            ECS_TASK_CONTAINER_NAME: REPOSITORY_ECS_TASK_CONTAINER_NAME,
            SLACK_WEBHOOK_URL: slackWebhookUrl
          }
        },
        Handler: "index.ecsTaskReportHandler"
      }
    };
    // POST /cicd handler.
    resources2.CicdApiV1ServerlessFunction = {
      Type: "AWS::Serverless::Function",
      Properties: {
        ...commonFunctionProperties,
        Events: {
          ApiEvent: {
            Type: "Api",
            Properties: {
              Method: "POST",
              Path: "/cicd",
              RestApiId: { Ref: API_LOGICAL_ID }
            }
          }
        },
        Environment: {
          Variables: {
            [PROCESS_ENV_REPOSITORY_IMAGE_CODE_BUILD_PROJECT_NAME]: {
              Ref: REPOSITORY_IMAGE_CODE_BUILD_PROJECT_LOGICAL_ID
            },
            ...executeEcsTaskVariables
          }
        },
        Handler: "index.cicdApiV1Handler"
      }
    };
    // POST /github/webhooks handler; writes pipeline trigger objects to the
    // base-stack bucket under TRIGGER_PIPELINES_OBJECT_KEY_PREFIX.
    resources2.GitHubWebhooksApiV1ServerlessFunction = {
      Type: "AWS::Serverless::Function",
      Properties: {
        ...commonFunctionProperties,
        Events: {
          ApiEvent: {
            Type: "Api",
            Properties: {
              Method: "POST",
              Path: "/github/webhooks",
              RestApiId: { Ref: API_LOGICAL_ID }
            }
          }
        },
        Environment: {
          Variables: {
            BASE_STACK_BUCKET_NAME: {
              "Fn::ImportValue": BASE_STACK_BUCKET_NAME_EXPORTED_NAME
            },
            TRIGGER_PIPELINES_OBJECT_KEY_PREFIX: triggerPipelinesObjectKeyPrefix,
            PIPELINES_JSON: JSON.stringify(pipelines2),
            ...executeEcsTaskVariables
          }
        },
        Handler: "index.githubWebhooksApiV1Handler"
      }
    };
  };
  createApiResources();
  // --- ECS Fargate cluster, roles, and task definition ---
  (() => {
    resources2[REPOSITORY_TASKS_ECS_CLUSTER_LOGICAL_ID] = {
      Type: "AWS::ECS::Cluster",
      Properties: {}
    };
    resources2[REPOSITORY_TASKS_ECS_CLUSTER_LOGS_LOG_GROUP_LOGICAL_ID] = {
      Type: "AWS::Logs::LogGroup",
      DeletionPolicy: "Delete",
      Properties: {}
    };
    resources2[REPOSITORY_TASKS_ECS_TASK_DEFINITION_EXECUTION_ROLE_LOGICAL_ID] = {
      Type: "AWS::IAM::Role",
      Properties: {
        AssumeRolePolicyDocument: {
          Version: "2012-10-17",
          Statement: [
            {
              Effect: "Allow",
              Principal: {
                Service: "ecs-tasks.amazonaws.com"
              },
              Action: "sts:AssumeRole"
            }
          ]
        },
        ManagedPolicyArns: [
          "arn:aws:iam::aws:policy/service-role/AmazonECSTaskExecutionRolePolicy"
        ],
        Path: getIamPath()
      }
    };
    resources2[REPOSITORY_TASKS_ECS_TASK_DEFINITION_TASK_ROLE_LOGICAL_ID] = {
      Type: "AWS::IAM::Role",
      Properties: {
        AssumeRolePolicyDocument: {
          Version: "2012-10-17",
          Statement: [
            {
              Effect: "Allow",
              Principal: {
                Service: "ecs-tasks.amazonaws.com"
              },
              Action: "sts:AssumeRole"
            }
          ]
        },
        ManagedPolicyArns: [
          "arn:aws:iam::aws:policy/job-function/ViewOnlyAccess"
        ],
        Path: getIamPath(),
        /**
         * TODO: improve the policies rules. This currently grants the task
         * role "*" on "*" — far broader than needed.
         */
        Policies: [
          {
            PolicyName: `${REPOSITORY_TASKS_ECS_TASK_DEFINITION_TASK_ROLE_LOGICAL_ID}Policy`,
            PolicyDocument: {
              Version: "2012-10-17",
              Statement: [
                {
                  Effect: "Allow",
                  Action: ["*"],
                  Resource: "*"
                }
              ]
            }
          }
        ]
      }
    };
    resources2[REPOSITORY_ECS_TASK_DEFINITION_LOGICAL_ID] = {
      Type: "AWS::ECS::TaskDefinition",
      Properties: {
        ContainerDefinitions: [
          {
            Environment: [
              {
                /**
                 * https://docs.aws.amazon.com/AmazonECS/latest/developerguide/container-metadata.html#enable-metadata
                 */
                Name: "ECS_ENABLE_CONTAINER_METADATA",
                Value: "true"
              },
              {
                Name: "CI",
                Value: "true"
              },
              // Caller-provided extra env vars, mapped to the ECS shape.
              ...taskEnvironment.map((te) => {
                return {
                  Name: te.name,
                  Value: te.value
                };
              })
            ],
            // The :latest image from the ECR repository created above.
            Image: {
              "Fn::Sub": [
                // eslint-disable-next-line no-template-curly-in-string
                "${AWS::AccountId}.dkr.ecr.${AWS::Region}.amazonaws.com/${RepositoryECR}:latest",
                {
                  RepositoryECR: { Ref: ECR_REPOSITORY_LOGICAL_ID }
                }
              ]
            },
            LogConfiguration: {
              LogDriver: "awslogs",
              Options: {
                "awslogs-group": {
                  Ref: REPOSITORY_TASKS_ECS_CLUSTER_LOGS_LOG_GROUP_LOGICAL_ID
                },
                "awslogs-region": { Ref: "AWS::Region" },
                "awslogs-stream-prefix": "ecs"
              }
            },
            Name: REPOSITORY_ECS_TASK_CONTAINER_NAME
          }
        ],
        Cpu: cpu,
        ExecutionRoleArn: {
          "Fn::GetAtt": [
            REPOSITORY_TASKS_ECS_TASK_DEFINITION_EXECUTION_ROLE_LOGICAL_ID,
            "Arn"
          ]
        },
        Memory: memory,
        NetworkMode: "awsvpc",
        RequiresCompatibilities: ["FARGATE"],
        TaskRoleArn: {
          "Fn::GetAtt": [
            REPOSITORY_TASKS_ECS_TASK_DEFINITION_TASK_ROLE_LOGICAL_ID,
            "Arn"
          ]
        }
      }
    };
  })();
  // --- Optional CodePipeline resources for the "main"/"tag" pipelines ---
  if (pipelines2.includes("main") || pipelines2.includes("tag")) {
    resources2[PIPELINES_ARTIFACT_STORE_S3_BUCKET_LOGICAL_ID] = {
      Type: "AWS::S3::Bucket",
      Properties: {
        LifecycleConfiguration: {
          Rules: [
            {
              /**
               * We won't use the artifacts forever.
               */
              ExpirationInDays: 7,
              Status: "Enabled"
            }
          ]
        }
      }
    };
    // Lambda invoked by the pipelines to run the ECS tasks.
    resources2[PIPELINES_HANDLER_LAMBDA_FUNCTION_LOGICAL_ID] = {
      Type: "AWS::Lambda::Function",
      Properties: {
        Code: {
          S3Bucket: s32.bucket,
          S3Key: s32.key,
          S3ObjectVersion: s32.versionId
        },
        Environment: {
          Variables: {
            ...executeEcsTaskVariables
          }
        },
        Handler: "index.pipelinesHandler",
        MemorySize: 128,
        Role: {
          "Fn::GetAtt": [FUNCTION_IAM_ROLE_LOGICAL_ID, "Arn"]
        },
        Runtime: NODE_RUNTIME,
        Timeout: 60
      }
    };
    resources2[PIPELINES_ROLE_LOGICAL_ID] = {
      Type: "AWS::IAM::Role",
      Properties: {
        AssumeRolePolicyDocument: {
          Version: "2012-10-17",
          Statement: [
            {
              Effect: "Allow",
              Principal: {
                Service: "codepipeline.amazonaws.com"
              },
              Action: "sts:AssumeRole"
            }
          ]
        },
        ManagedPolicyArns: [],
        Path: getIamPath(),
        Policies: [
          {
            PolicyName: `${PIPELINES_ROLE_LOGICAL_ID}Policy`,
            PolicyDocument: {
              Version: "2012-10-17",
              Statement: [
                {
                  Effect: "Allow",
                  Action: "lambda:InvokeFunction",
                  Resource: [
                    {
                      "Fn::GetAtt": [
                        PIPELINES_HANDLER_LAMBDA_FUNCTION_LOGICAL_ID,
                        "Arn"
                      ]
                    }
                  ]
                },
                {
                  // Full access to the artifact bucket and its objects.
                  Effect: "Allow",
                  Action: "s3:*",
                  Resource: [
                    {
                      "Fn::GetAtt": [
                        PIPELINES_ARTIFACT_STORE_S3_BUCKET_LOGICAL_ID,
                        "Arn"
                      ]
                    },
                    {
                      "Fn::Sub": `arn:aws:s3:::\${${PIPELINES_ARTIFACT_STORE_S3_BUCKET_LOGICAL_ID}}/*`
                    }
                  ]
                },
                {
                  // Access to the trigger objects on the base-stack bucket.
                  Effect: "Allow",
                  Action: "s3:*",
                  Resource: {
                    "Fn::Sub": [
                      `arn:aws:s3:::\${BucketName}/${triggerPipelinesObjectKeyPrefix}*`,
                      {
                        BucketName: {
                          "Fn::ImportValue": BASE_STACK_BUCKET_NAME_EXPORTED_NAME
                        }
                      }
                    ]
                  }
                },
                {
                  Effect: "Allow",
                  Action: ["s3:Get*", "s3:List*"],
                  Resource: {
                    "Fn::Sub": [
                      `arn:aws:s3:::\${BucketName}`,
                      {
                        BucketName: {
                          "Fn::ImportValue": BASE_STACK_BUCKET_NAME_EXPORTED_NAME
                        }
                      }
                    ]
                  }
                }
              ]
            }
          }
        ]
      }
    };
    /**
     * Builds one AWS::CodePipeline::Pipeline for the given pipeline name
     * ("main" or "tag"): an S3-source stage (triggered by the corresponding
     * trigger object), then a stage that invokes the pipelines handler Lambda
     * followed by a manual approval action.
     */
    const getCodePipelinePipeline = (pipeline) => {
      const pipelinePascalCase = pascalCase(pipeline);
      const pipelineS3SourceOutputName = `Pipeline${pipelinePascalCase}S3SourceOutput`;
      return {
        Type: "AWS::CodePipeline::Pipeline",
        Properties: {
          ArtifactStore: {
            Location: { Ref: PIPELINES_ARTIFACT_STORE_S3_BUCKET_LOGICAL_ID },
            Type: "S3"
          },
          RestartExecutionOnUpdate: false,
          RoleArn: {
            "Fn::GetAtt": [PIPELINES_ROLE_LOGICAL_ID, "Arn"]
          },
          Stages: [
            {
              Actions: [
                {
                  ActionTypeId: {
                    Category: "Source",
                    Owner: "AWS",
                    Provider: "S3",
                    Version: 1
                  },
                  Configuration: {
                    S3Bucket: {
                      "Fn::ImportValue": BASE_STACK_BUCKET_NAME_EXPORTED_NAME
                    },
                    S3ObjectKey: getTriggerPipelinesObjectKey({
                      prefix: triggerPipelinesObjectKeyPrefix,
                      pipeline
                    })
                  },
                  Name: `Pipeline${pipelinePascalCase}S3SourceAction`,
                  OutputArtifacts: [
                    {
                      Name: pipelineS3SourceOutputName
                    }
                  ]
                }
              ],
              Name: `Pipeline${pipelinePascalCase}S3SourceStage`
            },
            {
              Actions: [
                {
                  ActionTypeId: {
                    Category: "Invoke",
                    Owner: "AWS",
                    Provider: "Lambda",
                    Version: 1
                  },
                  Configuration: {
                    FunctionName: {
                      Ref: PIPELINES_HANDLER_LAMBDA_FUNCTION_LOGICAL_ID
                    },
                    // The pipeline name is passed verbatim as UserParameters.
                    UserParameters: /* @__PURE__ */ (() => {
                      return pipeline;
                    })()
                  },
                  InputArtifacts: [
                    {
                      Name: pipelineS3SourceOutputName
                    }
                  ],
                  Name: `Pipeline${pipelinePascalCase}RunECSTasksAction`
                },
                {
                  ActionTypeId: {
                    Category: "Approval",
                    Owner: "AWS",
                    Provider: "Manual",
                    Version: 1
                  },
                  Name: PIPELINE_ECS_TASK_EXECUTION_MANUAL_APPROVAL_ACTION_NAME
                }
              ],
              Name: PIPELINE_ECS_TASK_EXECUTION_STAGE_NAME
            }
          ]
        }
      };
    };
    if (pipelines2.includes("main")) {
      resources2[PIPELINES_MAIN_LOGICAL_ID] = getCodePipelinePipeline("main");
    }
    if (pipelines2.includes("tag")) {
      resources2[PIPELINES_TAG_LOGICAL_ID] = getCodePipelinePipeline("tag");
    }
  }
  return {
    AWSTemplateFormatVersion: "2010-09-09",
    Transform: "AWS::Serverless-2016-10-31",
    Resources: resources2,
    Parameters: {
      SSHKey: {
        NoEcho: true,
        Type: "String"
      },
      SSHUrl: {
        Type: "String"
      }
    },
    Outputs: {
      [REPOSITORY_IMAGE_CODE_BUILD_PROJECT_LOGICAL_ID]: {
        Value: { Ref: REPOSITORY_IMAGE_CODE_BUILD_PROJECT_LOGICAL_ID }
      },
      ApiV1Endpoint: {
        Description: "CICD API v1 stage endpoint.",
        Value: {
          "Fn::Sub": `https://\${${API_LOGICAL_ID}}.execute-api.\${AWS::Region}.amazonaws.com/v1/`
        }
      }
    }
  };
};
// npmlog prefix for the Lambda build helpers below.
var logPrefix5 = "lambda";
/**
 * Bundles the Lambda entry points into single files with esbuild.
 *
 * Removes `lambdaOutdir` first so stale bundles never survive, then writes
 * one bundle per entry point (".mjs" for the "esm" format, ".cjs" otherwise).
 * AWS SDK v3 packages, Node built-ins, and `lambdaExternal` names stay
 * unbundled.
 *
 * @param {object} options
 * @param {string[]} options.lambdaEntryPoints - entry files, relative to the base dir.
 * @param {string} [options.lambdaEntryPointsBaseDir="."] - base dir, resolved against cwd.
 * @param {string[]} [options.lambdaExternal=[]] - extra module names to keep external.
 * @param {"esm"|"cjs"} [options.lambdaFormat="esm"] - output module format.
 * @param {string} options.lambdaOutdir - output directory, relative to cwd.
 * @throws {AggregateError} when esbuild reports build errors.
 */
var buildLambdaCode = async ({
  lambdaEntryPoints,
  lambdaEntryPointsBaseDir = ".",
  lambdaExternal = [],
  lambdaFormat = "esm",
  lambdaOutdir
}) => {
  log5.info(logPrefix5, "Building Lambda single file...");
  // Start from a clean output directory.
  if (fs4__default.existsSync(lambdaOutdir)) {
    fs4__default.rmSync(lambdaOutdir, { recursive: true });
  }
  const entryPoints = lambdaEntryPoints.map((entryPoint) => {
    return path3__default.resolve(process.cwd(), lambdaEntryPointsBaseDir, entryPoint);
  });
  const { errors } = esbuild.buildSync({
    banner: {
      js: "// Powered by carlin (https://ttoss.dev/docs/carlin/)"
    },
    bundle: true,
    entryPoints,
    external: [
      /**
       * Only AWS SDK v3 on Node.js 18.x or higher.
       * https://aws.amazon.com/blogs/compute/node-js-18-x-runtime-now-available-in-aws-lambda/
       */
      "@aws-sdk/*",
      ...builtinModules,
      ...lambdaExternal
    ],
    /**
     * Some packages as `graphql` are not compatible with ESM yet.
     * https://github.com/graphql/graphql-js/issues/3603
     */
    format: lambdaFormat,
    /**
     * https://esbuild.github.io/api/#minify
     */
    minifySyntax: true,
    platform: "node",
    splitting: lambdaFormat === "esm",
    outbase: path3__default.join(process.cwd(), lambdaEntryPointsBaseDir),
    outdir: path3__default.join(process.cwd(), lambdaOutdir),
    outExtension: { ".js": lambdaFormat === "esm" ? ".mjs" : ".cjs" },
    target: typescriptConfig.target,
    treeShaking: true
  });
  if (errors.length > 0) {
    // Throw a real Error (the original threw a bare array, which loses the
    // stack trace and breaks `instanceof Error` handling in callers).
    throw new AggregateError(errors, "esbuild failed to bundle the Lambda code.");
  }
};
// Prefix shared by all CloudFormation stacks that publish a Lambda layer.
var lambdaLayerStackNamePrefix = `LambdaLayer`;
/**
 * Derives the Lambda-layer stack name for a versioned package spec,
 * e.g. "@scope/pkg@1.2.3" -> "LambdaLayer-ScopePkg-1-2-3".
 */
var getPackageLambdaLayerStackName = (packageName) => {
  // Splitting on "@" and dropping empty parts handles both scoped
  // ("@scope/pkg@1.2.3") and unscoped ("pkg@1.2.3") specs.
  const [scopedName, version] = packageName.split("@").filter(Boolean);
  const versionSlug = version.replace(/[^0-9.]/g, "").replace(/\./g, "-");
  return [lambdaLayerStackNamePrefix, pascalCase(scopedName), versionSlug].join("-");
};
// npmlog prefix for the Lambda-layer deploy helpers below.
var logPrefix6 = "lambda-layer";
/**
 * Builds the zip artifact for a Lambda layer by starting a CodeBuild build
 * and waiting for it to finish.
 *
 * Passes the package name to the build via the PACKAGE_NAME environment
 * variable, then parses the build's artifact location
 * ("arn:aws:s3:::<bucket>/<key>") into its bucket and key.
 *
 * @param {object} options
 * @param {string} options.codeBuildProjectName - CodeBuild project that builds the layer.
 * @param {string} options.packageName - versioned package spec (e.g. "pkg@1.2.3").
 * @returns {Promise<{bucket: string, key: string}>} S3 location of the zip artifact.
 * @throws {Error} if the build cannot start, the bucket cannot be parsed,
 *   or the build produced no artifact location.
 */
var createLambdaLayerZipFile = async ({
  codeBuildProjectName,
  packageName
}) => {
  log5.info(logPrefix6, `Creating zip file for package ${packageName}...`);
  const codeBuild2 = new AWS.CodeBuild();
  const { build } = await codeBuild2.startBuild({
    environmentVariablesOverride: [
      {
        name: "PACKAGE_NAME",
        value: packageName
      }
    ],
    projectName: codeBuildProjectName
  }).promise();
  if (!build?.id) {
    throw new Error("Cannot start build.");
  }
  // Poll until the CodeBuild build completes (helper defined elsewhere).
  const result = await waitCodeBuildFinish({
    buildId: build.id,
    name: packageName
  });
  if (result.artifacts?.location) {
    // Artifact location looks like "arn:aws:s3:::bucket/path/to/artifact".
    const location = result.artifacts.location.split("/");
    const bucket = location.shift()?.replace("arn:aws:s3:::", "");
    if (!bucket) {
      throw new Error("Cannot retrieve bucket name.");
    }
    const key = location.join("/");
    return { bucket, key };
  }
  throw new Error(`Cannot get artifact location for package ${packageName}`);
};
/**
 * Builds the CloudFormation template for a single Lambda layer.
 *
 * The layer content comes from the given S3 bucket/key, the layer name is
 * the stack name, and the layer version is exported under the stack name so
 * other stacks can import it. The package name (truncated to 256 chars,
 * the CloudFormation description limit) is used as the description.
 */
var getLambdaLayerTemplate = ({
  bucket,
  key,
  packageName
}) => {
  const description = packageName.substring(0, 256);
  const layerResource = {
    Type: "AWS::Lambda::LayerVersion",
    Properties: {
      CompatibleRuntimes: [NODE_RUNTIME],
      Content: {
        S3Bucket: bucket,
        S3Key: key
      },
      Description: description,
      LayerName: { Ref: "AWS::StackName" }
    }
  };
  const outputs = {
    LambdaLayerVersion: {
      Description: description,
      Value: { Ref: "LambdaLayer" },
      Export: {
        Name: { Ref: "AWS::StackName" }
      }
    }
  };
  return {
    AWSTemplateFormatVersion: "2010-09-09",
    Resources: { LambdaLayer: layerResource },
    Outputs: outputs
  };
};
/**
 * Filters `packages` down to those whose Lambda-layer stack does not exist
 * yet. Stack existence checks run in parallel.
 */
var getPackagesThatAreNotDeployed = async ({
  packages
}) => {
  const checks = packages.map(async (packageName) => {
    const stackName = getPackageLambdaLayerStackName(packageName);
    const exists = await doesStackExist({ stackName });
    // Map already-deployed packages to "" so they can be filtered out.
    return exists ? "" : packageName;
  });
  const results = await Promise.all(checks);
  return results.filter((packageName) => {
    return Boolean(packageName);
  });
};
/**
 * Deploys one Lambda-layer CloudFormation stack per package.
 *
 * For each package: builds the layer zip via CodeBuild, generates the layer
 * template, and deploys it (with termination protection) under the
 * package-specific stack name. Per-package failures are routed through
 * `handleDeployError` rather than aborting the whole batch.
 *
 * @param {object} options
 * @param {string[]} options.packages - versioned package specs (e.g. "pkg@1.2.3").
 * @param {boolean} [options.deployIfExists=true] - when false, packages whose
 *   layer stack already exists are skipped.
 */
var deployLambdaLayer = async ({
  packages,
  deployIfExists = true
}) => {
  try {
    const packagesToBeDeployed = deployIfExists ? packages : await getPackagesThatAreNotDeployed({ packages });
    if (packagesToBeDeployed.length === 0) {
      return;
    }
    // The CodeBuild project that builds layer zips lives in the base stack.
    const codeBuildProjectName = await getBaseStackResource(
      "BASE_STACK_LAMBDA_LAYER_BUILDER_LOGICAL_NAME"
    );
    if (!codeBuildProjectName) {
      throw new Error(
        "Cannot deploy lambda-layer because AWS CodeBuild project doesn't exist."
      );
    }
    const deployLambdaLayerSinglePackage = async (packageName) => {
      try {
        const { bucket, key } = await createLambdaLayerZipFile({
          codeBuildProjectName,
          packageName
        });
        const lambdaLayerTemplate = getLambdaLayerTemplate({
          packageName,
          bucket,
          key
        });
        await deploy({
          template: lambdaLayerTemplate,
          terminationProtection: true,
          params: { StackName: getPackageLambdaLayerStackName(packageName) }
        });
      } catch (error) {
        // Report (don't rethrow) so one failing package doesn't stop the rest.
        handleDeployError({ error, logPrefix: logPrefix6 });
      }
    };
    // Deploy all packages in parallel.
    await Promise.all(
      packagesToBeDeployed.map((packageName) => {
        return deployLambdaLayerSinglePackage(packageName);
      })
    );
  } catch (error) {
    handleDeployError({ error, logPrefix: logPrefix6 });
  }
};
// npmlog prefix for deployLambdaLayers.
var logPrefix7 = "lambda";
/**
 * Deploys a Lambda layer for each external dependency listed in
 * `lambdaExternal`.
 *
 * Reads the dependency versions from the current working directory's
 * package.json, strips "^"/"~" range prefixes to pin exact versions, and
 * delegates to `deployLambdaLayer` (skipping layers that already exist).
 *
 * @param {object} options
 * @param {string[]} [options.lambdaExternal=[]] - dependency names to publish as layers.
 * @throws {Error} when an external name is missing from package.json dependencies.
 */
var deployLambdaLayers = async ({
  lambdaExternal = []
}) => {
  if (lambdaExternal.length === 0) {
    return;
  }
  log5.info(
    logPrefix7,
    `--lambda-externals [${lambdaExternal.join(
      ", "
    )}] was found. Creating other layers...`
  );
  const { dependencies = {} } = (() => {
    try {
      return JSON.parse(
        fs3.readFileSync(path.resolve(process.cwd(), "package.json"), "utf8")
      );
    } catch (err) {
      // Best-effort: a missing/unreadable package.json just means no
      // dependencies can be resolved (each external will then fail below).
      log5.error(
        logPrefix7,
        "Cannot read package.json. Error message: %j",
        err.message
      );
      return {};
    }
  })();
  const packages = lambdaExternal.map((external) => {
    // Explicit lookup instead of try/catch-as-control-flow: a missing
    // dependency is an expected condition, not an exceptional one.
    const range = dependencies[external];
    if (!range) {
      throw new Error(`Cannot find ${external} on package.json dependencies.`);
    }
    // Strip "^"/"~" so the layer pins the exact version from package.json.
    const semver2 = range.replace(/(~|\^)/g, "");
    return `${external}@${semver2}`;
  });
  await deployLambdaLayer({ packages, deployIfExists: false });
};
// Constructs an AWS SDK v2 CodeBuild client whose result is discarded.
// NOTE(review): looks like bundler residue from a removed module-level
// binding — nothing in this file uses the constructed client; confirm
// against the original source before removing.
new AWS.CodeBuild({ region: AWS_DEFAULT_REGION });
/**
 * Placeholder for deploying Lambda code as a container image to ECR.
 * Not implemented yet — always rejects with the same error.
 */
var uploadCodeToECR = async ({
  bucket,
  key,
  lambdaExternal,
  lambdaDockerfile
}) => {
  throw new Error("uploadCodeToECR not finished yet.");
};
// npmlog prefix for the S3 upload helper.
var logPrefix8 = "lambda";
// Name of the zip archive written into (and uploaded from) the Lambda outdir.
var zipFileName = "lambda.zip";
/**
 * Zips the Lambda output directory and uploads the archive to the base
 * stack's bucket under "lambdas/<stackName>/lambda.zip".
 *
 * Removes any stale archive first so it is not re-zipped into the new one.
 * Returns whatever `uploadFileToS3` returns (bucket/key/versionId).
 */
var uploadCodeToS3 = async ({
  stackName,
  lambdaOutdir
}) => {
  log5.info(logPrefix8, `Uploading code to S3...`);
  const zipFile = `${lambdaOutdir}/${zipFileName}`;
  if (fs4__default.existsSync(zipFile)) {
    await fs4__default.promises.rm(zipFile);
  }
  const archive = new AdmZip();
  archive.addLocalFolder(lambdaOutdir);
  archive.writeZip(zipFile);
  const bucketName = await getBaseStackResource(
    "BASE_STACK_BUCKET_LOGICAL_NAME"
  );
  return uploadFileToS3({
    bucket: bucketName,
    contentType: "application/zip",
    key: `lambdas/${stackName}/${zipFileName}`,
    file: archive.toBuffer()
  });
};
// npmlog prefix for deployLambdaCode.
var logPrefix9 = "lambda";
/**
 * Builds and publishes the Lambda code for a stack.
 *
 * Validates that every entry point exists, bundles them with
 * `buildLambdaCode`, and uploads the resulting zip to S3. When no
 * `lambdaImage` is requested it also deploys layers for the external
 * dependencies and returns the S3 location; otherwise it attempts the ECR
 * image path (currently unimplemented — `uploadCodeToECR` always throws).
 *
 * @param {object} options
 * @param {string} [options.lambdaDockerfile] - Dockerfile for the (unfinished) image path.
 * @param {string[]} [options.lambdaExternal=[]] - dependencies kept external / layered.
 * @param {boolean} [options.lambdaImage] - when truthy, deploy as a container image.
 * @param {string[]} options.lambdaEntryPoints - entry files, relative to the base dir.
 * @param {string} [options.lambdaEntryPointsBaseDir="src"]
 * @param {"esm"|"cjs"} [options.lambdaFormat]
 * @param {string} [options.lambdaOutdir="dist"]
 * @param {string} options.stackName - used to build the S3 key.
 * @returns {Promise<object>} `{}` when there are no entry points,
 *   `{bucket, key, versionId}` for the S3 path, or `{imageUri}` for the image path.
 */
var deployLambdaCode = async ({
  lambdaDockerfile,
  lambdaExternal = [],
  lambdaImage,
  lambdaEntryPoints,
  lambdaEntryPointsBaseDir = "src",
  lambdaFormat,
  lambdaOutdir = "dist",
  stackName
}) => {
  if (!lambdaEntryPoints.length) {
    return {};
  }
  log5.info(logPrefix9, "Deploying Lambda code...");
  // Fail fast if any entry point is missing before invoking esbuild.
  for (const entryPoint of lambdaEntryPoints) {
    const entryPointPath = path3__default.resolve(lambdaEntryPointsBaseDir, entryPoint);
    if (!fs4__default.existsSync(entryPointPath)) {
      throw new Error(`Entry point ${entryPointPath} does not exist.`);
    }
  }
  await buildLambdaCode({
    lambdaExternal,
    lambdaEntryPoints,
    lambdaEntryPointsBaseDir,
    lambdaFormat,
    lambdaOutdir
  });
  const { bucket, key, versionId } = await uploadCodeToS3({
    stackName,
    lambdaOutdir
  });
  if (!lambdaImage) {
    await deployLambdaLayers({ lambdaExternal });
    return { bucket, key, versionId };
  }
  // NOTE(review): `versionId` is passed here but uploadCodeToECR's signature
  // does not declare it (and that function currently always throws) —
  // presumably leftovers of the unfinished image path; confirm when it lands.
  const { imageUri } = await uploadCodeToECR({
    bucket,
    key,
    versionId,
    lambdaDockerfile,
    lambdaExternal
  });
  return { imageUri };
};
/**
 * Builds the CICD stack name: PascalCase of "<NAME> Cicd <project>".
 */
var getCicdStackName = () => {
  const projectName = getProjectName();
  const nameParts = [NAME, "Cicd", projectName];
  return pascalCase(nameParts.join(" "));
};
|
|
2719
|
+
var logPrefix10 = "cicd";
/**
 * Resolves the bundled CICD lambdas file (`lambdas/index.<extension>`)
 * relative to this module's directory.
 */
var getLambdaInput = (extension) => {
  const lambdaFileName = `lambdas/index.${extension}`;
  return path.resolve(__dirname, lambdaFileName);
};
|
|
2723
|
+
/**
 * Deploys the bundled CICD lambdas code to S3 and returns the resulting
 * `{ bucket, key, versionId }` descriptor. Prefers the prebuilt `.js` file
 * and falls back to the `.ts` source; throws if neither exists or if the
 * upload did not yield a bucket.
 */
var deployCicdLambdas = async ({ stackName }) => {
  const candidates = ["js", "ts"].map(getLambdaInput);
  const lambdaInput = candidates.find((candidate) => {
    return fs3.existsSync(candidate);
  });
  if (!lambdaInput) {
    throw new Error("Cannot read CICD lambdas file.");
  }
  const s32 = await deployLambdaCode({
    lambdaEntryPoints: [lambdaInput],
    lambdaExternal: [],
    /**
     * Needs stackName to define the S3 key.
     */
    stackName
  });
  if (!s32 || !s32.bucket) {
    throw new Error(
      "Cannot retrieve bucket in which Lambda code was deployed."
    );
  }
  return s32;
};
|
|
2748
|
+
/**
 * Triggers the CodeBuild project that rebuilds the repository image and
 * waits for the build to finish. The project name is read from the stack
 * output keyed by REPOSITORY_IMAGE_CODE_BUILD_PROJECT_LOGICAL_ID.
 */
var waitRepositoryImageUpdate = async ({
  stackName
}) => {
  log5.info(logPrefix10, "Starting repository image update...");
  const { OutputValue: projectName } = await getStackOutput({
    stackName,
    outputKey: REPOSITORY_IMAGE_CODE_BUILD_PROJECT_LOGICAL_ID
  });
  if (!projectName) {
    throw new Error(`Cannot retrieve repository image CodeBuild project name.`);
  }
  const { id: buildId } = await startCodeBuildBuild({ projectName });
  // Only wait when CodeBuild actually returned a build id.
  if (buildId) {
    await waitCodeBuildFinish({ buildId, name: stackName });
  }
};
|
|
2764
|
+
/**
 * Deploys the CICD CloudFormation stack (with termination protection) and
 * optionally triggers a repository image rebuild afterwards. Errors are
 * routed through handleDeployError.
 */
var deployCicd = async ({
  cpu,
  memory,
  pipelines: pipelines2,
  updateRepository,
  slackWebhookUrl,
  sshKey,
  sshUrl,
  taskEnvironment
}) => {
  try {
    const { stackName } = await handleDeployInitialization({
      logPrefix: logPrefix10,
      stackName: getCicdStackName()
    });
    // Upload the CICD lambdas first; the template embeds their S3 location.
    const cicdLambdasS3 = await deployCicdLambdas({ stackName });
    const template = getCicdTemplate({
      cpu,
      memory,
      pipelines: pipelines2,
      s3: cicdLambdasS3,
      slackWebhookUrl,
      taskEnvironment
    });
    await deploy({
      template,
      params: {
        StackName: stackName,
        Parameters: [
          { ParameterKey: "SSHUrl", ParameterValue: sshUrl },
          { ParameterKey: "SSHKey", ParameterValue: sshKey }
        ]
      },
      terminationProtection: true
    });
    if (updateRepository) {
      await waitRepositoryImageUpdate({ stackName });
    }
  } catch (error) {
    handleDeployError({ error, logPrefix: logPrefix10 });
  }
};
|
|
2804
|
+
/**
 * Reads an SSH key file from disk and returns its contents as a UTF-8 string.
 */
var readSSHKey = (dir) => {
  const sshKeyContents = fs3.readFileSync(dir, "utf-8");
  return sshKeyContents;
};
|
|
2807
|
+
var logPrefix11 = "deploy-cicd";
/**
 * Yargs command definition for `deploy cicd`. The CICD stack is never
 * destroyed via the CLI; `--destroy` only logs a notice.
 */
var deployCicdCommand = {
  command: "cicd",
  describe: "Deploy CICD.",
  builder: (yargs3) => {
    return yargs3.options(addGroupToOptions(options, "Deploy CICD Options"));
  },
  handler: ({ destroy: destroy2, ...rest }) => {
    if (!destroy2) {
      // Fire-and-forget: yargs handlers are not awaited.
      deployCicd({
        ...rest,
        sshKey: readSSHKey(rest["ssh-key"])
      });
      return;
    }
    log5.info(logPrefix11, `${NAME} doesn't destroy CICD stack.`);
  }
};
|
|
2825
|
+
/**
 * js-yaml type descriptors for CloudFormation short-form intrinsics
 * (!Ref, !Sub, !GetAtt, ...). Each entry maps a YAML tag + node kind to a
 * construct function producing the long-form object.
 * Built from a compact [tag, kind, construct] table.
 */
var cloudFormationTypes = [
  ["!Equals", "sequence", (data) => ({ "Fn::Equals": data })],
  ["!FindInMap", "sequence", (data) => ({ "Fn::FindInMap": data })],
  // Scalar !GetAtt uses dotted syntax ("Resource.Attribute") and must be
  // split into the long-form list.
  ["!GetAtt", "scalar", (data) => ({ "Fn::GetAtt": data.split(".") })],
  ["!GetAtt", "sequence", (data) => ({ "Fn::GetAtt": data })],
  ["!If", "sequence", (data) => ({ "Fn::If": data })],
  ["!ImportValue", "scalar", (data) => ({ "Fn::ImportValue": data })],
  ["!Join", "sequence", (data) => ({ "Fn::Join": data })],
  ["!Not", "sequence", (data) => ({ "Fn::Not": data })],
  // !Ref maps to plain "Ref", not "Fn::Ref".
  ["!Ref", "scalar", (data) => ({ Ref: data })],
  ["!Sub", "scalar", (data) => ({ "Fn::Sub": data })],
  ["!Sub", "sequence", (data) => ({ "Fn::Sub": data })]
].map(([tag, kind, construct]) => {
  return { tag, options: { kind, construct } };
});
|
|
2926
|
+
/**
 * Converts {tag, options} descriptors into js-yaml Type instances.
 */
var getYamlTypes = (tagAndTypeArr) => {
  return tagAndTypeArr.map(({ tag, options: options9 }) => new yaml4.Type(tag, options9));
};
|
|
2931
|
+
/**
 * Extends the default js-yaml schema with the CloudFormation intrinsic tags
 * plus any caller-supplied extra tag descriptors.
 */
var getSchema = (tagAndTypeArr = []) => {
  const allDescriptors = [...tagAndTypeArr, ...cloudFormationTypes];
  return yaml4.DEFAULT_SCHEMA.extend(getYamlTypes(allDescriptors));
};
|
|
2936
|
+
/**
 * Parses a YAML CloudFormation template string using the extended schema
 * (CloudFormation short-form tags plus optional extra tags).
 */
var loadCloudFormationTemplate = (template, tagAndTypeArr = []) => {
  const schema = getSchema(tagAndTypeArr);
  return yaml4.load(template, { schema });
};
|
|
2939
|
+
|
|
2940
|
+
// ../cloudformation/src/readCloudFormationYamlTemplate.ts
|
|
2941
|
+
/**
 * Extra YAML tag descriptors for template parsing. Currently only
 * `!SubString`, which inlines the contents of a file resolved against the
 * current working directory.
 */
var getTypes = () => {
  const subStringType = {
    tag: `!SubString`,
    options: {
      kind: "scalar",
      construct: (filePath) => {
        const absolutePath = path.resolve(process.cwd(), filePath);
        return fs3.readFileSync(absolutePath).toString();
      }
    }
  };
  return [subStringType];
};
|
|
2954
|
+
/**
 * Reads and parses a YAML CloudFormation template from disk.
 * Throws if the parse result is empty or a bare string (i.e. not a
 * template object).
 */
var readCloudFormationYamlTemplate = ({
  templatePath
}) => {
  const rawTemplate = fs3.readFileSync(templatePath).toString();
  const parsedTemplate = loadCloudFormationTemplate(rawTemplate, getTypes());
  const isTemplateObject = parsedTemplate && typeof parsedTemplate !== "string";
  if (!isTemplateObject) {
    throw new Error("Cannot parse CloudFormation template.");
  }
  return parsedTemplate;
};
|
|
2964
|
+
/**
 * Bundles a TypeScript config entry point with esbuild into
 * `<cwd>/out/<name>.js` and imports the result, returning its default
 * export (or a `config` named export as fallback).
 */
var loadConfig = (entryPoint) => {
  const baseName = entryPoint.split("/").pop();
  const configName = baseName?.split(".")[0];
  const bundledFile = path3__default.resolve(process.cwd(), "out", configName + ".js");
  /**
   * ttoss packages cannot be marked as external because it'd break the CI.
   * On CI, ttoss packages point to the TS main file, not the compiled
   * ones. See more details here https://github.com/ttoss/ttoss/issues/541.
   */
  const buildResult = esbuild.buildSync({
    bundle: true,
    entryPoints: [entryPoint],
    external: [],
    format: "cjs",
    outfile: bundledFile,
    platform: "node",
    target: "ES2021",
    treeShaking: true
  });
  if (buildResult.errors.length > 0) {
    console.error("Error building config file: ", configName);
    throw buildResult.errors;
  }
  try {
    const loadedModule = importSync(bundledFile);
    return loadedModule.default || loadedModule.config;
  } catch (error) {
    console.error("Failed importing build config file: ", configName);
    throw error;
  }
};
|
|
2995
|
+
/**
 * Synchronously reads a config file, dispatching on its extension:
 * yaml/yml → js-yaml parse, json → JSON.parse, js → require,
 * ts → esbuild bundle + import (a function result is called with `options`).
 * Throws on any other extension.
 */
var readConfigFileSync = ({
  configFilePath,
  options: options9
}) => {
  const extension = configFilePath.split(".").pop();
  switch (extension) {
    case "yaml":
    case "yml": {
      const file = fs3__default.readFileSync(configFilePath, "utf8");
      return yaml4.load(file);
    }
    case "json": {
      const file = fs3__default.readFileSync(configFilePath, "utf8");
      return JSON.parse(file);
    }
    case "js":
      return __require(configFilePath);
    case "ts": {
      const loaded = loadConfig(configFilePath);
      // A config module may export a factory taking the CLI options.
      return typeof loaded === "function" ? loaded(options9) : loaded;
    }
    default:
      throw new Error("Unsupported config file extension: " + extension);
  }
};
|
|
3020
|
+
/**
 * Async variant of readConfigFileSync. For TypeScript configs the loaded
 * value (or factory result) may be a Promise and is awaited; every other
 * extension delegates to the sync reader.
 */
var readConfigFile = async ({
  configFilePath,
  options: options9
}) => {
  const extension = configFilePath.split(".").pop();
  if (extension !== "ts") {
    return readConfigFileSync({ configFilePath, options: options9 });
  }
  let loaded = loadConfig(configFilePath);
  if (typeof loaded === "function") {
    loaded = loaded(options9);
  }
  return Promise.resolve(loaded);
};
|
|
3035
|
+
|
|
3036
|
+
// ../cloudformation/src/findAndReadCloudFormationTemplate.ts
|
|
3037
|
+
/**
 * Default CloudFormation template locations, probed in priority order
 * (ts, js, yaml, yml, json).
 */
var defaultTemplatePaths2 = [
  "./src/cloudformation.ts",
  "./src/cloudformation.js",
  "./src/cloudformation.yaml",
  "./src/cloudformation.yml",
  "./src/cloudformation.json"
];
|
|
3042
|
+
/**
 * Locates and reads a CloudFormation template. When no explicit path is
 * given, the first existing default path (ts, js, yaml, yml, json order)
 * wins. YAML files go through the YAML template reader; everything else
 * through the generic config reader. Throws when nothing is found.
 */
var findAndReadCloudFormationTemplate = async ({
  templatePath: defaultTemplatePath,
  options: options9 = {}
}) => {
  const firstExistingDefault = defaultTemplatePaths2.find((candidate) => {
    return fs4.existsSync(path3.resolve(process.cwd(), candidate));
  });
  const templatePath = defaultTemplatePath || firstExistingDefault || "";
  if (!templatePath) {
    throw new Error("Cannot find a CloudFormation template.");
  }
  const extension = templatePath.split(".").pop();
  if (extension === "yaml" || extension === "yml") {
    return readCloudFormationYamlTemplate({ templatePath });
  }
  const configFilePath = path3.resolve(process.cwd(), templatePath);
  return readConfigFile({ configFilePath, options: options9 });
};
|
|
3062
|
+
|
|
3063
|
+
// src/deploy/lambda/getLambdaEntryPointsFromTemplate.ts
|
|
3064
|
+
/**
 * Derives TypeScript entry-point paths from the Lambda function resources
 * of a CloudFormation template.
 *
 * A Lambda handler has the form "<file>.<exportedFunction>", where the
 * exported function name follows the LAST dot — so only the last segment is
 * stripped (previously `split(".")[0]` truncated file names containing
 * dots, e.g. "app.module.handler" → "app.ts" instead of "app.module.ts").
 * Resources without a string Handler (e.g. image-based functions) are
 * skipped instead of crashing.
 *
 * @param template - parsed CloudFormation template with a Resources map.
 * @returns array of "<file>.ts" paths, in Resources key order.
 */
var getLambdaEntryPointsFromTemplate = (template) => {
  const lambdaTypes = ["AWS::Lambda::Function", "AWS::Serverless::Function"];
  return Object.keys(template.Resources)
    .filter((key) => lambdaTypes.includes(template.Resources[key].Type))
    .map((key) => template.Resources[key].Properties?.Handler)
    .filter((handler) => typeof handler === "string")
    .map((handler) => {
      const lastDot = handler.lastIndexOf(".");
      const fileBase = lastDot === -1 ? handler : handler.slice(0, lastDot);
      return fileBase + ".ts";
    });
};
|
|
3078
|
+
var logPrefix12 = "cloudformation";
// Register custom npmlog levels used while reporting stack progress:
// "event" (yellow) for stack events and "output" (blue) for stack outputs.
// Both use priority 1e4 — presumably high enough to always display; confirm
// against npmlog's default level priorities.
log5.addLevel("event", 1e4, { fg: "yellow" });
log5.addLevel("output", 1e4, { fg: "blue" });
|
|
3081
|
+
/**
 * Assembles the option bag passed to template factory functions: the raw
 * CLI options augmented with the stack name and the current environment,
 * package and project names.
 */
var getCloudformationTemplateOptions = ({
  cliOptions,
  stackName
}) => {
  return {
    ...cliOptions,
    stackName,
    environment: getEnvironment(),
    packageName: getPackageName(),
    projectName: getProjectName()
  };
};
|
|
3094
|
+
/**
 * Main CloudFormation deploy flow:
 *  1. resolve the template (inline `template` option or found on disk),
 *  2. inject CLI-provided parameters into the template's Parameters section,
 *  3. validate the template,
 *  4. build/upload Lambda code and wire its S3/ECR location into the
 *     template and stack parameters,
 *  5. deploy the stack.
 * Errors are funneled through handleDeployError.
 */
var deployCloudFormation = async (cliOptions) => {
  try {
    const {
      lambdaDockerfile,
      lambdaEntryPoints,
      lambdaEntryPointsBaseDir,
      lambdaImage,
      lambdaExternal,
      lambdaFormat,
      lambdaOutdir,
      parameters,
      template,
      templatePath
    } = cliOptions;
    const { stackName } = await handleDeployInitialization({ logPrefix: logPrefix12 });
    // Prefer an inline template (shallow-copied so later mutations don't
    // touch the caller's object); otherwise search the default locations.
    const cloudFormationTemplate = await (async () => {
      if (template) {
        return { ...template };
      }
      return findAndReadCloudFormationTemplate({
        templatePath,
        options: getCloudformationTemplateOptions({ stackName, cliOptions })
      });
    })();
    // Declare each CLI parameter in the template (String/Number inferred
    // from the value) unless the template already defines it.
    parameters?.forEach((parameter) => {
      if (cloudFormationTemplate.Parameters?.[parameter.key]) {
        return;
      }
      if (!cloudFormationTemplate.Parameters) {
        cloudFormationTemplate.Parameters = {};
      }
      const type = (() => {
        if (typeof parameter.value === "string") {
          return "String";
        }
        if (typeof parameter.value === "number") {
          return "Number";
        }
        throw new Error(
          `Parameter assertion failed. Parameter ${parameter.key} value ${parameter.value} is not mapped.`
        );
      })();
      cloudFormationTemplate.Parameters[parameter.key] = {
        Type: type
      };
    });
    await validateTemplate({ stackName, template: cloudFormationTemplate });
    // Base CreateStack/UpdateStack params; Lambda-related parameters are
    // pushed into this object by the closure below.
    const params = {
      StackName: stackName,
      Parameters: parameters?.map((parameter) => {
        return {
          ParameterKey: parameter.key,
          ParameterValue: parameter.value,
          UsePreviousValue: parameter.usePreviousValue,
          ResolvedValue: parameter.resolvedValue
        };
      }) || []
    };
    // Builds and uploads Lambda code, then mutates `cloudFormationTemplate`
    // and `params` in place to reference the uploaded artifacts.
    const deployCloudFormationDeployLambdaCode = async () => {
      // Fall back to scanning the template for Lambda handlers when no
      // explicit entry points were given.
      const finalLambdaEntryPoints = (() => {
        if (lambdaEntryPoints && lambdaEntryPoints.length > 0) {
          return lambdaEntryPoints;
        }
        return getLambdaEntryPointsFromTemplate(cloudFormationTemplate);
      })();
      const response = await deployLambdaCode({
        lambdaDockerfile,
        lambdaExternal,
        lambdaEntryPoints: finalLambdaEntryPoints,
        lambdaEntryPointsBaseDir,
        lambdaFormat,
        lambdaImage,
        lambdaOutdir,
        stackName
      });
      if (response) {
        const { bucket, key, versionId, imageUri } = response;
        if (imageUri) {
          // Image-based deployment: expose the image URI as a parameter.
          // Spread after the default so a template-defined parameter wins.
          cloudFormationTemplate.Parameters = {
            LambdaImageUri: { Type: "String" },
            ...cloudFormationTemplate.Parameters
          };
          params.Parameters.push({
            ParameterKey: "LambdaImageUri",
            ParameterValue: imageUri
          });
        } else if (bucket && key && versionId) {
          // Zip-based deployment: expose the S3 location as parameters.
          cloudFormationTemplate.Parameters = {
            LambdaS3Bucket: { Type: "String" },
            LambdaS3Key: { Type: "String" },
            LambdaS3ObjectVersion: { Type: "String" },
            ...cloudFormationTemplate.Parameters
          };
          params.Parameters.push(
            {
              ParameterKey: "LambdaS3Bucket",
              ParameterValue: bucket
            },
            {
              ParameterKey: "LambdaS3Key",
              ParameterValue: key
            },
            /**
             * Used by CloudFormation AWS::Lambda::Function
             * @see {@link https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-lambda-function-code.html}
             * and by CloudFormation AWS::Serverless::Function
             * @see {@link https://docs.aws.amazon.com/serverless-application-model/latest/developerguide/sam-property-function-functioncode.html}
             */
            {
              ParameterKey: "LambdaS3ObjectVersion",
              ParameterValue: versionId
            }
          );
          // Point every Lambda resource that doesn't define its own code
          // location at the uploaded artifact.
          Object.keys(cloudFormationTemplate.Resources).forEach((key2) => {
            const resource = cloudFormationTemplate.Resources[key2];
            if (resource.Type === "AWS::Lambda::Function") {
              if (!resource.Properties.Code) {
                resource.Properties.Code = {
                  S3Bucket: { Ref: "LambdaS3Bucket" },
                  S3Key: { Ref: "LambdaS3Key" },
                  S3ObjectVersion: { Ref: "LambdaS3ObjectVersion" }
                };
              }
            }
            if (resource.Type === "AWS::Serverless::Function") {
              if (!resource.Properties.CodeUri) {
                resource.Properties.CodeUri = {
                  Bucket: { Ref: "LambdaS3Bucket" },
                  Key: { Ref: "LambdaS3Key" },
                  Version: { Ref: "LambdaS3ObjectVersion" }
                };
              }
            }
          });
        }
      }
    };
    await deployCloudFormationDeployLambdaCode();
    const output = await deploy({
      params,
      template: cloudFormationTemplate
    });
    return output;
  } catch (error) {
    return handleDeployError({ error, logPrefix: logPrefix12 });
  }
};
|
|
3241
|
+
/**
 * Empties every S3 bucket owned by the given stack (required before the
 * stack can be deleted).
 *
 * Fix: the original fetched only the FIRST page of listStackResources (the
 * NextToken handling was commented out), so buckets beyond the first page
 * were silently skipped. Pagination is now followed to the end.
 */
var emptyStackBuckets = async ({ stackName }) => {
  const buckets = [];
  let nextToken;
  do {
    const { NextToken, StackResourceSummaries } = await cloudFormationV2()
      .listStackResources({ StackName: stackName, NextToken: nextToken })
      .promise();
    for (const { ResourceType, PhysicalResourceId } of StackResourceSummaries || []) {
      if (ResourceType === "AWS::S3::Bucket" && PhysicalResourceId) {
        buckets.push(PhysicalResourceId);
      }
    }
    nextToken = NextToken;
  } while (nextToken);
  return Promise.all(
    buckets.map((bucket) => {
      return emptyS3Directory({ bucket });
    })
  );
};
|
|
3262
|
+
/**
 * Deletes a stack after emptying its S3 buckets. Refuses to run when an
 * environment is set, no-ops when the stack doesn't exist, and throws when
 * termination protection is enabled.
 */
var destroy = async ({ stackName }) => {
  const environment = getEnvironment();
  if (environment) {
    log5.info(
      logPrefix12,
      `Cannot destroy stack when environment (${environment}) is defined.`
    );
    return;
  }
  const stackExists = await doesStackExist({ stackName });
  if (!stackExists) {
    log5.info(logPrefix12, `Stack ${stackName} doesn't exist.`);
    return;
  }
  const destroyable = await canDestroyStack({ stackName });
  if (!destroyable) {
    throw new Error(
      `Stack ${stackName} cannot be destroyed while TerminationProtection is enabled.`
    );
  }
  // Buckets must be empty before CloudFormation can delete them.
  await emptyStackBuckets({ stackName });
  await deleteStack({ stackName });
};
|
|
3282
|
+
/**
 * CLI entry for destroying a CloudFormation stack. Resolves the stack name
 * (explicit argument or the computed default) and delegates to `destroy`,
 * funneling failures through handleDeployError.
 */
var destroyCloudFormation = async ({
  stackName: defaultStackName
} = {}) => {
  try {
    log5.info(logPrefix12, "CAUTION! Starting CloudFormation destroy...");
    const stackName = defaultStackName || await getStackName();
    log5.info(logPrefix12, `stackName: ${stackName}`);
    await destroy({ stackName });
  } catch (error) {
    handleDeployError({ error, logPrefix: logPrefix12 });
  }
};
|
|
3294
|
+
var logPrefix13 = "deploy-lambda-layer";
// Accepts package specs with a version after "@", optionally prefixed with
// "~" or "^" (e.g. "lodash@^4.17.21", "@1.2.3-alpha").
var packageNameRegex = /@[~^]?([\dvx*]+(?:[-.](?:[\dx*]+|alpha|beta))*)/;
/**
 * Yargs options for the lambda-layer command: a required list of package
 * specs matching `packageNameRegex`.
 */
var options2 = {
  packages: {
    type: "string",
    array: true,
    required: true,
    describe: `NPM packages' names to be deployed as Lambda Layers. It must follow the format: ${packageNameRegex.toString()}.`
  }
};
|
|
3304
|
+
/**
 * Yargs command definition for `deploy lambda-layer`. Validates that every
 * package spec matches packageNameRegex before running; `--destroy` only
 * logs a notice (layers are never destroyed).
 */
var deployLambdaLayerCommand = {
  command: "lambda-layer",
  describe: "Deploy Lambda Layer.",
  builder: (yargs3) => {
    return yargs3
      .options(addGroupToOptions(options2, "Deploy Lambda Layer Options"))
      .check(({ packages }) => {
        const invalidPackages = packages.filter((packageName) => {
          return !packageNameRegex.test(packageName);
        });
        if (invalidPackages.length === 0) {
          return true;
        }
        throw new Error(
          `Some package names are invalid: ${invalidPackages.join(
            ", "
          )}. The package must follow the pattern: ${packageNameRegex.toString()}.`
        );
      });
  },
  handler: ({ destroy: destroy2, ...rest }) => {
    if (destroy2) {
      log5.info(logPrefix13, `${NAME} doesn't destroy lambda layers.`);
    } else {
      // Fire-and-forget: yargs handlers are not awaited.
      deployLambdaLayer(rest);
    }
  }
};
|
|
3333
|
+
|
|
3334
|
+
// src/deploy/staticApp/findDefaultBuildFolder.ts
|
|
3335
|
+
/**
 * Build-output folders probed when no explicit folder is given.
 * Later entries win when several contain files (see findDefaultBuildFolder).
 */
var defaultBuildFolders = [
  "build", // Create React App default build folder
  "out", // Next.js default output folder
  "storybook-static", // Storybook default output folder
  "dist" // Vite.js default build folder
];
|
|
3353
|
+
/**
 * Probes the default build folders in parallel and returns the LAST one
 * that contains at least one file (matching the original reduce, where a
 * later valid folder overrides an earlier one). Returns "" when none
 * contain files.
 */
var findDefaultBuildFolder = async () => {
  const probed = await Promise.all(
    defaultBuildFolders.map(async (directory) => {
      const files = await getAllFilesInsideADirectory({
        directory
      });
      return { directory, hasFiles: files.length !== 0 };
    })
  );
  let chosen = "";
  for (const { directory, hasFiles } of probed) {
    if (hasFiles) {
      chosen = directory;
    }
  }
  return chosen;
};
|
|
3370
|
+
|
|
3371
|
+
// src/deploy/staticApp/getStaticAppBucket.ts
|
|
3372
|
+
var STATIC_APP_BUCKET_LOGICAL_ID = "StaticBucket";
/**
 * Resolves the physical bucket name of the static-app bucket resource in
 * the given stack. Best-effort: any describe failure (e.g. missing stack or
 * resource) resolves to undefined instead of throwing.
 */
var getStaticAppBucket = async ({
  stackName
}) => {
  try {
    const { StackResourceDetail } = await describeStackResource({
      LogicalResourceId: STATIC_APP_BUCKET_LOGICAL_ID,
      StackName: stackName
    });
    return StackResourceDetail?.PhysicalResourceId;
  } catch (error) {
    // Deliberate best-effort swallow: callers treat undefined as "no bucket".
    return void 0;
  }
};
|
|
3387
|
+
|
|
3388
|
+
// src/deploy/staticApp/staticApp.template.ts
|
|
3389
|
+
// Package version resolved at module load via getPackageVersion().
var PACKAGE_VERSION = getPackageVersion();
// CloudFormation logical IDs shared by the static-app template builders.
var STATIC_APP_BUCKET_LOGICAL_ID2 = "StaticBucket";
var CLOUDFRONT_DISTRIBUTION_LOGICAL_ID = "CloudFrontDistribution";
var CLOUDFRONT_ORIGIN_ACCESS_CONTROL_LOGICAL_ID = "OriginAccessControl";
var ROUTE_53_RECORD_SET_GROUP_LOGICAL_ID = "Route53RecordSetGroup";
// Error page used for non-SPA apps (SPA deployments serve index.html instead).
var ERROR_DOCUMENT = "404/index.html";
// Fixed CloudFront policy IDs — presumably the AWS managed cache / origin
// request / response-headers policies; confirm against the AWS console.
var CACHE_POLICY_ID = "4135ea2d-6df8-44a3-9df3-4b5a84be39ad";
var ORIGIN_REQUEST_POLICY_ID = "88a5eaf4-2fd4-4709-b370-b4c650ea3fcf";
var ORIGIN_RESPONSE_POLICY_ID = "eaab4381-ed33-4a86-88ca-d9558dc6cd63";
|
|
3398
|
+
/**
 * Builds the CloudFormation template for an S3 static-website deployment
 * (no CloudFront): a public-read bucket with open CORS, website hosting
 * and a BucketWebsiteURL output. For SPAs the error document falls back to
 * index.html so client-side routing works.
 */
var getBucketStaticWebsiteTemplate = ({
  spa
}) => {
  // arn:aws:s3:::<bucket>/* — every object in the static bucket.
  const bucketObjectsArn = {
    "Fn::Join": [
      "",
      ["arn:aws:s3:::", { Ref: STATIC_APP_BUCKET_LOGICAL_ID2 }, "/*"]
    ]
  };
  const staticBucket = {
    Type: "AWS::S3::Bucket",
    Properties: {
      CorsConfiguration: {
        CorsRules: [
          {
            AllowedHeaders: ["*"],
            AllowedMethods: ["GET"],
            AllowedOrigins: ["*"],
            Id: "OpenCors",
            MaxAge: 600
          }
        ]
      },
      PublicAccessBlockConfiguration: {
        BlockPublicPolicy: false
      },
      WebsiteConfiguration: {
        IndexDocument: `index.html`,
        ErrorDocument: spa ? "index.html" : ERROR_DOCUMENT
      }
    }
  };
  const staticBucketPolicy = {
    Type: "AWS::S3::BucketPolicy",
    Properties: {
      Bucket: { Ref: STATIC_APP_BUCKET_LOGICAL_ID2 },
      PolicyDocument: {
        Statement: [
          {
            Action: ["s3:GetObject"],
            Effect: "Allow",
            Principal: "*",
            Resource: bucketObjectsArn
          }
        ]
      }
    }
  };
  return {
    AWSTemplateFormatVersion: "2010-09-09",
    Resources: {
      [STATIC_APP_BUCKET_LOGICAL_ID2]: staticBucket,
      [`${STATIC_APP_BUCKET_LOGICAL_ID2}S3BucketPolicy`]: staticBucketPolicy
    },
    Outputs: {
      BucketWebsiteURL: {
        Description: "Bucket static app website URL",
        Value: {
          "Fn::GetAtt": [STATIC_APP_BUCKET_LOGICAL_ID2, "WebsiteURL"]
        }
      }
    }
  };
};
|
|
3463
|
+
var getCloudFrontTemplate = ({
|
|
3464
|
+
acm,
|
|
3465
|
+
aliases = [],
|
|
3466
|
+
spa,
|
|
3467
|
+
hostedZoneName
|
|
3468
|
+
}) => {
|
|
3469
|
+
const template = {
|
|
3470
|
+
AWSTemplateFormatVersion: "2010-09-09",
|
|
3471
|
+
Resources: {
|
|
3472
|
+
[STATIC_APP_BUCKET_LOGICAL_ID2]: {
|
|
3473
|
+
Type: "AWS::S3::Bucket",
|
|
3474
|
+
Properties: {
|
|
3475
|
+
PublicAccessBlockConfiguration: {
|
|
3476
|
+
BlockPublicPolicy: false
|
|
3477
|
+
}
|
|
3478
|
+
}
|
|
3479
|
+
},
|
|
3480
|
+
[`${STATIC_APP_BUCKET_LOGICAL_ID2}S3BucketPolicy`]: {
|
|
3481
|
+
Type: "AWS::S3::BucketPolicy",
|
|
3482
|
+
Properties: {
|
|
3483
|
+
Bucket: { Ref: STATIC_APP_BUCKET_LOGICAL_ID2 },
|
|
3484
|
+
PolicyDocument: {
|
|
3485
|
+
Statement: [
|
|
3486
|
+
/**
|
|
3487
|
+
* https://docs.aws.amazon.com/AmazonCloudFront/latest/DeveloperGuide/private-content-restricting-access-to-s3.html
|
|
3488
|
+
*/
|
|
3489
|
+
{
|
|
3490
|
+
Sid: "AllowCloudFrontServicePrincipalReadOnly",
|
|
3491
|
+
Effect: "Allow",
|
|
3492
|
+
Principal: {
|
|
3493
|
+
Service: "cloudfront.amazonaws.com"
|
|
3494
|
+
},
|
|
3495
|
+
Action: "s3:GetObject",
|
|
3496
|
+
Resource: {
|
|
3497
|
+
"Fn::Join": [
|
|
3498
|
+
"",
|
|
3499
|
+
[
|
|
3500
|
+
"arn:aws:s3:::",
|
|
3501
|
+
{ Ref: STATIC_APP_BUCKET_LOGICAL_ID2 },
|
|
3502
|
+
"/*"
|
|
3503
|
+
]
|
|
3504
|
+
]
|
|
3505
|
+
},
|
|
3506
|
+
Condition: {
|
|
3507
|
+
StringEquals: {
|
|
3508
|
+
"AWS:SourceArn": (
|
|
3509
|
+
// 'arn:aws:cloudfront::<AWS account ID>:distribution/<CloudFront distribution ID>',
|
|
3510
|
+
{
|
|
3511
|
+
"Fn::Join": [
|
|
3512
|
+
"",
|
|
3513
|
+
[
|
|
3514
|
+
"arn:aws:cloudfront::",
|
|
3515
|
+
{ Ref: "AWS::AccountId" },
|
|
3516
|
+
":distribution/",
|
|
3517
|
+
{ Ref: CLOUDFRONT_DISTRIBUTION_LOGICAL_ID }
|
|
3518
|
+
]
|
|
3519
|
+
]
|
|
3520
|
+
}
|
|
3521
|
+
)
|
|
3522
|
+
}
|
|
3523
|
+
}
|
|
3524
|
+
}
|
|
3525
|
+
]
|
|
3526
|
+
}
|
|
3527
|
+
}
|
|
3528
|
+
}
|
|
3529
|
+
}
|
|
3530
|
+
};
|
|
3531
|
+
const cloudFrontResources = {
|
|
3532
|
+
[CLOUDFRONT_DISTRIBUTION_LOGICAL_ID]: {
|
|
3533
|
+
Type: "AWS::CloudFront::Distribution",
|
|
3534
|
+
Properties: {
|
|
3535
|
+
DistributionConfig: {
|
|
3536
|
+
Comment: {
|
|
3537
|
+
"Fn::Sub": [
|
|
3538
|
+
"CloudFront Distribution for ${Project} project.",
|
|
3539
|
+
{ Project: { Ref: "Project" } }
|
|
3540
|
+
]
|
|
3541
|
+
},
|
|
3542
|
+
CustomErrorResponses: [403, 404].map((errorCode) => {
|
|
3543
|
+
if (spa) {
|
|
3544
|
+
return {
|
|
3545
|
+
ErrorCachingMinTTL: 60 * 60 * 24,
|
|
3546
|
+
ErrorCode: errorCode,
|
|
3547
|
+
ResponseCode: 200,
|
|
3548
|
+
ResponsePagePath: "/index.html"
|
|
3549
|
+
};
|
|
3550
|
+
}
|
|
3551
|
+
return {
|
|
3552
|
+
ErrorCachingMinTTL: 0,
|
|
3553
|
+
ErrorCode: errorCode,
|
|
3554
|
+
ResponseCode: 404,
|
|
3555
|
+
ResponsePagePath: "/" + ERROR_DOCUMENT
|
|
3556
|
+
};
|
|
3557
|
+
}),
|
|
3558
|
+
DefaultCacheBehavior: {
|
|
3559
|
+
AllowedMethods: ["GET", "HEAD", "OPTIONS"],
|
|
3560
|
+
Compress: true,
|
|
3561
|
+
CachedMethods: ["GET", "HEAD", "OPTIONS"],
|
|
3562
|
+
/**
|
|
3563
|
+
* Caching OPTIONS. Related to OriginRequestPolicyId property.
|
|
3564
|
+
* https://docs.aws.amazon.com/AmazonCloudFront/latest/DeveloperGuide/header-caching.html#header-caching-web-cors
|
|
3565
|
+
*/
|
|
3566
|
+
OriginRequestPolicyId: ORIGIN_REQUEST_POLICY_ID,
|
|
3567
|
+
/**
|
|
3568
|
+
* CachePolicyId property:
|
|
3569
|
+
* https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-cloudfront-distribution-defaultcachebehavior.html#cfn-cloudfront-distribution-defaultcachebehavior-cachepolicyid
|
|
3570
|
+
*/
|
|
3571
|
+
CachePolicyId: CACHE_POLICY_ID,
|
|
3572
|
+
ResponseHeadersPolicyId: ORIGIN_RESPONSE_POLICY_ID,
|
|
3573
|
+
TargetOriginId: { Ref: STATIC_APP_BUCKET_LOGICAL_ID2 },
|
|
3574
|
+
ViewerProtocolPolicy: "redirect-to-https"
|
|
3575
|
+
},
|
|
3576
|
+
DefaultRootObject: "index.html",
|
|
3577
|
+
Enabled: true,
|
|
3578
|
+
HttpVersion: "http2",
|
|
3579
|
+
Origins: [
|
|
3580
|
+
{
|
|
3581
|
+
DomainName: {
|
|
3582
|
+
"Fn::GetAtt": [STATIC_APP_BUCKET_LOGICAL_ID2, "DomainName"]
|
|
3583
|
+
},
|
|
3584
|
+
Id: { Ref: STATIC_APP_BUCKET_LOGICAL_ID2 },
|
|
3585
|
+
OriginAccessControlId: {
|
|
3586
|
+
"Fn::GetAtt": [
|
|
3587
|
+
CLOUDFRONT_ORIGIN_ACCESS_CONTROL_LOGICAL_ID,
|
|
3588
|
+
"Id"
|
|
3589
|
+
]
|
|
3590
|
+
},
|
|
3591
|
+
/**
|
|
3592
|
+
* Note: As of September 2022, an empty OriginAccessIdentity must be specified in S3OriginConfig.
|
|
3593
|
+
*/
|
|
3594
|
+
S3OriginConfig: {
|
|
3595
|
+
OriginAccessIdentity: ""
|
|
3596
|
+
}
|
|
3597
|
+
}
|
|
3598
|
+
]
|
|
3599
|
+
}
|
|
3600
|
+
}
|
|
3601
|
+
},
|
|
3602
|
+
[CLOUDFRONT_ORIGIN_ACCESS_CONTROL_LOGICAL_ID]: {
|
|
3603
|
+
Type: "AWS::CloudFront::OriginAccessControl",
|
|
3604
|
+
Properties: {
|
|
3605
|
+
OriginAccessControlConfig: {
|
|
3606
|
+
Description: {
|
|
3607
|
+
"Fn::Sub": [
|
|
3608
|
+
"Default Origin Access Control for ${Project} project.",
|
|
3609
|
+
{ Project: { Ref: "Project" } }
|
|
3610
|
+
]
|
|
3611
|
+
},
|
|
3612
|
+
Name: {
|
|
3613
|
+
Ref: "AWS::StackName"
|
|
3614
|
+
},
|
|
3615
|
+
OriginAccessControlOriginType: "s3",
|
|
3616
|
+
SigningBehavior: "always",
|
|
3617
|
+
SigningProtocol: "sigv4"
|
|
3618
|
+
}
|
|
3619
|
+
}
|
|
3620
|
+
}
|
|
3621
|
+
};
|
|
3622
|
+
if (acm) {
|
|
3623
|
+
const acmRegex = /^arn:aws:acm:[-a-z0-9]+:\d{12}:certificate\/[-a-z0-9]+$/;
|
|
3624
|
+
const acmCertificateArn = acmRegex.test(acm) ? acm : {
|
|
3625
|
+
"Fn::ImportValue": acm
|
|
3626
|
+
};
|
|
3627
|
+
cloudFrontResources.CloudFrontDistribution.Properties.DistributionConfig = {
|
|
3628
|
+
...cloudFrontResources.CloudFrontDistribution.Properties.DistributionConfig,
|
|
3629
|
+
Aliases: aliases || { Ref: "AWS::NoValue" },
|
|
3630
|
+
ViewerCertificate: {
|
|
3631
|
+
AcmCertificateArn: acmCertificateArn,
|
|
3632
|
+
/**
|
|
3633
|
+
* AWS CloudFront recommendation.
|
|
3634
|
+
*/
|
|
3635
|
+
MinimumProtocolVersion: "TLSv1.2_2021",
|
|
3636
|
+
SslSupportMethod: "sni-only"
|
|
3637
|
+
}
|
|
3638
|
+
};
|
|
3639
|
+
}
|
|
3640
|
+
if (hostedZoneName && aliases) {
|
|
3641
|
+
const recordSets = aliases.map((alias) => {
|
|
3642
|
+
if (alias === hostedZoneName) {
|
|
3643
|
+
return {
|
|
3644
|
+
AliasTarget: {
|
|
3645
|
+
DNSName: {
|
|
3646
|
+
"Fn::GetAtt": `${CLOUDFRONT_DISTRIBUTION_LOGICAL_ID}.DomainName`
|
|
3647
|
+
},
|
|
3648
|
+
/**
|
|
3649
|
+
* https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-route53-aliastarget.html#cfn-route53-aliastarget-hostedzoneid
|
|
3650
|
+
*/
|
|
3651
|
+
HostedZoneId: "Z2FDTNDATAQYW2"
|
|
3652
|
+
},
|
|
3653
|
+
Name: alias,
|
|
3654
|
+
Type: "A"
|
|
3655
|
+
};
|
|
3656
|
+
}
|
|
3657
|
+
return {
|
|
3658
|
+
Name: alias,
|
|
3659
|
+
ResourceRecords: [
|
|
3660
|
+
{
|
|
3661
|
+
"Fn::GetAtt": `${CLOUDFRONT_DISTRIBUTION_LOGICAL_ID}.DomainName`
|
|
3662
|
+
}
|
|
3663
|
+
],
|
|
3664
|
+
TTL: 60,
|
|
3665
|
+
Type: "CNAME"
|
|
3666
|
+
};
|
|
3667
|
+
});
|
|
3668
|
+
const route53RecordSetGroupResources = {
|
|
3669
|
+
[ROUTE_53_RECORD_SET_GROUP_LOGICAL_ID]: {
|
|
3670
|
+
Type: "AWS::Route53::RecordSetGroup",
|
|
3671
|
+
DependsOn: [CLOUDFRONT_DISTRIBUTION_LOGICAL_ID],
|
|
3672
|
+
Properties: {
|
|
3673
|
+
// https://forums.aws.amazon.com/thread.jspa?threadID=103919
|
|
3674
|
+
HostedZoneName: `${hostedZoneName}${hostedZoneName.endsWith(".") ? "" : "."}`,
|
|
3675
|
+
RecordSets: recordSets
|
|
3676
|
+
}
|
|
3677
|
+
}
|
|
3678
|
+
};
|
|
3679
|
+
template.Resources = {
|
|
3680
|
+
...template.Resources,
|
|
3681
|
+
...route53RecordSetGroupResources
|
|
3682
|
+
};
|
|
3683
|
+
}
|
|
3684
|
+
template.Resources = { ...template.Resources, ...cloudFrontResources };
|
|
3685
|
+
const aliasesOutput = (aliases || []).reduce(
|
|
3686
|
+
(acc, alias, index) => {
|
|
3687
|
+
return {
|
|
3688
|
+
...acc,
|
|
3689
|
+
[`Alias${index}URL`]: {
|
|
3690
|
+
Value: `https://${alias}`
|
|
3691
|
+
}
|
|
3692
|
+
};
|
|
3693
|
+
},
|
|
3694
|
+
{}
|
|
3695
|
+
);
|
|
3696
|
+
template.Outputs = {
|
|
3697
|
+
...template.Outputs,
|
|
3698
|
+
...aliasesOutput,
|
|
3699
|
+
CloudFrontURL: {
|
|
3700
|
+
Value: {
|
|
3701
|
+
"Fn::Join": [
|
|
3702
|
+
"",
|
|
3703
|
+
[
|
|
3704
|
+
"https://",
|
|
3705
|
+
{
|
|
3706
|
+
"Fn::GetAtt": `${CLOUDFRONT_DISTRIBUTION_LOGICAL_ID}.DomainName`
|
|
3707
|
+
}
|
|
3708
|
+
]
|
|
3709
|
+
]
|
|
3710
|
+
}
|
|
3711
|
+
},
|
|
3712
|
+
CloudFrontDistributionId: {
|
|
3713
|
+
Value: {
|
|
3714
|
+
Ref: CLOUDFRONT_DISTRIBUTION_LOGICAL_ID
|
|
3715
|
+
}
|
|
3716
|
+
},
|
|
3717
|
+
CurrentVersion: {
|
|
3718
|
+
Value: PACKAGE_VERSION
|
|
3719
|
+
}
|
|
3720
|
+
};
|
|
3721
|
+
return template;
|
|
3722
|
+
};
|
|
3723
|
+
/**
 * Select the CloudFormation template for a static app deployment.
 *
 * When the `cloudfront` flag is set, a CloudFront-backed template is built
 * (certificate, aliases, Route 53 records, etc.); otherwise a plain S3
 * static-website-bucket template is returned.
 */
var getStaticAppTemplate = (args) => {
  const { cloudfront, spa } = args;
  if (!cloudfront) {
    // Simplest case: S3 bucket configured for static website hosting.
    return getBucketStaticWebsiteTemplate({ spa });
  }
  const { acm, aliases, hostedZoneName, region } = args;
  return getCloudFrontTemplate({
    acm,
    aliases,
    cloudfront,
    spa,
    hostedZoneName,
    region
  });
};
|
|
3743
|
+
// Output key under which the static-app stack publishes its distribution id.
var CLOUDFRONT_DISTRIBUTION_ID = "CloudFrontDistributionId";
var logPrefix14 = "static-app";
/**
 * Invalidate every cached path ("/*") of the CloudFront distribution whose id
 * is published in the CloudFormation stack outputs.
 *
 * No-op (with a log line) when `outputs` is absent or when the
 * `CloudFrontDistributionId` output is missing/empty. A failed invalidation
 * is logged but never rethrown, so it does not abort the deploy.
 */
var invalidateCloudFront = async ({
  outputs
}) => {
  log5.info(logPrefix14, "Invalidating CloudFront...");
  if (!outputs) {
    log5.info(logPrefix14, "Invalidation: outputs do not exist.");
    return;
  }
  const cloudFrontDistributionIDOutput = outputs.find((output) => {
    return output.OutputKey === CLOUDFRONT_DISTRIBUTION_ID;
  });
  if (cloudFrontDistributionIDOutput?.OutputValue) {
    const distributionId = cloudFrontDistributionIDOutput.OutputValue;
    const params = {
      DistributionId: distributionId,
      InvalidationBatch: {
        // Timestamp makes each invalidation request unique.
        CallerReference: (/* @__PURE__ */ new Date()).toISOString(),
        Paths: {
          Items: ["/*"],
          Quantity: 1
        }
      }
    };
    const cloudFront = new AWS.CloudFront();
    try {
      await cloudFront.createInvalidation(params).promise();
      log5.info(
        logPrefix14,
        `CloudFront Distribution ID ${distributionId} invalidated with success.`
      );
    } catch (err) {
      // Best-effort: log and continue, a failed invalidation is not fatal.
      log5.error(
        logPrefix14,
        `Error while trying to invalidate CloudFront distribution ${distributionId}.`
      );
      log5.error(logPrefix14, err);
    }
  } else {
    log5.info(
      logPrefix14,
      `Cannot invalidate because distribution does not exist.`
    );
  }
};
|
|
3789
|
+
var logPrefix15 = "static-app";
/**
 * Delete old app versions from the bucket, keeping only the most recent ones.
 *
 * Each deployed version lives under a top-level "directory" (S3 common
 * prefix) named with its semver version. Non-semver prefixes are ignored.
 * Failures are best-effort: logged (including the reason) and never
 * rethrown, so cleanup problems do not abort the deploy.
 */
var removeOldVersions = async ({ bucket }) => {
  // Number of most recent semver-named versions preserved in the bucket.
  const VERSIONS_TO_KEEP = 3;
  try {
    log5.info(logPrefix15, "Removing old versions...");
    const { CommonPrefixes = [] } = await s3.listObjectsV2({ Bucket: bucket, Delimiter: "/" }).promise();
    const versions = CommonPrefixes.map(({ Prefix }) => {
      return Prefix?.replace("/", "");
    }).filter((version) => {
      return semver.valid(version);
    }).sort((a, b) => {
      // Newest version first.
      return semver.gt(a, b) ? -1 : 1;
    });
    // Everything past the newest VERSIONS_TO_KEEP entries is removed.
    const versionsToRemove = versions.slice(VERSIONS_TO_KEEP);
    await Promise.all(
      versionsToRemove.map((version) => {
        return deleteS3Directory({ bucket, directory: `${version}` });
      })
    );
  } catch (error) {
    log5.info(
      logPrefix15,
      `Cannot remove older versions from "${bucket}" bucket.`
    );
    // Log the reason instead of swallowing it silently.
    log5.info(logPrefix15, error);
  }
};
|
|
3816
|
+
|
|
3817
|
+
// src/deploy/staticApp/uploadBuiltAppToS3.ts
/**
 * Upload the built static app into the S3 bucket.
 *
 * When `buildFolder` is given it is uploaded as-is (the bucket is emptied
 * first, but only if the folder actually contains files). Otherwise a
 * default build folder is discovered; after uploading it, the root 404 page
 * is copied to `404/index.html`. Throws when neither an explicit folder nor
 * a default build folder with files exists.
 */
var uploadBuiltAppToS3 = async ({
  buildFolder: explicitFolder,
  bucket
}) => {
  if (explicitFolder) {
    // Only wipe the bucket when there is something to upload in its place.
    const builtFiles = await getAllFilesInsideADirectory({
      directory: explicitFolder
    });
    if (builtFiles.length > 0) {
      await emptyS3Directory({ bucket });
    }
    await uploadDirectoryToS3({ bucket, directory: explicitFolder });
    return;
  }
  const discoveredFolder = await findDefaultBuildFolder();
  if (!discoveredFolder) {
    throw new Error(
      `build-folder option wasn't provided and files weren't found in ${defaultBuildFolders.join(
        ", "
      )} directories.`
    );
  }
  await emptyS3Directory({ bucket });
  await uploadDirectoryToS3({ bucket, directory: discoveredFolder });
  await copyRoot404To404Index({ bucket });
};
|
|
3843
|
+
|
|
3844
|
+
// src/deploy/staticApp/deployStaticApp.ts
var logPrefix16 = "static-app";
/**
 * Deploy a static app: upload the built files to S3, deploy/update the
 * CloudFormation stack, invalidate CloudFront, and prune old versions.
 *
 * Two paths, depending on whether the stack's bucket already exists:
 *  - existing bucket: upload first (unless skipUpload), then deploy the
 *    stack, invalidate CloudFront from the stack outputs, and remove old
 *    versions;
 *  - no bucket yet (first deploy): deploy the stack first so the bucket is
 *    created, re-resolve it, then upload.
 * All errors are routed through handleDeployError.
 */
var deployStaticApp = async ({
  acm,
  aliases,
  buildFolder,
  cloudfront,
  spa,
  hostedZoneName,
  region,
  skipUpload
}) => {
  try {
    const { stackName } = await handleDeployInitialization({ logPrefix: logPrefix16 });
    const params = { StackName: stackName };
    const template = getStaticAppTemplate({
      acm,
      aliases,
      cloudfront,
      spa,
      hostedZoneName,
      region
    });
    const bucket = await getStaticAppBucket({ stackName });
    if (bucket) {
      if (!skipUpload) {
        await uploadBuiltAppToS3({ buildFolder, bucket, cloudfront });
      }
      const { Outputs } = await deploy({ params, template });
      await invalidateCloudFront({ outputs: Outputs });
      if (!skipUpload) {
        await removeOldVersions({ bucket });
      }
    } else {
      // First deploy: the stack (and its bucket) must exist before uploading.
      await deploy({ params, template });
      const newBucket = await getStaticAppBucket({ stackName });
      if (!newBucket) {
        throw new Error(`Cannot find bucket at ${stackName}.`);
      }
      await uploadBuiltAppToS3({ buildFolder, bucket: newBucket, cloudfront });
    }
  } catch (error) {
    handleDeployError({ error, logPrefix: logPrefix16 });
  }
};
|
|
3889
|
+
// yargs options for the `deploy static-app` subcommand.
var options3 = {
  acm: {
    describe: "The ARN of the certificate or the name of the exported variable whose value is the ARN of the certificate that will be associated to CloudFront.",
    type: "string"
  },
  aliases: {
    describe: "The aliases that will be associated with the CloudFront. See https://docs.aws.amazon.com/AmazonCloudFront/latest/DeveloperGuide/CNAMEs.html",
    implies: ["acm"],
    type: "array"
  },
  "build-folder": {
    describe: `The folder that will be uploaded. If not provided, it'll search for the folders "${defaultBuildFolders.join(
      ", "
    )}."`,
    type: "string"
  },
  cloudfront: {
    default: false,
    describe: "A CloudFront resource is created along with S3 if this option is `true`.",
    require: false,
    type: "boolean"
  },
  "hosted-zone-name": {
    required: false,
    describe: `Is the name of a Route 53 hosted zone. If this value is provided, ${NAME} creates the subdomains defined on \`--aliases\` option. E.g. if you have a hosted zone named "sub.domain.com", the value provided may be "sub.domain.com".`,
    type: "string"
  },
  /**
   * CloudFront triggers can be only in US East (N. Virginia) Region.
   * https://docs.aws.amazon.com/AmazonCloudFront/latest/DeveloperGuide/lambda-requirements-limits.html#lambda-requirements-cloudfront-triggers
   * The `coerce` below forces the region regardless of user input.
   */
  region: {
    coerce: () => {
      return CLOUDFRONT_REGION;
    },
    default: CLOUDFRONT_REGION,
    hidden: true,
    type: "string"
  },
  "skip-upload": {
    default: false,
    describe: "Skip files upload to S3. Useful when wanting update only CloudFormation.",
    type: "boolean"
  },
  spa: {
    default: false,
    describe: "This option enables CloudFront to serve a single page application (SPA).",
    require: false,
    type: "boolean"
  }
};
// `carlin deploy static-app` command definition.
var deployStaticAppCommand = {
  command: "static-app",
  describe: "Deploy static app.",
  builder: (yargs3) => {
    return yargs3.options(addGroupToOptions(options3, "Deploy Static App Options")).middleware(() => {
      // CloudFront resources are managed from us-east-1 (see note above).
      AWS.config.region = CLOUDFRONT_REGION;
    });
  },
  handler: ({ destroy: destroy2, ...rest }) => {
    if (destroy2) {
      destroyCloudFormation();
    } else {
      deployStaticApp(rest);
    }
  }
};
|
|
3956
|
+
var logPrefix17 = "deploy vercel";
/**
 * Join command fragments into a single shell command string, dropping
 * fragments that are undefined, null, or the empty string.
 */
var makeCommand = (cmds) => {
  const meaningful = cmds.filter((fragment) => {
    return fragment !== undefined && fragment !== null && fragment !== "";
  });
  return meaningful.join(" ");
};
|
|
3962
|
+
/**
 * Deploy the current project to Vercel using the Vercel CLI.
 *
 * Runs `vercel pull`, `vercel build`, and `vercel deploy --prebuilt` in
 * sequence. The `Production` environment maps to Vercel's production
 * target; anything else deploys a preview. The token comes from the
 * `token` option or the VERCEL_TOKEN environment variable; its absence is
 * fatal. Errors are routed through handleDeployError.
 */
var deployVercel = async ({ token }) => {
  try {
    log5.info(logPrefix17, "Deploying on Vercel...");
    const environment = getEnvironment();
    const finalToken = token || process.env.VERCEL_TOKEN;
    if (!finalToken) {
      throw new Error("Missing Vercel token");
    }
    // finalToken is guaranteed truthy past the guard, so always pass it.
    const cmdToken = `--token=${finalToken}`;
    const cmdProdFlag = environment === "Production" ? "--prod" : "";
    const cmdEnvironment = `--environment=${environment === "Production" ? "production" : "preview"}`;
    const pullCmd = makeCommand([
      "vercel",
      "pull",
      "--yes",
      cmdEnvironment,
      cmdToken
    ]);
    await spawn(pullCmd);
    const buildCmd = makeCommand(["vercel", "build", cmdProdFlag, cmdToken]);
    await spawn(buildCmd);
    const deployCmd = makeCommand([
      "vercel",
      "deploy",
      "--prebuilt",
      cmdProdFlag,
      cmdToken
    ]);
    await spawn(deployCmd);
  } catch (error) {
    handleDeployError({ error, logPrefix: logPrefix17 });
  }
};
|
|
3995
|
+
var logPrefix18 = "deploy vercel";
// yargs options for the `deploy vercel` subcommand.
var options4 = {
  token: {
    describe: "Vercel authorization token.",
    type: "string"
  }
};
// `carlin deploy vercel` command definition. Destroy is not supported.
var deployVercelCommand = {
  command: "vercel",
  describe: "Deploy on Vercel.",
  builder: (yargs3) => {
    return yargs3.options(
      addGroupToOptions(options4, "Deploy on Vercel Options")
    );
  },
  handler: ({ destroy: destroy2, ...rest }) => {
    if (destroy2) {
      log5.info(logPrefix18, "Destroy Vercel deployment not implemented yet.");
    } else {
      deployVercel(rest);
    }
  }
};
|
|
4018
|
+
/**
 * Read a Dockerfile relative to the current working directory.
 *
 * @returns the file contents, or an empty string when the file cannot be
 * read (missing file, permission error, etc.).
 */
var readDockerfile = (dockerfilePath) => {
  const absolutePath = path.join(process.cwd(), dockerfilePath);
  try {
    return fs3.readFileSync(absolutePath, "utf8");
  } catch {
    return "";
  }
};
|
|
4025
|
+
var logPrefix19 = "deploy";
/**
 * Guard against deploying to the wrong AWS account.
 *
 * Compares the configured account id with the one resolved from the current
 * AWS credentials and aborts the process on mismatch. When no credentials
 * are available (CredentialsError) the check is silently skipped so local
 * commands that never reach AWS still work.
 */
var checkAwsAccountId = async (awsAccountId) => {
  try {
    const currentAwsAccountId = await getAwsAccountId();
    if (String(awsAccountId) !== String(currentAwsAccountId)) {
      throw new Error(
        `AWS account id does not match. Current is "${currentAwsAccountId}" but the defined in configuration files is "${awsAccountId}".`
      );
    }
  } catch (error) {
    if (error.code === "CredentialsError") {
      return;
    }
    log5.error(logPrefix19, error.message);
    // Exit with a failure code so CI doesn't treat the aborted deploy as
    // success (bare process.exit() would exit 0).
    process.exit(1);
  }
};
|
|
4042
|
+
/**
 * `carlin deploy describe` command: print the CloudFormation outputs of the
 * current deployment (stack name taken from --stack-name or resolved from
 * the project configuration). Failures are logged, not rethrown.
 */
var describeDeployCommand = {
  command: "describe",
  describe: "Print the outputs of the deployment.",
  handler: async ({ stackName }) => {
    try {
      const newStackName = stackName || await getStackName();
      await printStackOutputsAfterDeploy({ stackName: newStackName });
    } catch (error) {
      log5.info(logPrefix19, "Cannot describe stack. Message: %s", error.message);
    }
  }
};
|
|
4054
|
+
// yargs options shared by the top-level `deploy` command.
var options5 = {
  "aws-account-id": {
    describe: "AWS account id associated with the deployment.",
    type: "string"
  },
  destroy: {
    default: false,
    describe: 'Destroy the deployment. You cannot destroy a deploy when "environment" is defined.',
    type: "boolean"
  },
  "lambda-dockerfile": {
    // The option value is the Dockerfile *path*; coerce replaces it with the
    // file's contents ("" when unreadable).
    coerce: (arg) => {
      return readDockerfile(arg);
    },
    default: "Dockerfile",
    describe: "Instructions to create the Lambda image.",
    type: "string"
  },
  "lambda-image": {
    default: false,
    describe: "A Lambda image will be created instead using S3.",
    type: "boolean"
  },
  "lambda-external": {
    default: [],
    describe: "External modules that will not be bundled in the Lambda code.",
    type: "array"
  },
  "lambda-entry-points-base-dir": {
    default: "src",
    describe: "Base directory for Lambda entry points.",
    type: "string"
  },
  "lambda-entry-points": {
    default: [],
    describe: "This is an array of files that each serve as an input to the bundling algorithm for Lambda functions.",
    type: "string"
  },
  "lambda-format": {
    choices: ["esm", "cjs"],
    default: "esm",
    describe: "Lambda code format.",
    type: "string"
  },
  "lambda-outdir": {
    default: "dist",
    describe: "Output directory for built Lambda code.",
    type: "string"
  },
  /**
   * This option has the format to match [CloudFormation parameter](https://docs.aws.amazon.com/AWSCloudFormation/latest/APIReference/API_Parameter.html).
   *
   * ```ts
   * {
   *   key: string,
   *   value: string,
   *   usePreviousValue: boolean,
   *   resolvedValue: string
   * }[]
   * ```
   *
   * For example:
   *
   * ```ts
   * [
   *   {
   *     key: 'key1',
   *     value: 'value1',
   *   },
   *   {
   *     key: 'key2',
   *     value: 'value2',
   *   }
   * ]
   * ```
   *
   * If you want to simplify the usage, you can pass a object with key and value only:
   *
   * ```ts
   * {
   *   key1: 'value1',
   *   key2: 'value2'
   * }
   * ```
   */
  parameters: {
    alias: "p",
    // Normalize both accepted shapes to the array-of-{key,value} form.
    coerce: (arg) => {
      if (Array.isArray(arg)) {
        return arg;
      }
      if (typeof arg === "object") {
        return Object.entries(arg).map(([key, value]) => {
          return {
            key,
            value
          };
        });
      }
      return [];
    },
    default: [],
    describe: "A list of parameters that will be passed to CloudFormation Parameters when deploying."
  },
  "skip-deploy": {
    alias: "skip",
    default: false,
    describe: "Skip the deploy command.",
    type: "boolean"
  },
  "stack-name": {
    describe: "Set the stack name.",
    type: "string"
  },
  "template-path": {
    alias: "t",
    describe: "Path to the CloudFormation template.",
    type: "string"
  }
};
|
|
4174
|
+
// [command, description] pairs shown by yargs as usage examples for `deploy`.
var examples = [
  [
    "carlin deploy -t src/cloudformation.template1.yml",
    "Change the CloudFormation template path."
  ],
  ["carlin deploy -e Production", "Set environment."],
  [
    "carlin deploy --lambda-externals momentjs",
    "Lambda exists. Don't bundle momentjs."
  ],
  [
    "carlin deploy --destroy --stack-name StackToBeDeleted",
    "Destroy a specific stack."
  ]
];
|
|
4189
|
+
/**
 * Top-level `carlin deploy [deploy]` command.
 *
 * The builder wires a middleware chain (order matters):
 *  1. register the pre-defined stack name when --stack-name is given;
 *  2. force lambdaImage=true when a lambda-dockerfile value is present;
 *  3. validate the AWS account id against the per-environment and the
 *     default configured account ids;
 *  4. honor --skip-deploy by logging and exiting 0;
 *  5. reject the removed options lambdaInput/lambdaExternals.
 * It then registers the deploy subcommands and exposes their names as
 * choices for the optional positional argument.
 */
var deployCommand = {
  command: "deploy [deploy]",
  describe: "Deploy cloud resources.",
  builder: (yargsBuilder) => {
    yargsBuilder.example(examples).options(addGroupToOptions(options5, "Deploy Options")).middleware(({ stackName }) => {
      if (stackName) {
        setPreDefinedStackName(stackName);
      }
    }).middleware((argv) => {
      // A Dockerfile implies an image-based Lambda deployment.
      if (argv.lambdaDockerfile) {
        Object.assign(argv, {
          lambdaImage: true
        });
      }
    }).middleware(
      async ({
        environments,
        environment,
        awsAccountId: defaultAwsAccountId
      }) => {
        // Account id configured for the selected environment, if any.
        const envAwsAccountId = (() => {
          return environments && environment && environments[environment] ? environments[environment].awsAccountId : void 0;
        })();
        if (envAwsAccountId) {
          await checkAwsAccountId(envAwsAccountId);
        }
        if (defaultAwsAccountId) {
          await checkAwsAccountId(defaultAwsAccountId);
        }
      }
    ).middleware(({ skipDeploy }) => {
      if (skipDeploy) {
        log5.warn(
          logPrefix19,
          "Skip deploy flag is true, then the deploy command wasn't executed."
        );
        process.exit(0);
      }
    }).middleware(({ lambdaExternals, lambdaInput }) => {
      // Fail fast on options that were renamed/removed in earlier releases.
      if (lambdaInput) {
        throw new Error(
          'Option "lambdaInput" was removed. Please use "lambdaEntryPoints" instead.'
        );
      }
      if (lambdaExternals) {
        throw new Error(
          'Option "lambdaExternals" was removed. Please use "lambdaExternal" instead.'
        );
      }
    });
    const commands = [
      deployLambdaLayerCommand,
      describeDeployCommand,
      deployBaseStackCommand,
      deployStaticAppCommand,
      deployCicdCommand,
      deployVercelCommand
    ];
    yargsBuilder.positional("deploy", {
      choices: commands.map(({ command }) => {
        return command;
      }),
      describe: "Deploy command.",
      type: "string"
    });
    commands.forEach((command) => {
      return yargsBuilder.command(command);
    });
    return yargsBuilder;
  },
  handler: ({ destroy: destroy2, ...rest }) => {
    if (destroy2) {
      destroyCloudFormation();
    } else {
      deployCloudFormation(rest);
    }
  }
};
|
|
4267
|
+
var logPrefix20 = "cicd-ecs-task-report";
/**
 * Report a CI/CD ECS task status by asynchronously invoking the Lambda
 * named by the ECS_TASK_REPORT_HANDLER_NAME environment variable.
 *
 * The payload always carries `status`; `ecsTaskArn` and `pipelineName` are
 * attached only when the corresponding environment variables are set.
 * No-op (with a log line) when the handler name is undefined.
 */
var sendEcsTaskReport = async ({ status }) => {
  const handlerName = process.env.ECS_TASK_REPORT_HANDLER_NAME;
  if (!handlerName) {
    log5.info(logPrefix20, "ECS_TASK_REPORT_HANDLER_NAME not defined.");
    return;
  }
  const { ECS_TASK_ARN, PIPELINE_NAME } = process.env;
  const payload = { status };
  if (ECS_TASK_ARN) {
    payload.ecsTaskArn = ECS_TASK_ARN;
  }
  if (PIPELINE_NAME) {
    payload.pipelineName = PIPELINE_NAME;
  }
  const lambda = new AWS.Lambda();
  await lambda
    .invokeAsync({
      FunctionName: handlerName,
      InvokeArgs: JSON.stringify(payload)
    })
    .promise();
  log5.info(logPrefix20, "Report sent.");
};
|
|
4287
|
+
// yargs options for the hidden `cicd-ecs-task-report` command.
var options6 = {
  status: {
    choices: ["Approved", "Rejected", "MainTagFound"],
    demandOption: true,
    type: "string"
  }
};
// Hidden command (describe: false keeps it out of --help) used by CI/CD
// tasks to report their status back through the report-handler Lambda.
var ecsTaskReportCommand = {
  command: "cicd-ecs-task-report",
  describe: false,
  builder: (yargs3) => {
    return yargs3.options(options6);
  },
  handler: async (args) => {
    return sendEcsTaskReport(args);
  }
};
|
|
4304
|
+
var logPrefix21 = "generate-env";
/**
 * Read an env file located at `envsPath`/`envFileName`, resolved against
 * the current working directory.
 *
 * @returns the file contents as a string, or undefined when the file
 * cannot be read.
 */
var readEnvFile = async ({
  envFileName,
  envsPath
}) => {
  const envFilePath = path.resolve(process.cwd(), envsPath, envFileName);
  try {
    return await fs3.promises.readFile(envFilePath, "utf8");
  } catch {
    return undefined;
  }
};
|
|
4319
|
+
/**
 * Write `content` to `envFileName`, resolved against the current working
 * directory. Resolves when the write completes.
 */
var writeEnvFile = async ({
  envFileName,
  content
}) => {
  const targetPath = path.resolve(process.cwd(), envFileName);
  return fs3.promises.writeFile(targetPath, content);
};
|
|
4328
|
+
/**
 * Generate the project's `.env` file from the environment-specific file
 * `.env.<environment>` found in `path`.
 *
 * The environment comes from the current environment (getEnvironment) and
 * falls back to `defaultEnvironment`. When the source file does not exist,
 * nothing is written and a log line explains the skip.
 */
var generateEnv = async ({
  defaultEnvironment,
  path: envsPath
}) => {
  const environment = getEnvironment() || defaultEnvironment;
  const envFileName = `.env.${environment}`;
  const sourceContent = await readEnvFile({ envFileName, envsPath });
  if (!sourceContent) {
    log5.info(
      logPrefix21,
      "Env file %s doesn't exist. Skip generating env file.",
      envFileName
    );
    return;
  }
  await writeEnvFile({ content: sourceContent, envFileName: ".env" });
  log5.info(
    logPrefix21,
    "Generate env file %s from %s successfully.",
    ".env",
    envFileName
  );
};
|
|
4351
|
+
|
|
4352
|
+
// src/generateEnv/generateEnvCommand.ts
// Environment used when no environment is set at all.
var DEFAULT_ENVIRONMENT = "Staging";
// yargs options for the `generate-env` command.
var options7 = {
  "default-environment": {
    alias: "d",
    type: "string",
    describe: "Default environment.",
    default: DEFAULT_ENVIRONMENT
  },
  path: {
    alias: "p",
    type: "string",
    describe: "Path to the directory where envs files are located.",
    default: "./"
  }
};
// `carlin generate-env` (aliases: ge, env) command definition.
var generateEnvCommand = {
  command: ["generate-env", "ge", "env"],
  describe: "Generate environment files.",
  builder: (yargs3) => {
    return yargs3.options(options7);
  },
  handler: (args) => {
    return generateEnv(args);
  }
};
|
|
4378
|
+
/**
 * Build a yargs `coerce` handler that mirrors the option value into the
 * environment variable `env` (via setEnvVar) and returns the value
 * unchanged.
 */
var coerceSetEnvVar = (env) => {
  const mirrorIntoEnv = (value) => {
    setEnvVar(env, value);
    return value;
  };
  return mirrorIntoEnv;
};
|
|
4384
|
+
// Common yargs options shared by every carlin command. Options with a
// coerceSetEnvVar coerce also mirror their value into the environment.
var options8 = {
  branch: {
    coerce: coerceSetEnvVar("BRANCH"),
    require: false,
    type: "string"
  },
  config: {
    alias: "c",
    describe: "Path to config file. You can create a config file and set all options there. Valid extensions: .js, .json, .ts, .yml, or .yaml.",
    require: false,
    type: "string"
  },
  environment: {
    alias: ["e", "env"],
    coerce: coerceSetEnvVar("ENVIRONMENT"),
    type: "string"
  },
  // Declared with no settings so yargs accepts `environments` from config
  // files (per-environment option overrides).
  environments: {},
  project: {
    coerce: coerceSetEnvVar("PROJECT"),
    require: false,
    type: "string"
  },
  region: {
    alias: "r",
    default: AWS_DEFAULT_REGION,
    describe: "AWS region.",
    type: "string"
  }
};
|
|
4414
|
+
// Key used by yargs `.pkgConf()` to read option defaults from package.json.
var getPkgConfig = () => NAME;
// Prefix used by yargs `.env()` to map environment variables to options
// (the CLI name in CONSTANT_CASE).
var getEnv = () => constantCase(NAME);
|
|
4420
|
+
/**
 * Build the carlin yargs CLI instance.
 *
 * Configuration precedence is assembled from: package.json (`pkgConf`),
 * every carlin.{ts,js,yml,yaml,json} config file found walking up from the
 * cwd (closer files win via deepmerge of the reversed list), an explicit
 * --config file, environment variables, and finally CLI arguments.
 * Middlewares then normalize the environment option, apply per-environment
 * overrides, validate the environment type, and propagate the region.
 */
var cli = () => {
  // Merged config from the file hierarchy; read by handleEnvironments to
  // decide whether an argv value came from the CLI or from config.
  let finalConfig;
  const getConfig = () => {
    const names = ["ts", "js", "yml", "yaml", "json"].map((ext) => {
      return `${NAME}.${ext}`;
    });
    const paths = [];
    let currentPath = process.cwd();
    let findUpPath;
    // Collect every config file walking up the directory tree.
    do {
      findUpPath = findUpSync(names, { cwd: currentPath });
      if (findUpPath) {
        // Continue the search from the parent of the found file's directory.
        currentPath = path__default.resolve(findUpPath, "../..");
        paths.push(findUpPath);
      }
    } while (findUpPath);
    const configs = paths.map((p) => {
      return readConfigFileSync({ configFilePath: p }) || {};
    });
    // Reverse so files closer to cwd override those further up.
    finalConfig = deepmerge.all(configs.reverse());
    return finalConfig;
  };
  // Apply `environments[environment]` overrides to argv, but never clobber
  // a value the user explicitly passed on the command line.
  const handleEnvironments = (argv, { parsed }) => {
    const { environment, environments } = argv;
    if (environment && environments && environments[environment]) {
      Object.entries(environments[environment]).forEach(([key, value]) => {
        const isKeyFromCli = (() => {
          const kebabCaseKey = kebabCase(key);
          // yargs marks defaulted options; those did not come from the CLI.
          if (parsed?.defaulted?.[kebabCaseKey]) {
            return false;
          }
          // Value identical to the merged config ⇒ not a CLI override.
          if (deepEqual(argv[key], finalConfig[key])) {
            return false;
          }
          return true;
        })();
        if (!isKeyFromCli) {
          argv[key] = value;
        }
      });
    }
  };
  return yargs(hideBin(process.argv)).strictCommands().scriptName(NAME).env(getEnv()).options(addGroupToOptions(options8, "Common Options")).middleware((argv) => {
    const finalEnvironment = argv.environment || process.env.ENVIRONMENT;
    if (finalEnvironment) {
      setEnvVar("ENVIRONMENT", finalEnvironment);
      // Keep the option and all its aliases in sync on argv.
      const envKeys = ["environment", ...options8.environment.alias];
      const envEntries = envKeys.map((key) => {
        return [key, finalEnvironment];
      });
      Object.assign(argv, Object.fromEntries(envEntries));
    }
  }).middleware(handleEnvironments).middleware(({ environment }) => {
    if (!["string", "undefined"].includes(typeof environment)) {
      throw new Error(
        `environment type is invalid. The value: ${JSON.stringify(
          environment
        )}`
      );
    }
  }).middleware(({ region }) => {
    AWS.config.region = region;
    setEnvVar("REGION", region);
  }).pkgConf(getPkgConfig()).config(getConfig()).config("config", (configFilePath) => {
    return readConfigFileSync({ configFilePath });
  }).command({
    // Hidden debugging command: dump the fully-resolved argv.
    command: "print-args",
    describe: false,
    handler: (argv) => {
      return console.log(JSON.stringify(argv, null, 2));
    }
  }).command(deployCommand).command(ecsTaskReportCommand).command(generateEnvCommand).epilogue(
    "For more information, read our docs at https://ttoss.dev/docs/carlin/"
  ).help();
};
|
|
4495
|
+
|
|
4496
|
+
// src/index.ts
// Entry point: build the yargs CLI and parse process.argv.
cli().parse();
|