carlin 1.19.14 → 1.20.0

This diff shows the changes between publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the package contents as they appear in their respective public registries.
package/dist/cli.js CHANGED
@@ -13,13 +13,15 @@ const generateEnvCommand_1 = require("./generateEnv/generateEnvCommand");
13
13
  const aws_sdk_1 = tslib_1.__importDefault(require("aws-sdk"));
14
14
  const deep_equal_1 = tslib_1.__importDefault(require("deep-equal"));
15
15
  const deepmerge_1 = tslib_1.__importDefault(require("deepmerge"));
16
- const find_up_1 = tslib_1.__importDefault(require("find-up"));
16
+ const findup_sync_1 = tslib_1.__importDefault(require("findup-sync"));
17
17
  const path_1 = tslib_1.__importDefault(require("path"));
18
- const coerceSetEnvVar = (env) => (value) => {
19
- if (value) {
20
- (0, utils_1.setEnvVar)(env, value);
21
- }
22
- return value;
18
+ const coerceSetEnvVar = (env) => {
19
+ return (value) => {
20
+ if (value) {
21
+ (0, utils_1.setEnvVar)(env, value);
22
+ }
23
+ return value;
24
+ };
23
25
  };
24
26
  exports.options = {
25
27
  branch: {
@@ -56,7 +58,9 @@ exports.options = {
56
58
  * You can also provide the options creating a property name `carlin`
57
59
  * inside your `package.json`. [See Yargs reference](https://yargs.js.org/docs/#api-reference-pkgconfkey-cwd).
58
60
  */
59
- const getPkgConfig = () => config_1.NAME;
61
+ const getPkgConfig = () => {
62
+ return config_1.NAME;
63
+ };
60
64
  /**
61
65
  * All options can be passed as environment variables matching the prefix
62
66
  * `CARLIN`. See [Yargs reference](https://yargs.js.org/docs/#api-reference-envprefix).
@@ -115,18 +119,22 @@ const cli = () => {
115
119
  * ```
116
120
  */
117
121
  const getConfig = () => {
118
- const names = ['js', 'yml', 'yaml', 'json', 'ts'].map((ext) => `${config_1.NAME}.${ext}`);
122
+ const names = ['js', 'yml', 'yaml', 'json', 'ts'].map((ext) => {
123
+ return `${config_1.NAME}.${ext}`;
124
+ });
119
125
  const paths = [];
120
126
  let currentPath = process.cwd();
121
127
  let findUpPath;
122
128
  do {
123
- findUpPath = find_up_1.default.sync(names, { cwd: currentPath });
129
+ findUpPath = (0, findup_sync_1.default)(names, { cwd: currentPath });
124
130
  if (findUpPath) {
125
131
  currentPath = path_1.default.resolve(findUpPath, '../..');
126
132
  paths.push(findUpPath);
127
133
  }
128
134
  } while (findUpPath);
129
- const configs = paths.map((p) => (0, utils_1.readObjectFile)({ path: p }) || {});
135
+ const configs = paths.map((p) => {
136
+ return (0, utils_1.readObjectFile)({ path: p }) || {};
137
+ });
130
138
  /**
131
139
  * Using configs.reverse() to get the farthest config first. This way the
132
140
  * nearest configs will replace others.
@@ -216,12 +224,16 @@ const cli = () => {
216
224
  })
217
225
  .pkgConf(getPkgConfig())
218
226
  .config(getConfig())
219
- .config('config', (configPath) => (0, utils_1.readObjectFile)({ path: configPath }))
227
+ .config('config', (configPath) => {
228
+ return (0, utils_1.readObjectFile)({ path: configPath });
229
+ })
220
230
  .command({
221
231
  command: 'print-args',
222
232
  describe: false,
223
- // eslint-disable-next-line no-console
224
- handler: (argv) => console.log(JSON.stringify(argv, null, 2)),
233
+ handler: (argv) => {
234
+ // eslint-disable-next-line no-console
235
+ return console.log(JSON.stringify(argv, null, 2));
236
+ },
225
237
  })
226
238
  .command(command_1.deployCommand)
227
239
  .command(ecsTaskReportCommand_1.ecsTaskReportCommand)
package/dist/config.js CHANGED
@@ -1,6 +1,6 @@
1
1
  "use strict";
2
2
  Object.defineProperty(exports, "__esModule", { value: true });
3
- exports.CLOUDFRONT_REGION = exports.AWS_DEFAULT_REGION = exports.NAME = void 0;
3
+ exports.NODE_RUNTIME = exports.CLOUDFRONT_REGION = exports.AWS_DEFAULT_REGION = exports.NAME = void 0;
4
4
  exports.NAME = 'carlin';
5
5
  exports.AWS_DEFAULT_REGION = 'us-east-1';
6
6
  /**
@@ -8,3 +8,4 @@ exports.AWS_DEFAULT_REGION = 'us-east-1';
8
8
  * https://docs.aws.amazon.com/AmazonCloudFront/latest/DeveloperGuide/lambda-requirements-limits.html#lambda-requirements-cloudfront-triggers
9
9
  */
10
10
  exports.CLOUDFRONT_REGION = 'us-east-1';
11
+ exports.NODE_RUNTIME = 'nodejs16.x';
@@ -6,9 +6,10 @@ const config_1 = require("../baseStack/config");
6
6
  const command_options_1 = require("./command.options");
7
7
  const utils_1 = require("../../utils");
8
8
  const config_2 = require("./config");
9
+ const config_3 = require("../../config");
10
+ const getTriggerPipelineObjectKey_1 = require("./getTriggerPipelineObjectKey");
9
11
  const change_case_1 = require("change-case");
10
12
  const js_yaml_1 = tslib_1.__importDefault(require("js-yaml"));
11
- const getTriggerPipelineObjectKey_1 = require("./getTriggerPipelineObjectKey");
12
13
  exports.API_LOGICAL_ID = 'ApiV1ServerlessApi';
13
14
  exports.CODE_BUILD_PROJECT_LOGS_LOGICAL_ID = 'RepositoryImageCodeBuildProjectLogsLogGroup';
14
15
  exports.CODE_BUILD_PROJECT_SERVICE_ROLE_LOGICAL_ID = 'RepositoryImageCodeBuildProjectIAMRole';
@@ -35,153 +36,160 @@ exports.IMAGE_UPDATER_SCHEDULE_SERVERLESS_FUNCTION_LOGICAL_ID = 'ImageUpdaterSch
35
36
  * [BUILD\_GENERAL1\_SMALL environment compute type](https://docs.aws.amazon.com/codebuild/latest/userguide/build-env-ref-compute-types.html)
36
37
  * with Linux as operational system to build the image.
37
38
  */
38
- const getRepositoryImageBuilder = () => ({
39
- Type: 'AWS::CodeBuild::Project',
40
- Properties: {
41
- Artifacts: {
42
- Type: 'NO_ARTIFACTS',
43
- },
44
- Cache: {
45
- Location: 'LOCAL',
46
- Modes: ['LOCAL_DOCKER_LAYER_CACHE'],
47
- Type: 'LOCAL',
48
- },
49
- Description: 'Create repository image.',
50
- Environment: {
51
- ComputeType: 'BUILD_GENERAL1_SMALL',
52
- EnvironmentVariables: [
53
- {
54
- Name: 'AWS_ACCOUNT_ID',
55
- Value: { Ref: 'AWS::AccountId' },
56
- },
57
- {
58
- Name: 'AWS_REGION',
59
- Value: { Ref: 'AWS::Region' },
60
- },
61
- {
62
- Name: 'DOCKERFILE',
63
- Value: {
64
- 'Fn::Sub': [
65
- 'FROM public.ecr.aws/ubuntu/ubuntu:20.04_stable',
66
- // https://stackoverflow.com/a/59693182/8786986
67
- 'ENV DEBIAN_FRONTEND noninteractive',
68
- // Make sure apt is up to date
69
- 'RUN apt-get update --fix-missing',
70
- 'RUN apt-get install -y curl',
71
- 'RUN apt-get install -y git',
72
- 'RUN apt-get install -y jq',
73
- // Install Node.js
74
- 'RUN curl -fsSL https://deb.nodesource.com/setup_lts.x | bash -',
75
- 'RUN apt-get install -y nodejs',
76
- // Clean cache
77
- 'RUN apt-get clean',
78
- // Install Yarn
79
- 'RUN npm install -g yarn',
80
- // Install carlin CLI
81
- 'RUN yarn global add carlin',
82
- // Configure git
83
- 'RUN git config --global user.name carlin',
84
- 'RUN git config --global user.email carlin@ttoss.dev',
85
- 'RUN mkdir /root/.ssh/',
86
- 'COPY ./id_rsa /root/.ssh/id_rsa',
87
- 'RUN chmod 600 /root/.ssh/id_rsa',
88
- // Make sure your domain is accepted
89
- 'RUN touch /root/.ssh/known_hosts',
90
- 'RUN ssh-keyscan github.com >> /root/.ssh/known_hosts',
91
- // Copy repository
92
- 'COPY . /home',
93
- // Go to repository directory
94
- 'WORKDIR /home/repository',
95
- // Set Yarn cache
96
- 'RUN mkdir -p /home/yarn-cache',
97
- 'RUN yarn config set cache-folder /home/yarn-cache',
98
- 'RUN yarn install',
99
- // Used in case of yarn.lock is modified.
100
- 'RUN git checkout -- yarn.lock',
101
- ].join('\n'),
102
- },
103
- },
104
- {
105
- Name: 'IMAGE_TAG',
106
- Value: 'latest',
107
- },
108
- {
109
- Name: 'REPOSITORY_ECR_REPOSITORY',
110
- Value: { Ref: exports.ECR_REPOSITORY_LOGICAL_ID },
111
- },
112
- {
113
- Name: 'SSH_KEY',
114
- Value: { Ref: 'SSHKey' },
115
- },
116
- {
117
- Name: 'SSH_URL',
118
- Value: { Ref: 'SSHUrl' },
119
- },
120
- ],
121
- Image: 'aws/codebuild/standard:3.0',
122
- ImagePullCredentialsType: 'CODEBUILD',
123
- /**
124
- * Enables running the Docker daemon inside a Docker container. Set to
125
- * true only if the build project is used to build Docker images.
126
- * Otherwise, a build that attempts to interact with the Docker daemon
127
- * fails. The default setting is false."
128
- * https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-codebuild-project-environment.html#cfn-codebuild-project-environment-privilegedmode
129
- */
130
- PrivilegedMode: true,
131
- Type: 'LINUX_CONTAINER',
132
- },
133
- LogsConfig: {
134
- CloudWatchLogs: {
135
- Status: 'ENABLED',
136
- GroupName: { Ref: exports.CODE_BUILD_PROJECT_LOGS_LOGICAL_ID },
39
+ const getRepositoryImageBuilder = () => {
40
+ /**
41
+ * Get only the number of NODE_RUNTIME. For example, if NODE_RUNTIME is
42
+ * `nodejs14.x`, then `nodeRuntimeNumber` will be `14`.
43
+ */
44
+ const nodeRuntimeNumber = config_3.NODE_RUNTIME.replace('nodejs', '').replace('.x', '');
45
+ return {
46
+ Type: 'AWS::CodeBuild::Project',
47
+ Properties: {
48
+ Artifacts: {
49
+ Type: 'NO_ARTIFACTS',
137
50
  },
138
- },
139
- ServiceRole: {
140
- 'Fn::GetAtt': [exports.CODE_BUILD_PROJECT_SERVICE_ROLE_LOGICAL_ID, 'Arn'],
141
- },
142
- Source: {
143
- BuildSpec: js_yaml_1.default.dump({
144
- version: '0.2',
145
- phases: {
146
- install: {
147
- commands: [
148
- 'echo install started on `date`',
149
- `echo "$SSH_KEY" > ~/.ssh/id_rsa`,
150
- 'chmod 600 ~/.ssh/id_rsa',
151
- 'rm -rf repository',
152
- 'git clone $SSH_URL repository',
153
- 'cd repository',
154
- 'ls',
155
- ],
51
+ Cache: {
52
+ Location: 'LOCAL',
53
+ Modes: ['LOCAL_DOCKER_LAYER_CACHE'],
54
+ Type: 'LOCAL',
55
+ },
56
+ Description: 'Create repository image.',
57
+ Environment: {
58
+ ComputeType: 'BUILD_GENERAL1_SMALL',
59
+ EnvironmentVariables: [
60
+ {
61
+ Name: 'AWS_ACCOUNT_ID',
62
+ Value: { Ref: 'AWS::AccountId' },
156
63
  },
157
- pre_build: {
158
- commands: ['echo pre_build started on `date`'],
64
+ {
65
+ Name: 'AWS_REGION',
66
+ Value: { Ref: 'AWS::Region' },
159
67
  },
160
- build: {
161
- commands: [
162
- 'echo build started on `date`',
163
- '$(aws ecr get-login --no-include-email --region $AWS_REGION)',
164
- 'echo Building the repository image...',
165
- 'cd ../',
166
- 'cp ~/.ssh/id_rsa .',
167
- 'echo "$DOCKERFILE" > Dockerfile',
168
- 'cat Dockerfile',
169
- 'docker build -t $REPOSITORY_ECR_REPOSITORY:$IMAGE_TAG -f Dockerfile .',
170
- 'docker tag $REPOSITORY_ECR_REPOSITORY:$IMAGE_TAG $AWS_ACCOUNT_ID.dkr.ecr.$AWS_REGION.amazonaws.com/$REPOSITORY_ECR_REPOSITORY:$IMAGE_TAG',
171
- 'echo Pushing the repository image...',
172
- 'docker push $AWS_ACCOUNT_ID.dkr.ecr.$AWS_REGION.amazonaws.com/$REPOSITORY_ECR_REPOSITORY:$IMAGE_TAG',
173
- ],
68
+ {
69
+ Name: 'DOCKERFILE',
70
+ Value: {
71
+ 'Fn::Sub': [
72
+ 'FROM public.ecr.aws/ubuntu/ubuntu:20.04_stable',
73
+ // https://stackoverflow.com/a/59693182/8786986
74
+ 'ENV DEBIAN_FRONTEND noninteractive',
75
+ // Make sure apt is up to date
76
+ 'RUN apt-get update --fix-missing',
77
+ 'RUN apt-get install -y curl',
78
+ 'RUN apt-get install -y git',
79
+ 'RUN apt-get install -y jq',
80
+ // Install Node.js
81
+ `RUN curl -fsSL https://deb.nodesource.com/setup_${nodeRuntimeNumber}.x | bash -`,
82
+ 'RUN apt-get install -y nodejs',
83
+ // Clean cache
84
+ 'RUN apt-get clean',
85
+ // Install Yarn
86
+ 'RUN npm install -g yarn',
87
+ // Install carlin CLI
88
+ 'RUN yarn global add carlin',
89
+ // Configure git
90
+ 'RUN git config --global user.name carlin',
91
+ 'RUN git config --global user.email carlin@ttoss.dev',
92
+ 'RUN mkdir /root/.ssh/',
93
+ 'COPY ./id_rsa /root/.ssh/id_rsa',
94
+ 'RUN chmod 600 /root/.ssh/id_rsa',
95
+ // Make sure your domain is accepted
96
+ 'RUN touch /root/.ssh/known_hosts',
97
+ 'RUN ssh-keyscan github.com >> /root/.ssh/known_hosts',
98
+ // Copy repository
99
+ 'COPY . /home',
100
+ // Go to repository directory
101
+ 'WORKDIR /home/repository',
102
+ // Set Yarn cache
103
+ 'RUN mkdir -p /home/yarn-cache',
104
+ 'RUN yarn config set cache-folder /home/yarn-cache',
105
+ 'RUN yarn install',
106
+ // Used in case of yarn.lock is modified.
107
+ 'RUN git checkout -- yarn.lock',
108
+ ].join('\n'),
109
+ },
110
+ },
111
+ {
112
+ Name: 'IMAGE_TAG',
113
+ Value: 'latest',
114
+ },
115
+ {
116
+ Name: 'REPOSITORY_ECR_REPOSITORY',
117
+ Value: { Ref: exports.ECR_REPOSITORY_LOGICAL_ID },
118
+ },
119
+ {
120
+ Name: 'SSH_KEY',
121
+ Value: { Ref: 'SSHKey' },
174
122
  },
175
- post_build: {
176
- commands: ['echo post_build completed on `date`'],
123
+ {
124
+ Name: 'SSH_URL',
125
+ Value: { Ref: 'SSHUrl' },
177
126
  },
127
+ ],
128
+ Image: 'aws/codebuild/standard:3.0',
129
+ ImagePullCredentialsType: 'CODEBUILD',
130
+ /**
131
+ * Enables running the Docker daemon inside a Docker container. Set to
132
+ * true only if the build project is used to build Docker images.
133
+ * Otherwise, a build that attempts to interact with the Docker daemon
134
+ * fails. The default setting is false."
135
+ * https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-codebuild-project-environment.html#cfn-codebuild-project-environment-privilegedmode
136
+ */
137
+ PrivilegedMode: true,
138
+ Type: 'LINUX_CONTAINER',
139
+ },
140
+ LogsConfig: {
141
+ CloudWatchLogs: {
142
+ Status: 'ENABLED',
143
+ GroupName: { Ref: exports.CODE_BUILD_PROJECT_LOGS_LOGICAL_ID },
178
144
  },
179
- }),
180
- Type: 'NO_SOURCE',
145
+ },
146
+ ServiceRole: {
147
+ 'Fn::GetAtt': [exports.CODE_BUILD_PROJECT_SERVICE_ROLE_LOGICAL_ID, 'Arn'],
148
+ },
149
+ Source: {
150
+ BuildSpec: js_yaml_1.default.dump({
151
+ version: '0.2',
152
+ phases: {
153
+ install: {
154
+ commands: [
155
+ 'echo install started on `date`',
156
+ `echo "$SSH_KEY" > ~/.ssh/id_rsa`,
157
+ 'chmod 600 ~/.ssh/id_rsa',
158
+ 'rm -rf repository',
159
+ 'git clone $SSH_URL repository',
160
+ 'cd repository',
161
+ 'ls',
162
+ ],
163
+ },
164
+ pre_build: {
165
+ commands: ['echo pre_build started on `date`'],
166
+ },
167
+ build: {
168
+ commands: [
169
+ 'echo build started on `date`',
170
+ '$(aws ecr get-login --no-include-email --region $AWS_REGION)',
171
+ 'echo Building the repository image...',
172
+ 'cd ../',
173
+ 'cp ~/.ssh/id_rsa .',
174
+ 'echo "$DOCKERFILE" > Dockerfile',
175
+ 'cat Dockerfile',
176
+ 'docker build -t $REPOSITORY_ECR_REPOSITORY:$IMAGE_TAG -f Dockerfile .',
177
+ 'docker tag $REPOSITORY_ECR_REPOSITORY:$IMAGE_TAG $AWS_ACCOUNT_ID.dkr.ecr.$AWS_REGION.amazonaws.com/$REPOSITORY_ECR_REPOSITORY:$IMAGE_TAG',
178
+ 'echo Pushing the repository image...',
179
+ 'docker push $AWS_ACCOUNT_ID.dkr.ecr.$AWS_REGION.amazonaws.com/$REPOSITORY_ECR_REPOSITORY:$IMAGE_TAG',
180
+ ],
181
+ },
182
+ post_build: {
183
+ commands: ['echo post_build completed on `date`'],
184
+ },
185
+ },
186
+ }),
187
+ Type: 'NO_SOURCE',
188
+ },
189
+ TimeoutInMinutes: 15,
181
190
  },
182
- TimeoutInMinutes: 15,
183
- },
184
- });
191
+ };
192
+ };
185
193
  exports.getRepositoryImageBuilder = getRepositoryImageBuilder;
186
194
  /**
187
195
  * This variable is used inside GitHub webhooks to identify the object key
@@ -227,29 +235,31 @@ const getCicdTemplate = ({ pipelines = [], cpu = config_2.ECS_TASK_DEFAULT_CPU,
227
235
  * with a defined expiration rule is also defined. The registry only keeps
228
236
  * the latest image.
229
237
  */
230
- const getEcrRepositoryResource = () => ({
231
- Type: 'AWS::ECR::Repository',
232
- Properties: {
233
- LifecyclePolicy: {
234
- LifecyclePolicyText: JSON.stringify({
235
- rules: [
236
- {
237
- rulePriority: 1,
238
- description: 'Only keep the latest image',
239
- selection: {
240
- tagStatus: 'any',
241
- countType: 'imageCountMoreThan',
242
- countNumber: 1,
243
- },
244
- action: {
245
- type: 'expire',
238
+ const getEcrRepositoryResource = () => {
239
+ return {
240
+ Type: 'AWS::ECR::Repository',
241
+ Properties: {
242
+ LifecyclePolicy: {
243
+ LifecyclePolicyText: JSON.stringify({
244
+ rules: [
245
+ {
246
+ rulePriority: 1,
247
+ description: 'Only keep the latest image',
248
+ selection: {
249
+ tagStatus: 'any',
250
+ countType: 'imageCountMoreThan',
251
+ countNumber: 1,
252
+ },
253
+ action: {
254
+ type: 'expire',
255
+ },
246
256
  },
247
- },
248
- ],
249
- }, null, 2),
257
+ ],
258
+ }, null, 2),
259
+ },
250
260
  },
251
- },
252
- });
261
+ };
262
+ };
253
263
  resources[exports.ECR_REPOSITORY_LOGICAL_ID] = getEcrRepositoryResource();
254
264
  const commonFunctionProperties = {
255
265
  CodeUri: {
@@ -260,7 +270,7 @@ const getCicdTemplate = ({ pipelines = [], cpu = config_2.ECS_TASK_DEFAULT_CPU,
260
270
  Role: {
261
271
  'Fn::GetAtt': [exports.FUNCTION_IAM_ROLE_LOGICAL_ID, 'Arn'],
262
272
  },
263
- Runtime: 'nodejs14.x',
273
+ Runtime: config_3.NODE_RUNTIME,
264
274
  Timeout: 60,
265
275
  };
266
276
  /**
@@ -633,10 +643,12 @@ const getCicdTemplate = ({ pipelines = [], cpu = config_2.ECS_TASK_DEFAULT_CPU,
633
643
  Name: 'CI',
634
644
  Value: 'true',
635
645
  },
636
- ...taskEnvironment.map((te) => ({
637
- Name: te.name,
638
- Value: te.value,
639
- })),
646
+ ...taskEnvironment.map((te) => {
647
+ return {
648
+ Name: te.name,
649
+ Value: te.value,
650
+ };
651
+ }),
640
652
  ],
641
653
  Image: {
642
654
  'Fn::Sub': [
@@ -717,7 +729,7 @@ const getCicdTemplate = ({ pipelines = [], cpu = config_2.ECS_TASK_DEFAULT_CPU,
717
729
  Role: {
718
730
  'Fn::GetAtt': [exports.FUNCTION_IAM_ROLE_LOGICAL_ID, 'Arn'],
719
731
  },
720
- Runtime: 'nodejs14.x',
732
+ Runtime: config_3.NODE_RUNTIME,
721
733
  Timeout: 60,
722
734
  },
723
735
  };
@@ -861,7 +873,9 @@ const getCicdTemplate = ({ pipelines = [], cpu = config_2.ECS_TASK_DEFAULT_CPU,
861
873
  FunctionName: {
862
874
  Ref: exports.PIPELINES_HANDLER_LAMBDA_FUNCTION_LOGICAL_ID,
863
875
  },
864
- UserParameters: (() => pipeline)(),
876
+ UserParameters: (() => {
877
+ return pipeline;
878
+ })(),
865
879
  },
866
880
  InputArtifacts: [
867
881
  {
@@ -12,7 +12,9 @@ const logPrefix = 'deploy-cicd';
12
12
  exports.deployCicdCommand = {
13
13
  command: 'cicd',
14
14
  describe: 'Deploy CICD.',
15
- builder: (yargs) => yargs.options((0, utils_1.addGroupToOptions)(command_options_1.options, 'Deploy CICD Options')),
15
+ builder: (yargs) => {
16
+ return yargs.options((0, utils_1.addGroupToOptions)(command_options_1.options, 'Deploy CICD Options'));
17
+ },
16
18
  handler: ({ destroy, ...rest }) => {
17
19
  if (destroy) {
18
20
  npmlog_1.default.info(logPrefix, `${config_1.NAME} doesn't destroy CICD stack.`);
@@ -14,7 +14,11 @@ exports.options = {
14
14
  },
15
15
  pipelines: {
16
16
  choices: pipelines_1.pipelines,
17
- coerce: (values) => values.map((value) => (0, change_case_1.camelCase)(value)),
17
+ coerce: (values) => {
18
+ return values.map((value) => {
19
+ return (0, change_case_1.camelCase)(value);
20
+ });
21
+ },
18
22
  default: [],
19
23
  description: 'Pipelines that will be implemented with the CICD stack.',
20
24
  type: 'array',
@@ -12,7 +12,9 @@ const utils_1 = require("../utils");
12
12
  const utils_2 = require("../../utils");
13
13
  const npmlog_1 = tslib_1.__importDefault(require("npmlog"));
14
14
  const logPrefix = 'cicd';
15
- const getLambdaInput = (extension) => path.resolve(__dirname, `lambdas/index.${extension}`);
15
+ const getLambdaInput = (extension) => {
16
+ return path.resolve(__dirname, `lambdas/index.${extension}`);
17
+ };
16
18
  exports.getLambdaInput = getLambdaInput;
17
19
  const deployCicdLambdas = async ({ stackName }) => {
18
20
  const lambdaInput = (() => {
@@ -44,7 +44,9 @@ const options = {
44
44
  exports.ecsTaskReportCommand = {
45
45
  command: 'cicd-ecs-task-report',
46
46
  describe: false,
47
- builder: (yargs) => yargs.options(options),
47
+ builder: (yargs) => {
48
+ return yargs.options(options);
49
+ },
48
50
  handler: async (args) => {
49
51
  return sendEcsTaskReport(args);
50
52
  },
@@ -1,9 +1,9 @@
1
1
  "use strict";
2
2
  Object.defineProperty(exports, "__esModule", { value: true });
3
3
  exports.getCicdStackName = void 0;
4
- const change_case_1 = require("change-case");
5
4
  const config_1 = require("../../config");
6
5
  const getProjectName_1 = require("../../utils/getProjectName");
6
+ const change_case_1 = require("change-case");
7
7
  const getCicdStackName = () => {
8
8
  const project = (0, getProjectName_1.getProjectName)();
9
9
  return (0, change_case_1.pascalCase)([config_1.NAME, 'Cicd', project].join(' '));
@@ -14,7 +14,9 @@ const getUserParameters = (event) => {
14
14
  const [pipeline, stage] = event['CodePipeline.job'].data.actionConfiguration.configuration.UserParameters.split('&');
15
15
  return { pipeline: pipeline, stage };
16
16
  };
17
- const getJobDetailsFilename = (jobId) => `/tmp/${jobId}.zip`;
17
+ const getJobDetailsFilename = (jobId) => {
18
+ return `/tmp/${jobId}.zip`;
19
+ };
18
20
  exports.getJobDetailsFilename = getJobDetailsFilename;
19
21
  const getJobDetails = async (event) => {
20
22
  const jobId = event['CodePipeline.job'].id;
@@ -3,74 +3,84 @@ Object.defineProperty(exports, "__esModule", { value: true });
3
3
  exports.getTagCommands = exports.getMainCommands = exports.getClosedPrCommands = exports.getPrCommands = exports.getCommandFileDir = exports.pipelines = void 0;
4
4
  const config_1 = require("./config");
5
5
  exports.pipelines = ['pr', 'main', 'tag'];
6
- const getCommandFileDir = (pipeline) => `./${config_1.CICD_FOLDER_NAME}/commands/${pipeline}.sh`;
6
+ const getCommandFileDir = (pipeline) => {
7
+ return `./${config_1.CICD_FOLDER_NAME}/commands/${pipeline}.sh`;
8
+ };
7
9
  exports.getCommandFileDir = getCommandFileDir;
8
- const getPrCommands = ({ branch }) => [
9
- /**
10
- * -e Exit immediately if a command exits with a non-zero status.
11
- */
12
- 'set -e',
13
- 'git status',
14
- 'git fetch',
15
- /**
16
- * Update to the most recent main branch to Lerna performs the diff properly.
17
- */
18
- 'git pull origin main',
19
- `git checkout ${branch} || (echo 'branch not found, probably deleted'; exit 0)`,
20
- `git pull origin ${branch}`,
21
- 'git rev-parse HEAD',
22
- 'git status',
23
- 'yarn install --frozen-lockfile',
24
- `sh -e ${(0, exports.getCommandFileDir)('pr')}`,
25
- ];
10
+ const getPrCommands = ({ branch }) => {
11
+ return [
12
+ /**
13
+ * -e Exit immediately if a command exits with a non-zero status.
14
+ */
15
+ 'set -e',
16
+ 'git status',
17
+ 'git fetch',
18
+ /**
19
+ * Update to the most recent main branch to Lerna performs the diff properly.
20
+ */
21
+ 'git pull origin main',
22
+ `git checkout ${branch} || (echo 'branch not found, probably deleted'; exit 0)`,
23
+ `git pull origin ${branch}`,
24
+ 'git rev-parse HEAD',
25
+ 'git status',
26
+ 'yarn install --frozen-lockfile',
27
+ `sh -e ${(0, exports.getCommandFileDir)('pr')}`,
28
+ ];
29
+ };
26
30
  exports.getPrCommands = getPrCommands;
27
- const getClosedPrCommands = ({ branch }) => [
28
- 'git status',
29
- 'git fetch',
30
- /**
31
- * Get the most recent main because the PR was approved.
32
- */
33
- 'git pull origin main',
34
- 'git rev-parse HEAD',
35
- `export CARLIN_BRANCH=${branch}`,
36
- /**
37
- * Exit without error if `closed-pr` does not exist.
38
- */
39
- `[ ! -f "${(0, exports.getCommandFileDir)('closed-pr')}" ] && echo 'closed-pr command not found' || sh ${(0, exports.getCommandFileDir)('closed-pr')}`,
40
- ];
31
+ const getClosedPrCommands = ({ branch }) => {
32
+ return [
33
+ 'git status',
34
+ 'git fetch',
35
+ /**
36
+ * Get the most recent main because the PR was approved.
37
+ */
38
+ 'git pull origin main',
39
+ 'git rev-parse HEAD',
40
+ `export CARLIN_BRANCH=${branch}`,
41
+ /**
42
+ * Exit without error if `closed-pr` does not exist.
43
+ */
44
+ `[ ! -f "${(0, exports.getCommandFileDir)('closed-pr')}" ] && echo 'closed-pr command not found' || sh ${(0, exports.getCommandFileDir)('closed-pr')}`,
45
+ ];
46
+ };
41
47
  exports.getClosedPrCommands = getClosedPrCommands;
42
- const getMainCommands = () => [
43
- /**
44
- * -e Exit immediately if a command exits with a non-zero status.
45
- */
46
- 'set -e',
47
- `export CARLIN_ENVIRONMENT=Staging`,
48
- 'git status',
49
- 'git fetch',
50
- 'git pull origin main',
51
- 'git rev-parse HEAD',
52
- /**
53
- * Reporting `MainTagFound` before exiting the process. This command blocks
54
- * the process if tag was found. If we don't do this, the loop would never
55
- * end because `main` command can create a tag, that would trigger this
56
- * pipeline again.
57
- */
58
- 'if git describe --exact-match; then echo "Tag found" && carlin cicd-ecs-task-report --status=MainTagFound && exit 0; fi',
59
- 'yarn install --frozen-lockfile',
60
- `sh -e ${(0, exports.getCommandFileDir)('main')}`,
61
- ];
48
+ const getMainCommands = () => {
49
+ return [
50
+ /**
51
+ * -e Exit immediately if a command exits with a non-zero status.
52
+ */
53
+ 'set -e',
54
+ `export CARLIN_ENVIRONMENT=Staging`,
55
+ 'git status',
56
+ 'git fetch',
57
+ 'git pull origin main',
58
+ 'git rev-parse HEAD',
59
+ /**
60
+ * Reporting `MainTagFound` before exiting the process. This command blocks
61
+ * the process if tag was found. If we don't do this, the loop would never
62
+ * end because `main` command can create a tag, that would trigger this
63
+ * pipeline again.
64
+ */
65
+ 'if git describe --exact-match; then echo "Tag found" && carlin cicd-ecs-task-report --status=MainTagFound && exit 0; fi',
66
+ 'yarn install --frozen-lockfile',
67
+ `sh -e ${(0, exports.getCommandFileDir)('main')}`,
68
+ ];
69
+ };
62
70
  exports.getMainCommands = getMainCommands;
63
- const getTagCommands = ({ tag }) => [
64
- /**
65
- * -e Exit immediately if a command exits with a non-zero status.
66
- */
67
- 'set -e',
68
- `export CARLIN_ENVIRONMENT=Production`,
69
- 'git status',
70
- 'git fetch --tags',
71
- `git checkout tags/${tag} -b ${tag}-branch`,
72
- 'git rev-parse HEAD',
73
- 'yarn install --frozen-lockfile',
74
- `sh -e ${(0, exports.getCommandFileDir)('tag')}`,
75
- ];
71
+ const getTagCommands = ({ tag }) => {
72
+ return [
73
+ /**
74
+ * -e Exit immediately if a command exits with a non-zero status.
75
+ */
76
+ 'set -e',
77
+ `export CARLIN_ENVIRONMENT=Production`,
78
+ 'git status',
79
+ 'git fetch --tags',
80
+ `git checkout tags/${tag} -b ${tag}-branch`,
81
+ 'git rev-parse HEAD',
82
+ 'yarn install --frozen-lockfile',
83
+ `sh -e ${(0, exports.getCommandFileDir)('tag')}`,
84
+ ];
85
+ };
76
86
  exports.getTagCommands = getTagCommands;
@@ -6,5 +6,7 @@ const fs = tslib_1.__importStar(require("fs"));
6
6
  /**
7
7
  * Created to allow mocking.
8
8
  */
9
- const readSSHKey = (dir) => fs.readFileSync(dir, 'utf-8');
9
+ const readSSHKey = (dir) => {
10
+ return fs.readFileSync(dir, 'utf-8');
11
+ };
10
12
  exports.readSSHKey = readSSHKey;
@@ -18,7 +18,10 @@ npmlog_1.default.addLevel('output', 10000, { fg: 'blue' });
18
18
  * https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/cloudformation-limits.html
19
19
  */
20
20
  const TEMPLATE_BODY_MAX_SIZE = 51200;
21
- const isTemplateBodyGreaterThanMaxSize = (template) => Buffer.byteLength(JSON.stringify(template), 'utf8') >= TEMPLATE_BODY_MAX_SIZE;
21
+ const isTemplateBodyGreaterThanMaxSize = (template) => {
22
+ return (Buffer.byteLength(JSON.stringify(template), 'utf8') >=
23
+ TEMPLATE_BODY_MAX_SIZE);
24
+ };
22
25
  /**
23
26
  * Update CloudFormation template to base stack bucket.
24
27
  * @param input.stackName: CloudFormation stack name.
@@ -78,13 +81,19 @@ const describeStackEvents = async ({ stackName, }) => {
78
81
  npmlog_1.default.error(logPrefix, 'Stack events:');
79
82
  const { StackEvents } = await (0, exports.cloudFormation)().send(new client_cloudformation_1.DescribeStackEventsCommand({ StackName: stackName }));
80
83
  const events = (StackEvents || [])
81
- .filter(({ Timestamp }) => Date.now() - Number(Timestamp) < 10 * 60 * 1000)
82
- .filter(({ ResourceStatusReason }) => ResourceStatusReason)
84
+ .filter(({ Timestamp }) => {
85
+ return Date.now() - Number(Timestamp) < 10 * 60 * 1000;
86
+ })
87
+ .filter(({ ResourceStatusReason }) => {
88
+ return ResourceStatusReason;
89
+ })
83
90
  /**
84
91
  * Show newer events last.
85
92
  */
86
93
  .reverse();
87
- events.forEach(({ LogicalResourceId, ResourceStatusReason }) => npmlog_1.default.event(LogicalResourceId, ResourceStatusReason));
94
+ events.forEach(({ LogicalResourceId, ResourceStatusReason }) => {
95
+ return npmlog_1.default.event(LogicalResourceId, ResourceStatusReason);
96
+ });
88
97
  return events;
89
98
  };
90
99
  exports.describeStackEvents = describeStackEvents;
@@ -98,7 +107,9 @@ const describeStack = async ({ stackName }) => {
98
107
  exports.describeStack = describeStack;
99
108
  const getStackOutput = async ({ stackName, outputKey, }) => {
100
109
  const { Outputs = [] } = await (0, exports.describeStack)({ stackName });
101
- const output = Outputs === null || Outputs === void 0 ? void 0 : Outputs.find(({ OutputKey }) => OutputKey === outputKey);
110
+ const output = Outputs === null || Outputs === void 0 ? void 0 : Outputs.find(({ OutputKey }) => {
111
+ return OutputKey === outputKey;
112
+ });
102
113
  if (!output) {
103
114
  throw new Error(`Output ${outputKey} doesn't exist on ${stackName} stack`);
104
115
  }
@@ -202,7 +213,9 @@ const enableTerminationProtection = async ({ stackName, }) => {
202
213
  }
203
214
  };
204
215
  exports.enableTerminationProtection = enableTerminationProtection;
205
- exports.defaultTemplatePaths = ['ts', 'js', 'yaml', 'yml', 'json'].map((extension) => `src/cloudformation.${extension}`);
216
+ exports.defaultTemplatePaths = ['ts', 'js', 'yaml', 'yml', 'json'].map((extension) => {
217
+ return `src/cloudformation.${extension}`;
218
+ });
206
219
  /**
207
220
  * 1. Add defaults to CloudFormation template and parameters.
208
221
  * 1. Check is CloudFormation template body is greater than max size limit.
@@ -257,13 +270,13 @@ const canDestroyStack = async ({ stackName }) => {
257
270
  exports.canDestroyStack = canDestroyStack;
258
271
  const emptyStackBuckets = async ({ stackName }) => {
259
272
  const buckets = [];
260
- await (async function getBuckets({ nextToken }) {
273
+ await (async ({ nextToken }) => {
261
274
  const { NextToken, StackResourceSummaries } = await (0, exports.cloudFormation)().send(new client_cloudformation_1.ListStackResourcesCommand({
262
275
  StackName: stackName,
263
276
  NextToken: nextToken,
264
277
  }));
265
278
  if (NextToken) {
266
- await getBuckets({ nextToken: NextToken });
279
+ // await getBuckets({ nextToken: NextToken });
267
280
  }
268
281
  (StackResourceSummaries || []).forEach(({ ResourceType, PhysicalResourceId }) => {
269
282
  if (ResourceType === 'AWS::S3::Bucket' && PhysicalResourceId) {
@@ -271,7 +284,9 @@ const emptyStackBuckets = async ({ stackName }) => {
271
284
  }
272
285
  });
273
286
  })({});
274
- return Promise.all(buckets.map((bucket) => (0, s3_1.emptyS3Directory)({ bucket })));
287
+ return Promise.all(buckets.map((bucket) => {
288
+ return (0, s3_1.emptyS3Directory)({ bucket });
289
+ }));
275
290
  };
276
291
  /**
277
292
  * 1. Check if `environment` is defined. If defined, return. It doesn't destroy
@@ -14,7 +14,9 @@ const path_1 = tslib_1.__importDefault(require("path"));
14
14
  const logPrefix = 'cloudformation';
15
15
  npmlog_1.default.addLevel('event', 10000, { fg: 'yellow' });
16
16
  npmlog_1.default.addLevel('output', 10000, { fg: 'blue' });
17
- exports.defaultTemplatePaths = ['ts', 'js', 'yaml', 'yml', 'json'].map((extension) => `./src/cloudformation.${extension}`);
17
+ exports.defaultTemplatePaths = ['ts', 'js', 'yaml', 'yml', 'json'].map((extension) => {
18
+ return `./src/cloudformation.${extension}`;
19
+ });
18
20
  const findAndReadCloudFormationTemplate = ({ templatePath: defaultTemplatePath, }) => {
19
21
  const templatePath = defaultTemplatePath ||
20
22
  exports.defaultTemplatePaths
@@ -129,20 +131,24 @@ const deployCloudFormation = async ({ lambdaDockerfile, lambdaInput, lambdaImage
129
131
  exports.deployCloudFormation = deployCloudFormation;
130
132
  const emptyStackBuckets = async ({ stackName }) => {
131
133
  const buckets = [];
132
- await (async function getBuckets({ nextToken }) {
133
- const { NextToken, StackResourceSummaries } = await (0, cloudFormation_core_1.cloudFormationV2)()
134
+ await (async ({ nextToken }) => {
135
+ const {
136
+ // NextToken,
137
+ StackResourceSummaries, } = await (0, cloudFormation_core_1.cloudFormationV2)()
134
138
  .listStackResources({ StackName: stackName, NextToken: nextToken })
135
139
  .promise();
136
- if (NextToken) {
137
- await getBuckets({ nextToken: NextToken });
138
- }
140
+ // if (NextToken) {
141
+ // await getBuckets({ nextToken: NextToken });
142
+ // }
139
143
  (StackResourceSummaries || []).forEach(({ ResourceType, PhysicalResourceId }) => {
140
144
  if (ResourceType === 'AWS::S3::Bucket' && PhysicalResourceId) {
141
145
  buckets.push(PhysicalResourceId);
142
146
  }
143
147
  });
144
148
  })({});
145
- return Promise.all(buckets.map((bucket) => (0, s3_1.emptyS3Directory)({ bucket })));
149
+ return Promise.all(buckets.map((bucket) => {
150
+ return (0, s3_1.emptyS3Directory)({ bucket });
151
+ }));
146
152
  };
147
153
  /**
148
154
  * 1. Check if `environment` is defined. If defined, return. It doesn't destroy
@@ -49,7 +49,9 @@ exports.options = {
49
49
  type: 'boolean',
50
50
  },
51
51
  'lambda-dockerfile': {
52
- coerce: (arg) => (0, readDockerfile_1.readDockerfile)(arg),
52
+ coerce: (arg) => {
53
+ return (0, readDockerfile_1.readDockerfile)(arg);
54
+ },
53
55
  default: 'Dockerfile',
54
56
  describe: 'Instructions to create the Lambda image.',
55
57
  type: 'string',
@@ -167,11 +169,15 @@ exports.deployCommand = {
167
169
  command_2.deployCicdCommand,
168
170
  ];
169
171
  yargsBuilder.positional('deploy', {
170
- choices: commands.map(({ command }) => command),
172
+ choices: commands.map(({ command }) => {
173
+ return command;
174
+ }),
171
175
  describe: 'Type of deployment.',
172
176
  type: 'string',
173
177
  });
174
- commands.forEach((command) => yargsBuilder.command(command));
178
+ commands.forEach((command) => {
179
+ return yargsBuilder.command(command);
180
+ });
175
181
  return yargsBuilder;
176
182
  },
177
183
  handler: ({ destroy, ...rest }) => {
@@ -13,14 +13,14 @@ const buildLambdaSingleFile = async ({ lambdaExternals, lambdaInput, }) => {
13
13
  npmlog_1.default.info(logPrefix, 'Building Lambda single file...');
14
14
  const { errors } = esbuild.buildSync({
15
15
  banner: {
16
- js: '// Powered by carlin (https://ttoss.dev)',
16
+ js: '// Powered by carlin (https://ttoss.dev/docs/carlin/)',
17
17
  },
18
18
  bundle: true,
19
19
  entryPoints: [path_1.default.resolve(process.cwd(), lambdaInput)],
20
20
  external: ['aws-sdk', ...builtin_modules_1.default, ...lambdaExternals],
21
21
  platform: 'node',
22
22
  outfile: path_1.default.join(process.cwd(), outFolder, outFile),
23
- target: 'node12',
23
+ target: 'node18',
24
24
  treeShaking: true,
25
25
  });
26
26
  if (errors.length > 0) {
@@ -1,6 +1,6 @@
1
1
  "use strict";
2
2
  Object.defineProperty(exports, "__esModule", { value: true });
3
- exports.deployLambdaLayer = exports.getPackageLambdaLayerStackName = exports.getLambdaLayerTemplate = void 0;
3
+ exports.deployLambdaLayer = exports.getPackageLambdaLayerStackName = exports.lambdaLayerStackNamePrefix = exports.getLambdaLayerTemplate = void 0;
4
4
  const tslib_1 = require("tslib");
5
5
  const utils_1 = require("../../utils");
6
6
  const aws_sdk_1 = require("aws-sdk");
@@ -62,7 +62,7 @@ const getLambdaLayerTemplate = ({ bucket, key, packageName, }) => {
62
62
  LambdaLayer: {
63
63
  Type: 'AWS::Lambda::LayerVersion',
64
64
  Properties: {
65
- CompatibleRuntimes: ['nodejs12.x', 'nodejs14.x'],
65
+ CompatibleRuntimes: [config_1.NODE_RUNTIME],
66
66
  Content: {
67
67
  S3Bucket: bucket,
68
68
  S3Key: key,
@@ -84,19 +84,25 @@ const getLambdaLayerTemplate = ({ bucket, key, packageName, }) => {
84
84
  };
85
85
  };
86
86
  exports.getLambdaLayerTemplate = getLambdaLayerTemplate;
87
- /**
88
- * The stack name is given by `CarlinLambdaLayer` prefix and the package name with
89
- * `@` and `/` removed and `.` replace by the word `dot`.
90
- */
87
+ exports.lambdaLayerStackNamePrefix = `LambdaLayer`;
91
88
  const getPackageLambdaLayerStackName = (packageName) => {
92
- return (0, change_case_1.pascalCase)(`${config_1.NAME} LambdaLayer ${packageName.replace(/\./g, 'dot')}`).replace(/_/g, '');
89
+ const [scopedName, version] = packageName.split('@').filter((part) => {
90
+ return part !== '';
91
+ });
92
+ return [
93
+ exports.lambdaLayerStackNamePrefix,
94
+ (0, change_case_1.pascalCase)(scopedName),
95
+ version.replace(/\./g, '-'),
96
+ ].join('-');
93
97
  };
94
98
  exports.getPackageLambdaLayerStackName = getPackageLambdaLayerStackName;
95
99
  const getPackagesThatAreNotDeployed = async ({ packages, }) => {
96
100
  return (await Promise.all(packages.map(async (packageName) => {
97
101
  const stackName = (0, exports.getPackageLambdaLayerStackName)(packageName);
98
102
  return (await (0, cloudFormation_core_1.doesStackExist)({ stackName })) ? '' : packageName;
99
- }))).filter((packageName) => !!packageName);
103
+ }))).filter((packageName) => {
104
+ return !!packageName;
105
+ });
100
106
  };
101
107
  const deployLambdaLayer = async ({ packages, deployIfExists = true, }) => {
102
108
  try {
@@ -131,7 +137,9 @@ const deployLambdaLayer = async ({ packages, deployIfExists = true, }) => {
131
137
  (0, utils_2.handleDeployError)({ error, logPrefix });
132
138
  }
133
139
  };
134
- await Promise.all(packagesToBeDeployed.map((packageName) => deployLambdaLayerSinglePackage(packageName)));
140
+ await Promise.all(packagesToBeDeployed.map((packageName) => {
141
+ return deployLambdaLayerSinglePackage(packageName);
142
+ }));
135
143
  }
136
144
  catch (error) {
137
145
  (0, utils_2.handleDeployError)({ error, logPrefix });
package/dist/deploy/s3.js CHANGED
@@ -58,7 +58,9 @@ const getAllFilesInsideADirectory = async ({ directory, }) => {
58
58
  /**
59
59
  * Remove directories.
60
60
  */
61
- .filter((item) => fs_1.default.lstatSync(item).isFile());
61
+ .filter((item) => {
62
+ return fs_1.default.lstatSync(item).isFile();
63
+ });
62
64
  return allFiles;
63
65
  };
64
66
  exports.getAllFilesInsideADirectory = getAllFilesInsideADirectory;
@@ -88,11 +90,13 @@ const uploadDirectoryToS3 = async ({ bucket, bucketKey = '', directory, }) => {
88
90
  }, []);
89
91
  for (const [index, groupOfFiles] of aoaOfFiles.entries()) {
90
92
  npmlog_1.default.info(logPrefix, `Uploading group ${index + 1}/${aoaOfFiles.length}...`);
91
- await Promise.all(groupOfFiles.map((file) => (0, exports.uploadFileToS3)({
92
- bucket,
93
- key: path_1.default.join(bucketKey, path_1.default.relative(directory, file)),
94
- filePath: file,
95
- })));
93
+ await Promise.all(groupOfFiles.map((file) => {
94
+ return (0, exports.uploadFileToS3)({
95
+ bucket,
96
+ key: path_1.default.join(bucketKey, path_1.default.relative(directory, file)),
97
+ filePath: file,
98
+ });
99
+ }));
96
100
  }
97
101
  };
98
102
  exports.uploadDirectoryToS3 = uploadDirectoryToS3;
@@ -109,7 +113,9 @@ const emptyS3Directory = async ({ bucket, directory = '', }) => {
109
113
  /**
110
114
  * Get object versions
111
115
  */
112
- const objectsPromises = Contents.filter(({ Key }) => !!Key).map(async ({ Key }) => {
116
+ const objectsPromises = Contents.filter(({ Key }) => {
117
+ return !!Key;
118
+ }).map(async ({ Key }) => {
113
119
  const { Versions = [] } = await exports.s3
114
120
  .listObjectVersions({
115
121
  Bucket: bucket,
@@ -118,15 +124,19 @@ const emptyS3Directory = async ({ bucket, directory = '', }) => {
118
124
  .promise();
119
125
  return {
120
126
  Key: Key,
121
- Versions: Versions.map(({ VersionId }) => VersionId || undefined),
127
+ Versions: Versions.map(({ VersionId }) => {
128
+ return VersionId || undefined;
129
+ }),
122
130
  };
123
131
  });
124
132
  const objects = await Promise.all(objectsPromises);
125
133
  const objectsWithVersionsIds = objects.reduce((acc, { Key, Versions }) => {
126
- const objectWithVersionsIds = Versions.map((VersionId) => ({
127
- Key,
128
- VersionId,
129
- }));
134
+ const objectWithVersionsIds = Versions.map((VersionId) => {
135
+ return {
136
+ Key,
137
+ VersionId,
138
+ };
139
+ });
130
140
  return [...acc, ...objectWithVersionsIds];
131
141
  }, []);
132
142
  await exports.s3
@@ -1,8 +1,8 @@
1
1
  "use strict";
2
2
  Object.defineProperty(exports, "__esModule", { value: true });
3
3
  exports.getStackName = exports.limitStackName = exports.STACK_NAME_MAX_LENGTH = exports.setPreDefinedStackName = void 0;
4
- const change_case_1 = require("change-case");
5
4
  const utils_1 = require("../utils");
5
+ const change_case_1 = require("change-case");
6
6
  /**
7
7
  * Used by CLI set stack name when it is defined.
8
8
  */
@@ -14,7 +14,9 @@ exports.setPreDefinedStackName = setPreDefinedStackName;
14
14
  * https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/cloudformation-limits.html
15
15
  */
16
16
  exports.STACK_NAME_MAX_LENGTH = 128;
17
- const limitStackName = (stackName) => `${stackName}`.substring(0, exports.STACK_NAME_MAX_LENGTH);
17
+ const limitStackName = (stackName) => {
18
+ return `${stackName}`.substring(0, exports.STACK_NAME_MAX_LENGTH);
19
+ };
18
20
  exports.limitStackName = limitStackName;
19
21
  /**
20
22
  * If stack name isn't previously defined, the name will be created accordingly
@@ -72,7 +74,11 @@ const getStackName = async () => {
72
74
  }
73
75
  return undefined;
74
76
  })();
75
- const name = [firstName, secondName].filter((word) => !!word).join('-');
77
+ const name = [firstName, secondName]
78
+ .filter((word) => {
79
+ return !!word;
80
+ })
81
+ .join('-');
76
82
  return (0, exports.limitStackName)(name);
77
83
  };
78
84
  exports.getStackName = getStackName;
@@ -104,12 +104,18 @@ const cloudFormationTypes = [
104
104
  },
105
105
  },
106
106
  ];
107
- const getYamlTypes = (tagAndTypeArr) => tagAndTypeArr.map(({ tag, options }) => new js_yaml_1.default.Type(tag, options));
107
+ const getYamlTypes = (tagAndTypeArr) => {
108
+ return tagAndTypeArr.map(({ tag, options }) => {
109
+ return new js_yaml_1.default.Type(tag, options);
110
+ });
111
+ };
108
112
  /**
109
113
  * Transform CloudFormation directives in objects. For example, transform
110
114
  * !Ref Something in { Ref: Something }.
111
115
  */
112
- const getSchema = (tagAndTypeArr = []) => js_yaml_1.default.DEFAULT_SCHEMA.extend(getYamlTypes([...tagAndTypeArr, ...cloudFormationTypes]));
116
+ const getSchema = (tagAndTypeArr = []) => {
117
+ return js_yaml_1.default.DEFAULT_SCHEMA.extend(getYamlTypes([...tagAndTypeArr, ...cloudFormationTypes]));
118
+ };
113
119
  exports.getSchema = getSchema;
114
120
  /**
115
121
  * Transform a JSON in a YAML string.
@@ -117,7 +123,9 @@ exports.getSchema = getSchema;
117
123
  * @param cloudFormationTemplate JSON CloudFormation template
118
124
  * @returns YAML as string
119
125
  */
120
- const dumpToYamlCloudFormationTemplate = (cloudFormationTemplate) => js_yaml_1.default.dump(cloudFormationTemplate, { schema: (0, exports.getSchema)() });
126
+ const dumpToYamlCloudFormationTemplate = (cloudFormationTemplate) => {
127
+ return js_yaml_1.default.dump(cloudFormationTemplate, { schema: (0, exports.getSchema)() });
128
+ };
121
129
  exports.dumpToYamlCloudFormationTemplate = dumpToYamlCloudFormationTemplate;
122
130
  /**
123
131
  * Transform YAML string in JSON object.
@@ -2,10 +2,10 @@
2
2
  Object.defineProperty(exports, "__esModule", { value: true });
3
3
  exports.getPackageVersion = exports.getPackageName = void 0;
4
4
  const tslib_1 = require("tslib");
5
- const find_up_1 = tslib_1.__importDefault(require("find-up"));
5
+ const findup_sync_1 = tslib_1.__importDefault(require("findup-sync"));
6
6
  const fs_1 = tslib_1.__importDefault(require("fs"));
7
7
  const readPackageJson = () => {
8
- const packageJsonDir = find_up_1.default.sync('package.json');
8
+ const packageJsonDir = (0, findup_sync_1.default)('package.json');
9
9
  if (!packageJsonDir) {
10
10
  return {};
11
11
  }
@@ -19,7 +19,11 @@ const getPackageJsonProperty = ({ property }) => {
19
19
  return '';
20
20
  }
21
21
  };
22
- const getPackageName = () => getPackageJsonProperty({ property: 'name' });
22
+ const getPackageName = () => {
23
+ return getPackageJsonProperty({ property: 'name' });
24
+ };
23
25
  exports.getPackageName = getPackageName;
24
- const getPackageVersion = () => getPackageJsonProperty({ property: 'version' });
26
+ const getPackageVersion = () => {
27
+ return getPackageJsonProperty({ property: 'version' });
28
+ };
25
29
  exports.getPackageVersion = getPackageVersion;
@@ -5,19 +5,21 @@ const tslib_1 = require("tslib");
5
5
  const fs = tslib_1.__importStar(require("fs"));
6
6
  const path = tslib_1.__importStar(require("path"));
7
7
  const cloudFormationTemplate_1 = require("./cloudFormationTemplate");
8
- const getTypes = () => [
9
- {
10
- tag: `!SubString`,
11
- options: {
12
- kind: 'scalar',
13
- construct: (filePath) => {
14
- return fs
15
- .readFileSync(path.resolve(process.cwd(), filePath))
16
- .toString();
8
+ const getTypes = () => {
9
+ return [
10
+ {
11
+ tag: `!SubString`,
12
+ options: {
13
+ kind: 'scalar',
14
+ construct: (filePath) => {
15
+ return fs
16
+ .readFileSync(path.resolve(process.cwd(), filePath))
17
+ .toString();
18
+ },
17
19
  },
18
20
  },
19
- },
20
- ];
21
+ ];
22
+ };
21
23
  /**
22
24
  * CloudFormation
23
25
  * @param param0
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "carlin",
3
- "version": "1.19.14",
3
+ "version": "1.20.0",
4
4
  "description": "",
5
5
  "license": "GPL-3.0",
6
6
  "author": "Pedro Arantes <arantespp@gmail.com> (https://twitter.com/arantespp)",
@@ -26,50 +26,49 @@
26
26
  "test": "jest"
27
27
  },
28
28
  "dependencies": {
29
- "@aws-sdk/client-cloudformation": "^3.85.0",
30
- "@octokit/webhooks": "^9.24.0",
29
+ "@aws-sdk/client-cloudformation": "^3.226.0",
30
+ "@octokit/webhooks": "^10.3.1",
31
31
  "@slack/webhook": "^6.1.0",
32
32
  "adm-zip": "^0.5.9",
33
- "aws-sdk": "^2.1130.0",
34
- "builtin-modules": "^3.2.0",
33
+ "aws-sdk": "^2.1270.0",
34
+ "builtin-modules": "^3.3.0",
35
35
  "change-case": "^4.1.2",
36
- "deep-equal": "^2.0.5",
36
+ "deep-equal": "^2.1.0",
37
37
  "deepmerge": "^4.2.2",
38
- "dotenv": "^16.0.0",
39
- "esbuild": "^0.14.38",
40
- "find-up": "^5.0.0",
41
- "glob": "^8.0.1",
38
+ "dotenv": "^16.0.3",
39
+ "esbuild": "^0.16.2",
40
+ "findup-sync": "^5.0.0",
41
+ "glob": "^8.0.3",
42
42
  "js-yaml": "^4.1.0",
43
43
  "mime-types": "^2.1.35",
44
- "npmlog": "^6.0.2",
45
- "prettier": "^2.6.2",
46
- "semver": "^7.3.7",
47
- "simple-git": "^3.7.1",
48
- "ts-node": "^10.8.1",
49
- "uglify-js": "^3.15.4",
50
- "yargs": "^17.4.1"
44
+ "npmlog": "^7.0.1",
45
+ "prettier": "^2.8.1",
46
+ "semver": "^7.3.8",
47
+ "simple-git": "^3.15.1",
48
+ "ts-node": "^10.9.1",
49
+ "uglify-js": "^3.17.4",
50
+ "yargs": "^17.6.2"
51
51
  },
52
52
  "devDependencies": {
53
- "@ttoss/test-utils": "^1.18.0",
53
+ "@ttoss/test-utils": "^1.18.1",
54
54
  "@types/adm-zip": "^0.5.0",
55
- "@types/aws-lambda": "^8.10.95",
55
+ "@types/aws-lambda": "^8.10.109",
56
56
  "@types/deep-equal": "^1.0.1",
57
- "@types/glob": "^7.2.0",
57
+ "@types/findup-sync": "^4.0.2",
58
+ "@types/glob": "^8.0.0",
59
+ "@types/jest": "^29.2.4",
58
60
  "@types/js-yaml": "^4.0.5",
59
61
  "@types/mime-types": "^2.1.1",
60
- "@types/node": "^17.0.31",
62
+ "@types/node": "^18.11.11",
61
63
  "@types/npmlog": "^4.1.4",
62
- "@types/semver": "^7.3.9",
63
- "@types/uglify-js": "^3.13.2",
64
- "@types/yargs": "^17.0.10",
65
- "jest": "^28.1.1"
64
+ "@types/semver": "^7.3.13",
65
+ "@types/uglify-js": "^3.17.1",
66
+ "@types/yargs": "^17.0.17",
67
+ "jest": "^29.3.1"
66
68
  },
67
69
  "keywords": [],
68
- "engines": {
69
- "node": ">=14.0.0"
70
- },
71
70
  "publishConfig": {
72
71
  "registry": "https://registry.npmjs.org/"
73
72
  },
74
- "gitHead": "f1199aa45751844818bf397c6f6c8597803018a4"
73
+ "gitHead": "7283c8bffc1b432659339927173eee2ad19b622d"
75
74
  }