carlin 1.40.1 → 1.41.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (2)
  1. package/dist/index.js +155 -120
  2. package/package.json +10 -7
package/dist/index.js CHANGED
@@ -15,6 +15,8 @@ import deepmerge from 'deepmerge';
15
15
  import * as path from 'path';
16
16
  import path__default from 'path';
17
17
  import { ValidateTemplateCommand, DeleteStackCommand, CloudFormationClient, UpdateStackCommand, CreateStackCommand, UpdateTerminationProtectionCommand, DescribeStacksCommand, DescribeStackEventsCommand, DescribeStackResourceCommand } from '@aws-sdk/client-cloudformation';
18
+ import { ListObjectsV2Command, ListObjectVersionsCommand, DeleteObjectsCommand, HeadObjectCommand, CopyObjectCommand, S3Client } from '@aws-sdk/client-s3';
19
+ import { Upload } from '@aws-sdk/lib-storage';
18
20
  import { glob } from 'glob';
19
21
  import mime from 'mime-types';
20
22
  import { hideBin } from 'yargs/helpers';
@@ -24,7 +26,6 @@ import { typescriptConfig } from '@ttoss/config';
24
26
  import * as esbuild from 'esbuild';
25
27
  import AdmZip from 'adm-zip';
26
28
  import importSync from 'import-sync';
27
- import semver from 'semver';
28
29
  import deepEqual from 'deep-equal';
29
30
 
30
31
  var __defProp = Object.defineProperty;
@@ -434,9 +435,17 @@ var getBaseStackResource = /* @__PURE__ */ __name(async (resource) => {
434
435
  // src/deploy/config.ts
435
436
  var LATEST_DEPLOY_OUTPUTS_FILENAME = "latest-deploy.json";
436
437
  var logPrefix2 = "s3";
437
- var s3 = new AWS.S3({
438
- apiVersion: "2006-03-01"
439
- });
438
+ var s3Clients = {};
439
+ var s3 = /* @__PURE__ */ __name(() => {
440
+ const s3ClientConfig = {
441
+ region: getEnvVar("REGION")
442
+ };
443
+ const key = JSON.stringify(s3ClientConfig);
444
+ if (!s3Clients[key]) {
445
+ s3Clients[key] = new S3Client(s3ClientConfig);
446
+ }
447
+ return s3Clients[key];
448
+ }, "s3");
440
449
  var getBucketKeyUrl = /* @__PURE__ */ __name(({ bucket, key }) => {
441
450
  return `https://s3.amazonaws.com/${bucket}/${key}`;
442
451
  }, "getBucketKeyUrl");
@@ -444,7 +453,7 @@ var uploadFileToS3 = /* @__PURE__ */ __name(async ({ bucket, contentType, file,
444
453
  if (!file && !filePath) {
445
454
  throw new Error("file or filePath must be defined");
446
455
  }
447
- let params = {
456
+ const params = {
448
457
  Bucket: bucket,
449
458
  Key: key.split(path__default.sep).join("/")
450
459
  };
@@ -453,20 +462,21 @@ var uploadFileToS3 = /* @__PURE__ */ __name(async ({ bucket, contentType, file,
453
462
  params.Body = file;
454
463
  } else if (filePath) {
455
464
  const readFile = await fs3__default.promises.readFile(filePath);
456
- params = {
457
- ...params,
458
- ContentType: contentType || mime.contentType(path__default.extname(filePath)) || void 0
459
- };
465
+ params.ContentType = contentType || mime.contentType(path__default.extname(filePath)) || void 0;
460
466
  params.Body = Buffer.from(readFile);
461
467
  }
462
- const { Bucket, Key, VersionId } = await s3.upload(params).promise();
468
+ const upload = new Upload({
469
+ client: s3(),
470
+ params
471
+ });
472
+ const result = await upload.done();
463
473
  return {
464
- bucket: Bucket,
465
- key: Key,
466
- versionId: VersionId,
474
+ bucket: result.Bucket,
475
+ key: result.Key,
476
+ versionId: result.VersionId,
467
477
  url: getBucketKeyUrl({
468
- bucket: Bucket,
469
- key: Key
478
+ bucket: result.Bucket,
479
+ key: result.Key
470
480
  })
471
481
  };
472
482
  }, "uploadFileToS3");
@@ -479,18 +489,20 @@ var getAllFilesInsideADirectory = /* @__PURE__ */ __name(async ({ directory }) =
479
489
  }, "getAllFilesInsideADirectory");
480
490
  var copyRoot404To404Index = /* @__PURE__ */ __name(async ({ bucket }) => {
481
491
  try {
482
- const root404Exists = await s3.headObject({
492
+ const headCommand = new HeadObjectCommand({
483
493
  Bucket: bucket,
484
494
  Key: "404.html"
485
- }).promise().catch(() => {
495
+ });
496
+ const root404Exists = await s3().send(headCommand).catch(() => {
486
497
  return false;
487
498
  });
488
499
  if (root404Exists) {
489
- await s3.copyObject({
500
+ const copyCommand = new CopyObjectCommand({
490
501
  Bucket: bucket,
491
502
  CopySource: `${bucket}/404.html`,
492
503
  Key: "404/index.html"
493
- }).promise();
504
+ });
505
+ await s3().send(copyCommand);
494
506
  }
495
507
  } catch (error) {
496
508
  log5.error(logPrefix2, `Cannot copy 404.html to 404/index.html`);
@@ -529,18 +541,20 @@ var uploadDirectoryToS3 = /* @__PURE__ */ __name(async ({ bucket, bucketKey = ""
529
541
  var emptyS3Directory = /* @__PURE__ */ __name(async ({ bucket, directory = "" }) => {
530
542
  log5.info(logPrefix2, `${bucket}/${directory} will be empty`);
531
543
  try {
532
- const { Contents, IsTruncated } = await s3.listObjectsV2({
544
+ const listCommand = new ListObjectsV2Command({
533
545
  Bucket: bucket,
534
546
  Prefix: directory
535
- }).promise();
547
+ });
548
+ const { Contents, IsTruncated } = await s3().send(listCommand);
536
549
  if (Contents && Contents.length > 0) {
537
550
  const objectsPromises = Contents.filter(({ Key }) => {
538
551
  return !!Key;
539
552
  }).map(async ({ Key }) => {
540
- const { Versions = [] } = await s3.listObjectVersions({
553
+ const listVersionsCommand = new ListObjectVersionsCommand({
541
554
  Bucket: bucket,
542
555
  Prefix: Key
543
- }).promise();
556
+ });
557
+ const { Versions = [] } = await s3().send(listVersionsCommand);
544
558
  return {
545
559
  Key,
546
560
  Versions: Versions.map(({ VersionId }) => {
@@ -564,12 +578,13 @@ var emptyS3Directory = /* @__PURE__ */ __name(async ({ bucket, directory = "" })
564
578
  const BATCH_SIZE = 1e3;
565
579
  for (let i = 0; i < objectsWithVersionsIds.length; i += BATCH_SIZE) {
566
580
  const batch = objectsWithVersionsIds.slice(i, i + BATCH_SIZE);
567
- const result = await s3.deleteObjects({
581
+ const deleteCommand = new DeleteObjectsCommand({
568
582
  Bucket: bucket,
569
583
  Delete: {
570
584
  Objects: batch
571
585
  }
572
- }).promise();
586
+ });
587
+ const result = await s3().send(deleteCommand);
573
588
  if (result.Errors && result.Errors.length > 0) {
574
589
  const firstError = result.Errors[0];
575
590
  throw new Error(`Error deleting objects from ${bucket}/${directory}: ${JSON.stringify(firstError)}`);
@@ -588,23 +603,74 @@ var emptyS3Directory = /* @__PURE__ */ __name(async ({ bucket, directory = "" })
588
603
  throw error;
589
604
  }
590
605
  }, "emptyS3Directory");
591
- var deleteS3Directory = /* @__PURE__ */ __name(async ({ bucket, directory = "" }) => {
606
+ var deleteOldS3Files = /* @__PURE__ */ __name(async ({ bucket, continuationToken, directory = "", retentionDays, totalDeleted = 0 }) => {
607
+ if (!continuationToken) {
608
+ log5.info(logPrefix2, `Deleting files older than ${retentionDays} days from ${bucket}/${directory}...`);
609
+ }
592
610
  try {
593
- log5.info(logPrefix2, `${bucket}/${directory} is being deleted...`);
594
- await emptyS3Directory({
595
- bucket,
596
- directory
597
- });
598
- await s3.deleteObject({
611
+ const listCommand = new ListObjectsV2Command({
599
612
  Bucket: bucket,
600
- Key: directory
601
- }).promise();
602
- log5.info(logPrefix2, `${bucket}/${directory} was deleted.`);
613
+ Prefix: directory,
614
+ ContinuationToken: continuationToken
615
+ });
616
+ const { Contents, IsTruncated, NextContinuationToken } = await s3().send(listCommand);
617
+ let deletedCount = 0;
618
+ if (Contents && Contents.length > 0) {
619
+ const now = /* @__PURE__ */ new Date();
620
+ const retentionMs = retentionDays * 24 * 60 * 60 * 1e3;
621
+ const oldFiles = Contents.filter(({ Key, LastModified }) => {
622
+ if (!Key || !LastModified) {
623
+ return false;
624
+ }
625
+ const fileAge = now.getTime() - LastModified.getTime();
626
+ return fileAge > retentionMs;
627
+ }).map(({ Key }) => {
628
+ return Key;
629
+ });
630
+ if (oldFiles.length > 0) {
631
+ const BATCH_SIZE = 1e3;
632
+ for (let i = 0; i < oldFiles.length; i += BATCH_SIZE) {
633
+ const batch = oldFiles.slice(i, i + BATCH_SIZE);
634
+ const deleteCommand = new DeleteObjectsCommand({
635
+ Bucket: bucket,
636
+ Delete: {
637
+ Objects: batch.map((Key) => {
638
+ return {
639
+ Key
640
+ };
641
+ })
642
+ }
643
+ });
644
+ const result = await s3().send(deleteCommand);
645
+ if (result.Errors && result.Errors.length > 0) {
646
+ const firstError = result.Errors[0];
647
+ throw new Error(`Error deleting old files from ${bucket}/${directory}: ${JSON.stringify(firstError)}`);
648
+ }
649
+ }
650
+ deletedCount = oldFiles.length;
651
+ }
652
+ }
653
+ if (IsTruncated && NextContinuationToken) {
654
+ return await deleteOldS3Files({
655
+ bucket,
656
+ continuationToken: NextContinuationToken,
657
+ directory,
658
+ retentionDays,
659
+ totalDeleted: totalDeleted + deletedCount
660
+ });
661
+ }
662
+ const finalTotal = totalDeleted + deletedCount;
663
+ if (finalTotal === 0) {
664
+ log5.info(logPrefix2, `No files older than ${retentionDays} days found in ${bucket}/${directory}`);
665
+ } else {
666
+ log5.info(logPrefix2, `Deleted ${finalTotal} old files from ${bucket}/${directory}`);
667
+ }
668
+ return finalTotal;
603
669
  } catch (error) {
604
- log5.error(logPrefix2, `Cannot delete ${bucket}/${directory}.`);
670
+ log5.error(logPrefix2, `Cannot delete old files from ${bucket}/${directory}.`);
605
671
  throw error;
606
672
  }
607
- }, "deleteS3Directory");
673
+ }, "deleteOldS3Files");
608
674
 
609
675
  // src/deploy/cloudformation.core.ts
610
676
  var logPrefix3 = "cloudformation";
@@ -931,24 +997,24 @@ var getStackName = /* @__PURE__ */ __name(async () => {
931
997
  }).join("-");
932
998
  return limitStackName(name);
933
999
  }, "getStackName");
934
- var deployErrorLogs = /* @__PURE__ */ __name(({ error, logPrefix: logPrefix24 }) => {
935
- log5.error(logPrefix24, `An error occurred. Cannot deploy ${logPrefix24}.`);
936
- log5.error(logPrefix24, "Error message: %j", error?.message);
1000
+ var deployErrorLogs = /* @__PURE__ */ __name(({ error, logPrefix: logPrefix23 }) => {
1001
+ log5.error(logPrefix23, `An error occurred. Cannot deploy ${logPrefix23}.`);
1002
+ log5.error(logPrefix23, "Error message: %j", error?.message);
937
1003
  }, "deployErrorLogs");
938
- var handleDeployError = /* @__PURE__ */ __name(({ error, logPrefix: logPrefix24 }) => {
1004
+ var handleDeployError = /* @__PURE__ */ __name(({ error, logPrefix: logPrefix23 }) => {
939
1005
  deployErrorLogs({
940
1006
  error,
941
- logPrefix: logPrefix24
1007
+ logPrefix: logPrefix23
942
1008
  });
943
1009
  process.exit(1);
944
1010
  }, "handleDeployError");
945
- var handleDeployInitialization = /* @__PURE__ */ __name(async ({ logPrefix: logPrefix24, stackName: preDefinedStackName }) => {
946
- log5.info(logPrefix24, `Starting deploy ${logPrefix24}...`);
1011
+ var handleDeployInitialization = /* @__PURE__ */ __name(async ({ logPrefix: logPrefix23, stackName: preDefinedStackName }) => {
1012
+ log5.info(logPrefix23, `Starting deploy ${logPrefix23}...`);
947
1013
  if (preDefinedStackName) {
948
1014
  setPreDefinedStackName(preDefinedStackName);
949
1015
  }
950
1016
  const stackName = await getStackName();
951
- log5.info(logPrefix24, `stackName: ${stackName}`);
1017
+ log5.info(logPrefix23, `stackName: ${stackName}`);
952
1018
  return {
953
1019
  stackName
954
1020
  };
@@ -2867,8 +2933,8 @@ var deployLambdaLayers = /* @__PURE__ */ __name(async ({ lambdaExternal = [] })
2867
2933
  })();
2868
2934
  const packages = lambdaExternal.map((external) => {
2869
2935
  try {
2870
- const semver2 = dependencies[external].replace(/(~|\^)/g, "");
2871
- return `${external}@${semver2}`;
2936
+ const semver = dependencies[external].replace(/(~|\^)/g, "");
2937
+ return `${external}@${semver}`;
2872
2938
  } catch {
2873
2939
  throw new Error(`Cannot find ${external} on package.json dependencies.`);
2874
2940
  }
@@ -3750,34 +3816,6 @@ var invalidateCloudFront = /* @__PURE__ */ __name(async ({ outputs }) => {
3750
3816
  log5.info(logPrefix14, `Cannot invalidate because distribution does not exist.`);
3751
3817
  }
3752
3818
  }, "invalidateCloudFront");
3753
- var logPrefix15 = "static-app";
3754
- var removeOldVersions = /* @__PURE__ */ __name(async ({ bucket }) => {
3755
- try {
3756
- log5.info(logPrefix15, "Removing old versions...");
3757
- const { CommonPrefixes = [] } = await s3.listObjectsV2({
3758
- Bucket: bucket,
3759
- Delimiter: "/"
3760
- }).promise();
3761
- const versions = CommonPrefixes?.map(({ Prefix }) => {
3762
- return Prefix?.replace("/", "");
3763
- }).filter((version) => {
3764
- return semver.valid(version);
3765
- }).sort((a, b) => {
3766
- return semver.gt(a, b) ? -1 : 1;
3767
- });
3768
- versions.shift();
3769
- versions.shift();
3770
- versions.shift();
3771
- await Promise.all(versions.map((version) => {
3772
- return deleteS3Directory({
3773
- bucket,
3774
- directory: `${version}`
3775
- });
3776
- }));
3777
- } catch (error) {
3778
- log5.info(logPrefix15, `Cannot remove older versions from "${bucket}" bucket.`);
3779
- }
3780
- }, "removeOldVersions");
3781
3819
 
3782
3820
  // src/deploy/staticApp/staticApp.template.ts
3783
3821
  var PACKAGE_VERSION = getPackageVersion();
@@ -4203,8 +4241,9 @@ var uploadBuiltAppToS3 = /* @__PURE__ */ __name(async ({ buildFolder: directory,
4203
4241
  directory
4204
4242
  });
4205
4243
  if (files.length > 0) {
4206
- await emptyS3Directory({
4207
- bucket
4244
+ await deleteOldS3Files({
4245
+ bucket,
4246
+ retentionDays: 7
4208
4247
  });
4209
4248
  }
4210
4249
  await uploadDirectoryToS3({
@@ -4215,8 +4254,9 @@ var uploadBuiltAppToS3 = /* @__PURE__ */ __name(async ({ buildFolder: directory,
4215
4254
  }
4216
4255
  const defaultDirectory = await findDefaultBuildFolder();
4217
4256
  if (defaultDirectory) {
4218
- await emptyS3Directory({
4219
- bucket
4257
+ await deleteOldS3Files({
4258
+ bucket,
4259
+ retentionDays: 7
4220
4260
  });
4221
4261
  await uploadDirectoryToS3({
4222
4262
  bucket,
@@ -4231,11 +4271,11 @@ var uploadBuiltAppToS3 = /* @__PURE__ */ __name(async ({ buildFolder: directory,
4231
4271
  }, "uploadBuiltAppToS3");
4232
4272
 
4233
4273
  // src/deploy/staticApp/deployStaticApp.ts
4234
- var logPrefix16 = "static-app";
4274
+ var logPrefix15 = "static-app";
4235
4275
  var deployStaticApp = /* @__PURE__ */ __name(async ({ acm, aliases, appendIndexHtml, buildFolder, cloudfront, spa, hostedZoneName, region, skipUpload }) => {
4236
4276
  try {
4237
4277
  const { stackName } = await handleDeployInitialization({
4238
- logPrefix: logPrefix16
4278
+ logPrefix: logPrefix15
4239
4279
  });
4240
4280
  const params = {
4241
4281
  StackName: stackName
@@ -4267,11 +4307,6 @@ var deployStaticApp = /* @__PURE__ */ __name(async ({ acm, aliases, appendIndexH
4267
4307
  await invalidateCloudFront({
4268
4308
  outputs: Outputs
4269
4309
  });
4270
- if (!skipUpload) {
4271
- await removeOldVersions({
4272
- bucket
4273
- });
4274
- }
4275
4310
  } else {
4276
4311
  await deploy({
4277
4312
  params,
@@ -4292,7 +4327,7 @@ var deployStaticApp = /* @__PURE__ */ __name(async ({ acm, aliases, appendIndexH
4292
4327
  } catch (error) {
4293
4328
  handleDeployError({
4294
4329
  error,
4295
- logPrefix: logPrefix16
4330
+ logPrefix: logPrefix15
4296
4331
  });
4297
4332
  }
4298
4333
  }, "deployStaticApp");
@@ -4368,7 +4403,7 @@ var deployStaticAppCommand = {
4368
4403
  }
4369
4404
  }, "handler")
4370
4405
  };
4371
- var logPrefix17 = "deploy vercel";
4406
+ var logPrefix16 = "deploy vercel";
4372
4407
  var makeCommand = /* @__PURE__ */ __name((cmds) => {
4373
4408
  return cmds.filter((cmd) => {
4374
4409
  return cmd !== void 0 && cmd !== null && cmd !== "";
@@ -4376,7 +4411,7 @@ var makeCommand = /* @__PURE__ */ __name((cmds) => {
4376
4411
  }, "makeCommand");
4377
4412
  var deployVercel = /* @__PURE__ */ __name(async ({ token }) => {
4378
4413
  try {
4379
- log5.info(logPrefix17, "Deploying on Vercel...");
4414
+ log5.info(logPrefix16, "Deploying on Vercel...");
4380
4415
  const environment = getEnvironment();
4381
4416
  const finalToken = token || process.env.VERCEL_TOKEN;
4382
4417
  if (!finalToken) {
@@ -4411,11 +4446,11 @@ var deployVercel = /* @__PURE__ */ __name(async ({ token }) => {
4411
4446
  } catch (error) {
4412
4447
  handleDeployError({
4413
4448
  error,
4414
- logPrefix: logPrefix17
4449
+ logPrefix: logPrefix16
4415
4450
  });
4416
4451
  }
4417
4452
  }, "deployVercel");
4418
- var logPrefix18 = "deploy vercel";
4453
+ var logPrefix17 = "deploy vercel";
4419
4454
  var options4 = {
4420
4455
  token: {
4421
4456
  describe: "Vercel authorization token.",
@@ -4430,7 +4465,7 @@ var deployVercelCommand = {
4430
4465
  }, "builder"),
4431
4466
  handler: /* @__PURE__ */ __name(({ destroy: destroy2, ...rest }) => {
4432
4467
  if (destroy2) {
4433
- log5.info(logPrefix18, "Destroy Vercel deployment not implemented yet.");
4468
+ log5.info(logPrefix17, "Destroy Vercel deployment not implemented yet.");
4434
4469
  } else {
4435
4470
  deployVercel(rest);
4436
4471
  }
@@ -4508,7 +4543,7 @@ var generateSSHCommandWithPwd = /* @__PURE__ */ __name(({ userName, host, passwo
4508
4543
  }, "generateSSHCommandWithPwd");
4509
4544
 
4510
4545
  // src/deploy/vm/deployVM.ts
4511
- var logPrefix19 = "deploy-vm";
4546
+ var logPrefix18 = "deploy-vm";
4512
4547
  var deployVM = /* @__PURE__ */ __name(async ({ userName, host, scriptPath, keyPath, password, port, fixPermissions = false }) => {
4513
4548
  if (!userName || !host || !scriptPath) {
4514
4549
  throw new Error("Missing required parameters: userName, host, scriptPath");
@@ -4529,27 +4564,27 @@ var deployVM = /* @__PURE__ */ __name(async ({ userName, host, scriptPath, keyPa
4529
4564
  const permissionStr = permissions.toString(8);
4530
4565
  const fixCommand = `chmod 400 ${keyPath}`;
4531
4566
  if (fixPermissions) {
4532
- log5.info(logPrefix19, `Fixing SSH key permissions: ${keyPath} (${permissionStr} \u2192 400)`);
4567
+ log5.info(logPrefix18, `Fixing SSH key permissions: ${keyPath} (${permissionStr} \u2192 400)`);
4533
4568
  chmodSync(keyPath, 256);
4534
- log5.info(logPrefix19, `Permissions set to 400 (read-only by owner)`);
4569
+ log5.info(logPrefix18, `Permissions set to 400 (read-only by owner)`);
4535
4570
  } else {
4536
- log5.error(logPrefix19, `SSH key permissions too open: ${permissionStr} (octal)`);
4537
- log5.error(logPrefix19, `SSH requires permissions 400 or 600`);
4538
- log5.error(logPrefix19, `Fix manually: ${fixCommand}`);
4539
- log5.error(logPrefix19, `Or run with: --fix-permissions`);
4571
+ log5.error(logPrefix18, `SSH key permissions too open: ${permissionStr} (octal)`);
4572
+ log5.error(logPrefix18, `SSH requires permissions 400 or 600`);
4573
+ log5.error(logPrefix18, `Fix manually: ${fixCommand}`);
4574
+ log5.error(logPrefix18, `Or run with: --fix-permissions`);
4540
4575
  throw new Error(`Invalid SSH key permissions: ${permissionStr}. Expected 400 or 600.`);
4541
4576
  }
4542
4577
  } else {
4543
- log5.info(logPrefix19, `SSH key permissions OK: ${permissions.toString(8)}`);
4578
+ log5.info(logPrefix18, `SSH key permissions OK: ${permissions.toString(8)}`);
4544
4579
  }
4545
4580
  } catch (error) {
4546
4581
  if (error instanceof Error) {
4547
4582
  if (error.message.includes("Invalid SSH key permissions")) {
4548
4583
  throw error;
4549
4584
  }
4550
- log5.warn(logPrefix19, `Warning: Could not check key permissions: ${error.message}`);
4585
+ log5.warn(logPrefix18, `Warning: Could not check key permissions: ${error.message}`);
4551
4586
  } else {
4552
- log5.warn(logPrefix19, "Warning: Could not check key permissions: Unknown error");
4587
+ log5.warn(logPrefix18, "Warning: Could not check key permissions: Unknown error");
4553
4588
  }
4554
4589
  }
4555
4590
  }
@@ -4584,15 +4619,15 @@ var deployVM = /* @__PURE__ */ __name(async ({ userName, host, scriptPath, keyPa
4584
4619
  });
4585
4620
  const validateStdin = /* @__PURE__ */ __name((stdin) => {
4586
4621
  if (!stdin) {
4587
- log5.error(logPrefix19, "SSH process stdin is null or undefined");
4622
+ log5.error(logPrefix18, "SSH process stdin is null or undefined");
4588
4623
  return false;
4589
4624
  }
4590
4625
  if (stdin.destroyed) {
4591
- log5.error(logPrefix19, "SSH process stdin has been destroyed");
4626
+ log5.error(logPrefix18, "SSH process stdin has been destroyed");
4592
4627
  return false;
4593
4628
  }
4594
4629
  if (!stdin.writable) {
4595
- log5.error(logPrefix19, "SSH process stdin is not writable");
4630
+ log5.error(logPrefix18, "SSH process stdin is not writable");
4596
4631
  return false;
4597
4632
  }
4598
4633
  return true;
@@ -4603,7 +4638,7 @@ var deployVM = /* @__PURE__ */ __name(async ({ userName, host, scriptPath, keyPa
4603
4638
  }
4604
4639
  if (!existsSync(scriptPath)) {
4605
4640
  const message = `Deployment script not found at path: ${scriptPath}`;
4606
- log5.error(logPrefix19, message);
4641
+ log5.error(logPrefix18, message);
4607
4642
  reject(new Error(message));
4608
4643
  return;
4609
4644
  }
@@ -4613,7 +4648,7 @@ var deployVM = /* @__PURE__ */ __name(async ({ userName, host, scriptPath, keyPa
4613
4648
  }
4614
4649
  deployScript.pipe(sshProcess.stdin);
4615
4650
  const sigintHandler = /* @__PURE__ */ __name(() => {
4616
- log5.info(logPrefix19, "Interrupting deployment...");
4651
+ log5.info(logPrefix18, "Interrupting deployment...");
4617
4652
  sshProcess.kill("SIGINT");
4618
4653
  process.exit(130);
4619
4654
  }, "sigintHandler");
@@ -4637,7 +4672,7 @@ var deployVM = /* @__PURE__ */ __name(async ({ userName, host, scriptPath, keyPa
4637
4672
  }, "deployVM");
4638
4673
 
4639
4674
  // src/deploy/vm/command.ts
4640
- var logPrefix20 = "deploy-vm";
4675
+ var logPrefix19 = "deploy-vm";
4641
4676
  var deployVMCommand = {
4642
4677
  command: "vm",
4643
4678
  describe: "Deploy to a VM via SSH by executing a deployment script",
@@ -4655,16 +4690,16 @@ var deployVMCommand = {
4655
4690
  port,
4656
4691
  fixPermissions
4657
4692
  });
4658
- log5.info(logPrefix20, "Deployment completed successfully!");
4693
+ log5.info(logPrefix19, "Deployment completed successfully!");
4659
4694
  } catch (error) {
4660
- log5.error(logPrefix20, "Deployment failed: %s", error.message);
4695
+ log5.error(logPrefix19, "Deployment failed: %s", error.message);
4661
4696
  process.exit(1);
4662
4697
  }
4663
4698
  }, "handler")
4664
4699
  };
4665
4700
 
4666
4701
  // src/deploy/command.ts
4667
- var logPrefix21 = "deploy";
4702
+ var logPrefix20 = "deploy";
4668
4703
  var checkAwsAccountId = /* @__PURE__ */ __name(async (awsAccountId) => {
4669
4704
  try {
4670
4705
  const currentAwsAccountId = await getAwsAccountId();
@@ -4675,7 +4710,7 @@ var checkAwsAccountId = /* @__PURE__ */ __name(async (awsAccountId) => {
4675
4710
  if (error.code === "CredentialsError") {
4676
4711
  return;
4677
4712
  }
4678
- log5.error(logPrefix21, error.message);
4713
+ log5.error(logPrefix20, error.message);
4679
4714
  process.exit();
4680
4715
  }
4681
4716
  }, "checkAwsAccountId");
@@ -4689,7 +4724,7 @@ var describeDeployCommand = {
4689
4724
  stackName: newStackName
4690
4725
  });
4691
4726
  } catch (error) {
4692
- log5.info(logPrefix21, "Cannot describe stack. Message: %s", error.message);
4727
+ log5.info(logPrefix20, "Cannot describe stack. Message: %s", error.message);
4693
4728
  }
4694
4729
  }, "handler")
4695
4730
  };
@@ -4874,7 +4909,7 @@ var deployCommand = {
4874
4909
  }
4875
4910
  }).middleware(({ skipDeploy }) => {
4876
4911
  if (skipDeploy) {
4877
- log5.warn(logPrefix21, "Skip deploy flag is true, then the deploy command wasn't executed.");
4912
+ log5.warn(logPrefix20, "Skip deploy flag is true, then the deploy command wasn't executed.");
4878
4913
  process.exit(0);
4879
4914
  }
4880
4915
  }).middleware(({ lambdaExternals, lambdaInput }) => {
@@ -4915,10 +4950,10 @@ var deployCommand = {
4915
4950
  }
4916
4951
  }, "handler")
4917
4952
  };
4918
- var logPrefix22 = "cicd-ecs-task-report";
4953
+ var logPrefix21 = "cicd-ecs-task-report";
4919
4954
  var sendEcsTaskReport = /* @__PURE__ */ __name(async ({ status }) => {
4920
4955
  if (!process.env.ECS_TASK_REPORT_HANDLER_NAME) {
4921
- log5.info(logPrefix22, "ECS_TASK_REPORT_HANDLER_NAME not defined.");
4956
+ log5.info(logPrefix21, "ECS_TASK_REPORT_HANDLER_NAME not defined.");
4922
4957
  return;
4923
4958
  }
4924
4959
  const lambda = new AWS.Lambda();
@@ -4935,7 +4970,7 @@ var sendEcsTaskReport = /* @__PURE__ */ __name(async ({ status }) => {
4935
4970
  FunctionName: process.env.ECS_TASK_REPORT_HANDLER_NAME,
4936
4971
  InvokeArgs: JSON.stringify(payload)
4937
4972
  }).promise();
4938
- log5.info(logPrefix22, "Report sent.");
4973
+ log5.info(logPrefix21, "Report sent.");
4939
4974
  }, "sendEcsTaskReport");
4940
4975
  var options7 = {
4941
4976
  status: {
@@ -4958,7 +4993,7 @@ var ecsTaskReportCommand = {
4958
4993
  return sendEcsTaskReport(args);
4959
4994
  }, "handler")
4960
4995
  };
4961
- var logPrefix23 = "generate-env";
4996
+ var logPrefix22 = "generate-env";
4962
4997
  var readEnvFile = /* @__PURE__ */ __name(async ({ envFileName, envsPath }) => {
4963
4998
  try {
4964
4999
  const content = await fs3.promises.readFile(path.resolve(process.cwd(), envsPath, envFileName), "utf8");
@@ -4978,14 +5013,14 @@ var generateEnv = /* @__PURE__ */ __name(async ({ defaultEnvironment, path: envs
4978
5013
  envsPath
4979
5014
  });
4980
5015
  if (!envFile) {
4981
- log5.info(logPrefix23, "Env file %s doesn't exist. Skip generating env file.", envFileName);
5016
+ log5.info(logPrefix22, "Env file %s doesn't exist. Skip generating env file.", envFileName);
4982
5017
  return;
4983
5018
  }
4984
5019
  await writeEnvFile({
4985
5020
  content: envFile,
4986
5021
  envFileName: ".env"
4987
5022
  });
4988
- log5.info(logPrefix23, "Generate env file %s from %s successfully.", ".env", envFileName);
5023
+ log5.info(logPrefix22, "Generate env file %s from %s successfully.", ".env", envFileName);
4989
5024
  }, "generateEnv");
4990
5025
 
4991
5026
  // src/generateEnv/generateEnvCommand.ts
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "carlin",
3
- "version": "1.40.1",
3
+ "version": "1.41.0",
4
4
  "description": "",
5
5
  "license": "MIT",
6
6
  "author": "Pedro Arantes <arantespp@gmail.com> (https://twitter.com/arantespp)",
@@ -23,10 +23,12 @@
23
23
  ],
24
24
  "dependencies": {
25
25
  "@aws-sdk/client-cloudformation": "^3.699.0",
26
+ "@aws-sdk/client-s3": "^3.975.0",
27
+ "@aws-sdk/lib-storage": "^3.975.0",
26
28
  "@octokit/webhooks": "^12.0.0",
27
29
  "@slack/webhook": "^7.0.3",
28
30
  "adm-zip": "^0.5.16",
29
- "aws-sdk": "^2.1692.0",
31
+ "aws-sdk": "^2.1693.0",
30
32
  "change-case": "^5.4.4",
31
33
  "deep-equal": "^2.2.3",
32
34
  "deepmerge": "^4.3.1",
@@ -45,9 +47,9 @@
45
47
  "uglify-js": "^3.19.3",
46
48
  "vercel": "^39.1.1",
47
49
  "yargs": "^17.7.2",
48
- "@ttoss/cloudformation": "^0.11.10",
49
- "@ttoss/config": "^1.35.12",
50
- "@ttoss/read-config-file": "^2.0.20"
50
+ "@ttoss/cloudformation": "^0.12.0",
51
+ "@ttoss/config": "^1.36.0",
52
+ "@ttoss/read-config-file": "^2.1.0"
51
53
  },
52
54
  "devDependencies": {
53
55
  "@types/adm-zip": "^0.5.6",
@@ -58,15 +60,16 @@
58
60
  "@types/jest": "^30.0.0",
59
61
  "@types/js-yaml": "^4.0.9",
60
62
  "@types/mime-types": "^2.1.4",
61
- "@types/node": "^22.19.0",
63
+ "@types/node": "^24.10.13",
62
64
  "@types/npmlog": "^7.0.0",
63
65
  "@types/semver": "^7.5.8",
64
66
  "@types/uglify-js": "^3.17.5",
65
67
  "@types/yargs": "^17.0.33",
68
+ "aws-sdk-client-mock": "^4.1.0",
66
69
  "jest": "^30.2.0",
67
70
  "tsup": "^8.5.1",
68
71
  "typescript": "~5.9.3",
69
- "@ttoss/test-utils": "^4.0.3"
72
+ "@ttoss/test-utils": "^4.1.0"
70
73
  },
71
74
  "keywords": [],
72
75
  "publishConfig": {