@commercetools-frontend/application-cli 2.4.0 → 3.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -18,7 +18,7 @@ var _mapInstanceProperty = require('@babel/runtime-corejs3/core-js-stable/instan
18
18
  var _Object$entries = require('@babel/runtime-corejs3/core-js-stable/object/entries');
19
19
  var _Set = require('@babel/runtime-corejs3/core-js-stable/set');
20
20
  var _flatMapInstanceProperty = require('@babel/runtime-corejs3/core-js-stable/instance/flat-map');
21
- var storageBucketsConfig = require('../../dist/storage-buckets-config-b6d36938.cjs.prod.js');
21
+ var storageBucketsConfig = require('../../dist/storage-buckets-config-2f9a9168.cjs.prod.js');
22
22
  var fs = require('node:fs');
23
23
  var path$1 = require('node:path');
24
24
  var listr2 = require('listr2');
@@ -41,9 +41,17 @@ var _JSON$stringify = require('@babel/runtime-corejs3/core-js-stable/json/string
41
41
  var applicationConfig = require('@commercetools-frontend/application-config');
42
42
  var l10n = require('@commercetools-frontend/l10n');
43
43
  var _sliceInstanceProperty = require('@babel/runtime-corejs3/core-js-stable/instance/slice');
44
+ var _startsWithInstanceProperty = require('@babel/runtime-corejs3/core-js-stable/instance/starts-with');
45
+ var _trimInstanceProperty = require('@babel/runtime-corejs3/core-js-stable/instance/trim');
46
+ var _someInstanceProperty = require('@babel/runtime-corejs3/core-js-stable/instance/some');
47
+ var _everyInstanceProperty = require('@babel/runtime-corejs3/core-js-stable/instance/every');
48
+ var _includesInstanceProperty = require('@babel/runtime-corejs3/core-js-stable/instance/includes');
49
+ var micromatch = require('micromatch');
50
+ var snakeCase = require('lodash/snakeCase');
44
51
  var jsonschema = require('jsonschema');
45
52
  require('cosmiconfig');
46
53
  require('ts-deepmerge');
54
+ require('lodash');
47
55
 
48
56
  function _interopDefault (e) { return e && e.__esModule ? e : { 'default': e }; }
49
57
 
@@ -70,6 +78,13 @@ var fs__default$1 = /*#__PURE__*/_interopDefault(fs$1);
70
78
  var _findInstanceProperty__default = /*#__PURE__*/_interopDefault(_findInstanceProperty);
71
79
  var _JSON$stringify__default = /*#__PURE__*/_interopDefault(_JSON$stringify);
72
80
  var _sliceInstanceProperty__default = /*#__PURE__*/_interopDefault(_sliceInstanceProperty);
81
+ var _startsWithInstanceProperty__default = /*#__PURE__*/_interopDefault(_startsWithInstanceProperty);
82
+ var _trimInstanceProperty__default = /*#__PURE__*/_interopDefault(_trimInstanceProperty);
83
+ var _someInstanceProperty__default = /*#__PURE__*/_interopDefault(_someInstanceProperty);
84
+ var _everyInstanceProperty__default = /*#__PURE__*/_interopDefault(_everyInstanceProperty);
85
+ var _includesInstanceProperty__default = /*#__PURE__*/_interopDefault(_includesInstanceProperty);
86
+ var micromatch__default = /*#__PURE__*/_interopDefault(micromatch);
87
+ var snakeCase__default = /*#__PURE__*/_interopDefault(snakeCase);
73
88
 
74
89
  function getApplicationDirectory(cwd) {
75
90
  return fs__default["default"].realpathSync(cwd);
@@ -86,20 +101,8 @@ function isCI() {
86
101
 
87
102
  function ownKeys$4(e, r) { var t = _Object$keys__default["default"](e); if (_Object$getOwnPropertySymbols__default["default"]) { var o = _Object$getOwnPropertySymbols__default["default"](e); r && (o = _filterInstanceProperty__default["default"](o).call(o, function (r) { return _Object$getOwnPropertyDescriptor__default["default"](e, r).enumerable; })), t.push.apply(t, o); } return t; }
88
103
  function _objectSpread$4(e) { for (var r = 1; r < arguments.length; r++) { var _context6, _context7; var t = null != arguments[r] ? arguments[r] : {}; r % 2 ? _forEachInstanceProperty__default["default"](_context6 = ownKeys$4(Object(t), !0)).call(_context6, function (r) { _defineProperty(e, r, t[r]); }) : _Object$getOwnPropertyDescriptors__default["default"] ? _Object$defineProperties__default["default"](e, _Object$getOwnPropertyDescriptors__default["default"](t)) : _forEachInstanceProperty__default["default"](_context7 = ownKeys$4(Object(t))).call(_context7, function (r) { _Object$defineProperty__default["default"](e, r, _Object$getOwnPropertyDescriptor__default["default"](t, r)); }); } return e; }
89
- function _callSuper(_this, derived, args) {
90
- function isNativeReflectConstruct() {
91
- if (typeof Reflect === "undefined" || !_Reflect$construct__default["default"]) return false;
92
- if (_Reflect$construct__default["default"].sham) return false;
93
- if (typeof Proxy === "function") return true;
94
- try {
95
- return !Boolean.prototype.valueOf.call(_Reflect$construct__default["default"](Boolean, [], function () {}));
96
- } catch (e) {
97
- return false;
98
- }
99
- }
100
- derived = _getPrototypeOf(derived);
101
- return _possibleConstructorReturn(_this, isNativeReflectConstruct() ? _Reflect$construct__default["default"](derived, args || [], _getPrototypeOf(_this).constructor) : derived.apply(_this, args));
102
- }
104
+ function _callSuper(t, o, e) { return o = _getPrototypeOf(o), _possibleConstructorReturn(t, _isNativeReflectConstruct() ? _Reflect$construct__default["default"](o, e || [], _getPrototypeOf(t).constructor) : o.apply(t, e)); }
105
+ function _isNativeReflectConstruct() { try { var t = !Boolean.prototype.valueOf.call(_Reflect$construct__default["default"](Boolean, [], function () {})); } catch (t) {} return (_isNativeReflectConstruct = function () { return !!t; })(); }
103
106
  var _bucketRegion$1 = /*#__PURE__*/_classPrivateFieldLooseKey("bucketRegion");
104
107
  var _bucketEnvironment$1 = /*#__PURE__*/_classPrivateFieldLooseKey("bucketEnvironment");
105
108
  let StorageProvider = /*#__PURE__*/function () {
@@ -202,20 +205,20 @@ var _bucketRegion2$1 = /*#__PURE__*/_classPrivateFieldLooseKey("bucketRegion");
202
205
  var _bucketEnvironment2$1 = /*#__PURE__*/_classPrivateFieldLooseKey("bucketEnvironment");
203
206
  let GoogleStorageProvider = /*#__PURE__*/function (_StorageProvider2) {
204
207
  function GoogleStorageProvider(config) {
205
- var _this2;
208
+ var _this;
206
209
  _classCallCheck(this, GoogleStorageProvider);
207
- _this2 = _callSuper(this, GoogleStorageProvider, [config]);
208
- _Object$defineProperty__default["default"](_this2, _bucketRegion2$1, {
210
+ _this = _callSuper(this, GoogleStorageProvider, [config]);
211
+ _Object$defineProperty__default["default"](_this, _bucketRegion2$1, {
209
212
  writable: true,
210
213
  value: void 0
211
214
  });
212
- _Object$defineProperty__default["default"](_this2, _bucketEnvironment2$1, {
215
+ _Object$defineProperty__default["default"](_this, _bucketEnvironment2$1, {
213
216
  writable: true,
214
217
  value: void 0
215
218
  });
216
- _classPrivateFieldLooseBase(_this2, _bucketRegion2$1)[_bucketRegion2$1] = config.bucketRegion;
217
- _classPrivateFieldLooseBase(_this2, _bucketEnvironment2$1)[_bucketEnvironment2$1] = config.bucketEnvironment;
218
- return _this2;
219
+ _classPrivateFieldLooseBase(_this, _bucketRegion2$1)[_bucketRegion2$1] = config.bucketRegion;
220
+ _classPrivateFieldLooseBase(_this, _bucketEnvironment2$1)[_bucketEnvironment2$1] = config.bucketEnvironment;
221
+ return _this;
219
222
  }
220
223
  _inherits(GoogleStorageProvider, _StorageProvider2);
221
224
  return _createClass(GoogleStorageProvider, [{
@@ -274,20 +277,20 @@ var _bucketRegion3 = /*#__PURE__*/_classPrivateFieldLooseKey("bucketRegion");
274
277
  var _bucketEnvironment3 = /*#__PURE__*/_classPrivateFieldLooseKey("bucketEnvironment");
275
278
  let AwsStorageProvider = /*#__PURE__*/function (_StorageProvider3) {
276
279
  function AwsStorageProvider(config) {
277
- var _this3;
280
+ var _this2;
278
281
  _classCallCheck(this, AwsStorageProvider);
279
- _this3 = _callSuper(this, AwsStorageProvider, [config]);
280
- _Object$defineProperty__default["default"](_this3, _bucketRegion3, {
282
+ _this2 = _callSuper(this, AwsStorageProvider, [config]);
283
+ _Object$defineProperty__default["default"](_this2, _bucketRegion3, {
281
284
  writable: true,
282
285
  value: void 0
283
286
  });
284
- _Object$defineProperty__default["default"](_this3, _bucketEnvironment3, {
287
+ _Object$defineProperty__default["default"](_this2, _bucketEnvironment3, {
285
288
  writable: true,
286
289
  value: void 0
287
290
  });
288
- _classPrivateFieldLooseBase(_this3, _bucketRegion3)[_bucketRegion3] = config.bucketRegion;
289
- _classPrivateFieldLooseBase(_this3, _bucketEnvironment3)[_bucketEnvironment3] = config.bucketEnvironment;
290
- return _this3;
291
+ _classPrivateFieldLooseBase(_this2, _bucketRegion3)[_bucketRegion3] = config.bucketRegion;
292
+ _classPrivateFieldLooseBase(_this2, _bucketEnvironment3)[_bucketEnvironment3] = config.bucketEnvironment;
293
+ return _this2;
291
294
  }
292
295
  _inherits(AwsStorageProvider, _StorageProvider3);
293
296
  return _createClass(AwsStorageProvider, [{
@@ -611,7 +614,7 @@ async function compileEnvironmentApplicationIndexes(_ref3) {
611
614
  throw new Error(moveResult.stderr);
612
615
  }
613
616
  }
614
- async function command$3(cliFlags, cwd) {
617
+ async function command$4(cliFlags, cwd) {
615
618
  var _context3;
616
619
  const storageBucketConfig = await storageBucketsConfig.loadStorageBucketsConfig();
617
620
  const applicationDirectory = getApplicationDirectory(cwd);
@@ -784,7 +787,7 @@ const mapApplicationMenuConfigToGraqhQLMenuJson = config => {
784
787
  shouldRenderDivider: (_menuLinks$shouldRend = menuLinks.shouldRenderDivider) !== null && _menuLinks$shouldRend !== void 0 ? _menuLinks$shouldRend : false
785
788
  };
786
789
  };
787
- async function command$2(cliFlags, cwd) {
790
+ async function command$3(cliFlags, cwd) {
788
791
  const applicationDirectory = getApplicationDirectory(cwd);
789
792
  const monorepoRoot = findRoot.findRootSync(cwd);
790
793
  const dotenvPath = cliFlags.dotenvFolder && path__default["default"].join(monorepoRoot.rootDir, cliFlags.dotenvFolder);
@@ -815,7 +818,7 @@ async function command$2(cliFlags, cwd) {
815
818
  });
816
819
  }
817
820
 
818
- async function command$1(cliFlags) {
821
+ async function command$2(cliFlags) {
819
822
  const numberOfRollbacks = cliFlags.rollbacks - 1;
820
823
  let nextRollbacks;
821
824
  try {
@@ -851,6 +854,158 @@ async function command$1(cliFlags) {
851
854
  }
852
855
  }
853
856
 
857
+ /**
858
+ * This is heavily inspired by https://circleci.com/developer/orbs/orb/circleci/path-filtering.
859
+ *
860
+ * It detects changed files between `HEAD` and a base revision.
861
+ * To match them against configured RegEx triggers.
862
+ * All matched triggers will be written as a dotenv file.
863
+ * The dotenv file is read in a CircleCI step and be evaluated.
864
+ */
865
+ const git = {
866
+ // https://git-scm.com/docs/git-merge-base
867
+ base: (baseBranch, headRevision) => {
868
+ var _context;
869
+ return _concatInstanceProperty__default["default"](_context = "git merge-base ".concat(baseBranch, " ")).call(_context, headRevision);
870
+ },
871
+ // https://git-scm.com/docs/git-diff
872
+ changedFiles: (mergeRevision, headRevision) => {
873
+ var _context2;
874
+ return _concatInstanceProperty__default["default"](_context2 = "git diff --name-only ".concat(mergeRevision, " ")).call(_context2, headRevision);
875
+ },
876
+ commitMessage: headRevision => "git log --format=oneline -n 1 ".concat(headRevision)
877
+ };
878
+ const helpers = {
879
+ async writeOutDotEnvFile(cliFlags, cwd, matchingTriggers) {
880
+ var _context3;
881
+ // If desired read the env file and write out the matching triggers.
882
+ if (!cliFlags.outEnvFile) {
883
+ return;
884
+ }
885
+ const filePath = path__default$1["default"].join(fs__default["default"].realpathSync(cwd), cliFlags.outEnvFile);
886
+ const fileContents = _mapInstanceProperty__default["default"](_context3 = _Object$entries__default["default"](matchingTriggers)).call(_context3, _ref => {
887
+ var _context5;
888
+ let _ref2 = _slicedToArray(_ref, 2),
889
+ triggerName = _ref2[0],
890
+ triggerValue = _ref2[1];
891
+ const triggerNameForEnvFile = "".concat(snakeCase__default["default"](triggerName).toUpperCase());
892
+
893
+ // General pipeline optimization hints are not transformed
894
+ if (_startsWithInstanceProperty__default["default"](triggerName).call(triggerName, 'allowPipelineOptimizations')) {
895
+ var _context4;
896
+ return _concatInstanceProperty__default["default"](_context4 = "".concat(triggerNameForEnvFile, "=")).call(_context4, triggerValue);
897
+ }
898
+ return _concatInstanceProperty__default["default"](_context5 = "DID_".concat(triggerNameForEnvFile, "_CHANGE=")).call(_context5, triggerValue);
899
+ }).join('\n');
900
+ await fs__default["default"].promises.writeFile(filePath, fileContents);
901
+ if (!cliFlags.silent) {
902
+ console.log("\uD83D\uDCDD Wrote out file to '".concat(filePath, "' with contents:"));
903
+ console.log(fileContents);
904
+ }
905
+ },
906
+ async getChangedFiles(cliFlags) {
907
+ var _context6, _context7;
908
+ const baseCmdResult = await execa.command(git.base(cliFlags.baseBranch, cliFlags.headRevision));
909
+ const mergeRevision = baseCmdResult.stdout;
910
+ const changedFilesCmdResult = await execa.command(git.changedFiles(mergeRevision, cliFlags.headRevision));
911
+ const changedFiles = _filterInstanceProperty__default["default"](_context6 = _mapInstanceProperty__default["default"](_context7 = changedFilesCmdResult.stdout.split('\n')).call(_context7, filePath => _trimInstanceProperty__default["default"](filePath).call(filePath))).call(_context6, filePath => filePath.length > 0);
912
+ return changedFiles;
913
+ },
914
+ async matchTriggersAgainstChangedFiles(cliFlags, config, changedFiles) {
915
+ const matchedTriggers = {};
916
+
917
+ // Evaluate each trigger against each file.
918
+ _forEachInstanceProperty__default["default"](config).call(config, async trigger => {
919
+ var _trigger$exclude;
920
+ const hasTriggerBeenInitialized = typeof matchedTriggers[trigger.name] === 'number';
921
+
922
+ // Given the trigger with this name was never evaluated it has to be defaulted to 0.
923
+ // As without any matches we should indicate nothing changed.
924
+ if (!hasTriggerBeenInitialized) {
925
+ matchedTriggers[trigger.name] = 0;
926
+ }
927
+ // Given the trigger was already evaluated to be positive we can skip this evaluation.
928
+ if (matchedTriggers[trigger.name] === 1) {
929
+ return matchedTriggers;
930
+ }
931
+
932
+ // In any other case we evaluate this trigger.
933
+ const anyFileChangedForTrigger = _someInstanceProperty__default["default"](micromatch__default["default"]).call(micromatch__default["default"], changedFiles, trigger.include, {
934
+ ignore: trigger.ignore
935
+ });
936
+ if (!cliFlags.silent && anyFileChangedForTrigger) {
937
+ console.log("\u2139\uFE0F Files for trigger ".concat(trigger.name, " changed."));
938
+ }
939
+ let onlyExcludedFilesChangedForTrigger = false;
940
+ if (((_trigger$exclude = trigger.exclude) === null || _trigger$exclude === void 0 ? void 0 : _trigger$exclude.length) > 0) {
941
+ // NOTE: `micromatch.every` evaluates if every file matches
942
+ // every pattern.
943
+ // We need to evaluate if every file matches some pattern.
944
+ onlyExcludedFilesChangedForTrigger = _everyInstanceProperty__default["default"](changedFiles).call(changedFiles, changedFile => {
945
+ return micromatch__default["default"].isMatch(changedFile, trigger.exclude, {
946
+ ignore: trigger.ignore
947
+ });
948
+ });
949
+ }
950
+ if (!cliFlags.silent && onlyExcludedFilesChangedForTrigger) {
951
+ console.log("\u2139\uFE0F Only excluded files for trigger ".concat(trigger.name, " changed."));
952
+ }
953
+ if (onlyExcludedFilesChangedForTrigger) {
954
+ matchedTriggers[trigger.name] = 0;
955
+ } else {
956
+ matchedTriggers[trigger.name] = Number(anyFileChangedForTrigger);
957
+ }
958
+ return matchedTriggers;
959
+ });
960
+ return matchedTriggers;
961
+ }
962
+ };
963
+ async function command$1(cliFlags, config, cwd) {
964
+ const enablePipelineOptimizations = process.env.ENABLE_PIPELINE_OPTIMIZATIONS === '1';
965
+ const isDevelopmentBranch = cliFlags.branch !== cliFlags.baseBranch;
966
+ const triggersContainingSharedFiles = _filterInstanceProperty__default["default"](config).call(config, trigger => trigger.containsSharedFiles);
967
+ if (!cliFlags.silent) {
968
+ var _context8;
969
+ console.log("\u2139\uFE0F Pipeline optimizations are ".concat(enablePipelineOptimizations ? 'enabled' : 'disabled', "."));
970
+ console.log("\u2139\uFE0F Changes have been commited to the ".concat(isDevelopmentBranch ? 'a development' : 'the main', " branch."));
971
+ console.log(_concatInstanceProperty__default["default"](_context8 = "\uD83D\uDEA7 Comparing '".concat(cliFlags.baseBranch, "' against '")).call(_context8, cliFlags.headRevision, "' to determine changed files."));
972
+ }
973
+
974
+ // Collect and parse changed files from git comparing base and head revision.
975
+ const changedFiles = await helpers.getChangedFiles(cliFlags);
976
+ if (!cliFlags.silent) {
977
+ if (changedFiles.length === 0) {
978
+ console.log("\u2139\uFE0F No changes found.");
979
+ } else {
980
+ console.log("\u2139\uFE0F ".concat(changedFiles.length, " changes found."));
981
+ }
982
+ }
983
+
984
+ // Read the trigger file to match the changed files against.
985
+ const matchedTriggers = await helpers.matchTriggersAgainstChangedFiles(cliFlags, config, changedFiles);
986
+ const commitMessageCmdResult = await execa.command(git.commitMessage(cliFlags.headRevision));
987
+ const commitMessage = commitMessageCmdResult.stdout;
988
+ const hasCommitMessageTrigger = commitMessage && _includesInstanceProperty__default["default"](commitMessage).call(commitMessage, '[ci all]');
989
+ const doesSharedTriggerMatch = _someInstanceProperty__default["default"](triggersContainingSharedFiles).call(triggersContainingSharedFiles, triggerContainingSharedFiles => matchedTriggers[triggerContainingSharedFiles.name] === 1);
990
+ if (!cliFlags.silent) {
991
+ console.log("\u2139\uFE0F The git commit message ".concat(hasCommitMessageTrigger ? 'does' : 'does not', " contain a [ci all] trigger."));
992
+ }
993
+ const doesPackageFolderTriggerMatch = matchedTriggers[cliFlags.triggerName] === 1;
994
+ if (enablePipelineOptimizations && isDevelopmentBranch && !hasCommitMessageTrigger && !doesSharedTriggerMatch && !doesPackageFolderTriggerMatch) {
995
+ if (!cliFlags.silent) {
996
+ console.log("\u2139\uFE0F No relevant changes found for ".concat(cliFlags.triggerName, "."));
997
+ }
998
+ matchedTriggers['allowPipelineOptimizationsForTrigger'] = 1;
999
+ } else {
1000
+ if (!cliFlags.silent) {
1001
+ console.log("\u2139\uFE0F Relevant changes found for ".concat(cliFlags.triggerName, "."));
1002
+ }
1003
+ matchedTriggers['allowPipelineOptimizationsForTrigger'] = 0;
1004
+ }
1005
+ await helpers.writeOutDotEnvFile(cliFlags, cwd, matchedTriggers);
1006
+ return matchedTriggers;
1007
+ }
1008
+
854
1009
  function ownKeys(e, r) { var t = _Object$keys__default["default"](e); if (_Object$getOwnPropertySymbols__default["default"]) { var o = _Object$getOwnPropertySymbols__default["default"](e); r && (o = _filterInstanceProperty__default["default"](o).call(o, function (r) { return _Object$getOwnPropertyDescriptor__default["default"](e, r).enumerable; })), t.push.apply(t, o); } return t; }
855
1010
  function _objectSpread(e) { for (var r = 1; r < arguments.length; r++) { var _context, _context2; var t = null != arguments[r] ? arguments[r] : {}; r % 2 ? _forEachInstanceProperty__default["default"](_context = ownKeys(Object(t), !0)).call(_context, function (r) { _defineProperty(e, r, t[r]); }) : _Object$getOwnPropertyDescriptors__default["default"] ? _Object$defineProperties__default["default"](e, _Object$getOwnPropertyDescriptors__default["default"](t)) : _forEachInstanceProperty__default["default"](_context2 = ownKeys(Object(t))).call(_context2, function (r) { _Object$defineProperty__default["default"](e, r, _Object$getOwnPropertyDescriptor__default["default"](t, r)); }); } return e; }
856
1011
  const baseMenuProperties = {
@@ -971,7 +1126,7 @@ async function command(cliFlags) {
971
1126
 
972
1127
  var pkgJson = {
973
1128
  name: "@commercetools-frontend/application-cli",
974
- version: "2.4.0",
1129
+ version: "3.0.0",
975
1130
  description: "Internal CLI to manage Merchant Center application deployments across various environments.",
976
1131
  keywords: [
977
1132
  "commercetools",
@@ -983,6 +1138,7 @@ var pkgJson = {
983
1138
  module: "dist/commercetools-frontend-application-cli.esm.js",
984
1139
  bin: "bin/cli.js",
985
1140
  files: [
1141
+ "bin",
986
1142
  "cli",
987
1143
  "dist",
988
1144
  "package.json",
@@ -996,26 +1152,30 @@ var pkgJson = {
996
1152
  "@babel/core": "^7.22.11",
997
1153
  "@babel/runtime": "^7.21.0",
998
1154
  "@babel/runtime-corejs3": "^7.21.0",
999
- "@commercetools-frontend/application-config": "22.23.3",
1000
- "@commercetools-frontend/constants": "22.23.3",
1001
- "@commercetools-frontend/l10n": "22.23.3",
1155
+ "@commercetools-frontend/application-config": "22.27.0",
1156
+ "@commercetools-frontend/constants": "22.27.0",
1157
+ "@commercetools-frontend/l10n": "22.27.0",
1002
1158
  "@manypkg/find-root": "2.2.1",
1003
1159
  cac: "^6.7.14",
1004
1160
  cosmiconfig: "9.0.0",
1005
1161
  dotenv: "16.4.5",
1006
1162
  execa: "5.1.1",
1007
1163
  jsonschema: "^1.4.1",
1008
- listr2: "8.2.0",
1164
+ listr2: "8.2.1",
1165
+ lodash: "4.17.21",
1166
+ micromatch: "4.0.7",
1009
1167
  "node-fetch": "2.7.0",
1010
1168
  "ts-deepmerge": "7.0.0"
1011
1169
  },
1012
1170
  devDependencies: {
1013
1171
  "@tsconfig/node20": "20.1.4",
1014
- "@types/node": "20.12.7",
1172
+ "@types/lodash": "^4.14.198",
1173
+ "@types/micromatch": "4.0.7",
1174
+ "@types/node": "20.12.12",
1015
1175
  typescript: "5.2.2"
1016
1176
  },
1017
1177
  engines: {
1018
- node: ">=18",
1178
+ node: ">=21",
1019
1179
  npm: ">=6"
1020
1180
  },
1021
1181
  publishConfig: {
@@ -1052,11 +1212,11 @@ const run = async () => {
1052
1212
  }).option('--ci-assets-root-path [path]', '(optional) A replacement value for the scripts root path only used on CI (e.g. "--ci-assets-root-path=/root/") used in generated scripts.').option('--skip-menu', '(optional) If provided, it will skip uploading the `menu.json`.', {
1053
1213
  default: false
1054
1214
  }).action(async options => {
1055
- await command$3(options, cwd);
1215
+ await command$4(options, cwd);
1056
1216
  });
1057
1217
  const usageCompileMenu = 'Compile the menu links of an application into a `menu.json`. This is only required for internal applications';
1058
1218
  cli.command('compile-menu', usageCompileMenu).usage("compile-menu \n\n ".concat(usageCompileMenu)).option('--dotenv-folder [string]', '(optional) The path to a folder containing a dotenv file `.env.production` and a cloud-environment specific dotenv file (for example `.env.gcp-production-eu`). Those values are parsed and merged together to be used by the application config.').action(async options => {
1059
- await command$2(options, cwd);
1219
+ await command$3(options, cwd);
1060
1220
  });
1061
1221
  const usageValidateMenu = 'Validate compiled `menu.json` file';
1062
1222
  cli.command('validate-menu', usageValidateMenu).usage("validate-menu \n\n ".concat(usageValidateMenu)).option('--input-file <path>', '(required) The path to the `menu.json` file to be validated.').option('--navigation [string]', '(optional) Location of the menu navigation. Possible values are `top`.').action(async options => {
@@ -1066,7 +1226,20 @@ const run = async () => {
1066
1226
  cli.command('create-version', usageCreateVersion).usage("create-version \n\n ".concat(usageCreateVersion)).option('--version-url <url>', "(required) The path of an application's current `version.json` within the storage bucket.").option('--rollbacks [int]', '(optional) The number of max rollbacks to keep', {
1067
1227
  default: 15
1068
1228
  }).option('--out-file [path]', '(optional) The path to the file where to write the JSON. If not specified, the JSON is printed to stdout.').action(async options => {
1069
- await command$1(options);
1229
+ await command$2(options);
1230
+ });
1231
+
1232
+ // Command: Evaluate change triggers
1233
+ const usageEvaluateChangeTriggers = 'Evaluates changed files against a base and evaluates them against defined triggers.';
1234
+ cli.command('evaluate-change-triggers', usageEvaluateChangeTriggers).usage("evaluate-change-triggers \n\n ".concat(usageEvaluateChangeTriggers)).option('--branch <string>', 'The branch of the pull request', {
1235
+ default: process.env.CIRCLE_BRANCH
1236
+ }).option('--base-branch <string>', 'The base revision of the git commit compare against (e.g. "main")').option('--head-revision <string>', 'The revision of the git head to compare with', {
1237
+ default: process.env.CIRCLE_SHA1
1238
+ }).option('--trigger-name <string>', 'The trigger to evaluate for.').option('--silent', '(optional) Disable logging', {
1239
+ default: false
1240
+ }).option('--out-env-file [string]', '(optional) A file path where the matched triggers are written as a dotenv file.').action(async options => {
1241
+ const config = await storageBucketsConfig.loadConfig('circleci-change-triggers', []);
1242
+ await command$1(options, config, cwd);
1070
1243
  });
1071
1244
  cli.help();
1072
1245
  cli.version(pkgJson.version);