@commercetools-frontend/application-cli 2.3.2 → 2.5.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -18,7 +18,7 @@ var _mapInstanceProperty = require('@babel/runtime-corejs3/core-js-stable/instan
18
18
  var _Object$entries = require('@babel/runtime-corejs3/core-js-stable/object/entries');
19
19
  var _Set = require('@babel/runtime-corejs3/core-js-stable/set');
20
20
  var _flatMapInstanceProperty = require('@babel/runtime-corejs3/core-js-stable/instance/flat-map');
21
- var storageBucketsConfig = require('../../dist/storage-buckets-config-e092f613.cjs.prod.js');
21
+ var storageBucketsConfig = require('../../dist/storage-buckets-config-0a380c12.cjs.prod.js');
22
22
  var fs = require('node:fs');
23
23
  var path$1 = require('node:path');
24
24
  var listr2 = require('listr2');
@@ -41,9 +41,17 @@ var _JSON$stringify = require('@babel/runtime-corejs3/core-js-stable/json/string
41
41
  var applicationConfig = require('@commercetools-frontend/application-config');
42
42
  var l10n = require('@commercetools-frontend/l10n');
43
43
  var _sliceInstanceProperty = require('@babel/runtime-corejs3/core-js-stable/instance/slice');
44
+ var _startsWithInstanceProperty = require('@babel/runtime-corejs3/core-js-stable/instance/starts-with');
45
+ var _trimInstanceProperty = require('@babel/runtime-corejs3/core-js-stable/instance/trim');
46
+ var _someInstanceProperty = require('@babel/runtime-corejs3/core-js-stable/instance/some');
47
+ var _everyInstanceProperty = require('@babel/runtime-corejs3/core-js-stable/instance/every');
48
+ var _includesInstanceProperty = require('@babel/runtime-corejs3/core-js-stable/instance/includes');
49
+ var micromatch = require('micromatch');
50
+ var snakeCase = require('lodash/snakeCase');
44
51
  var jsonschema = require('jsonschema');
45
52
  require('cosmiconfig');
46
53
  require('ts-deepmerge');
54
+ require('lodash');
47
55
 
48
56
  function _interopDefault (e) { return e && e.__esModule ? e : { 'default': e }; }
49
57
 
@@ -70,6 +78,13 @@ var fs__default$1 = /*#__PURE__*/_interopDefault(fs$1);
70
78
  var _findInstanceProperty__default = /*#__PURE__*/_interopDefault(_findInstanceProperty);
71
79
  var _JSON$stringify__default = /*#__PURE__*/_interopDefault(_JSON$stringify);
72
80
  var _sliceInstanceProperty__default = /*#__PURE__*/_interopDefault(_sliceInstanceProperty);
81
+ var _startsWithInstanceProperty__default = /*#__PURE__*/_interopDefault(_startsWithInstanceProperty);
82
+ var _trimInstanceProperty__default = /*#__PURE__*/_interopDefault(_trimInstanceProperty);
83
+ var _someInstanceProperty__default = /*#__PURE__*/_interopDefault(_someInstanceProperty);
84
+ var _everyInstanceProperty__default = /*#__PURE__*/_interopDefault(_everyInstanceProperty);
85
+ var _includesInstanceProperty__default = /*#__PURE__*/_interopDefault(_includesInstanceProperty);
86
+ var micromatch__default = /*#__PURE__*/_interopDefault(micromatch);
87
+ var snakeCase__default = /*#__PURE__*/_interopDefault(snakeCase);
73
88
 
74
89
  function getApplicationDirectory(cwd) {
75
90
  return fs__default["default"].realpathSync(cwd);
@@ -85,7 +100,7 @@ function isCI() {
85
100
  }
86
101
 
87
102
  function ownKeys$4(e, r) { var t = _Object$keys__default["default"](e); if (_Object$getOwnPropertySymbols__default["default"]) { var o = _Object$getOwnPropertySymbols__default["default"](e); r && (o = _filterInstanceProperty__default["default"](o).call(o, function (r) { return _Object$getOwnPropertyDescriptor__default["default"](e, r).enumerable; })), t.push.apply(t, o); } return t; }
88
- function _objectSpread$4(e) { for (var r = 1; r < arguments.length; r++) { var _context5, _context6; var t = null != arguments[r] ? arguments[r] : {}; r % 2 ? _forEachInstanceProperty__default["default"](_context5 = ownKeys$4(Object(t), !0)).call(_context5, function (r) { _defineProperty(e, r, t[r]); }) : _Object$getOwnPropertyDescriptors__default["default"] ? _Object$defineProperties__default["default"](e, _Object$getOwnPropertyDescriptors__default["default"](t)) : _forEachInstanceProperty__default["default"](_context6 = ownKeys$4(Object(t))).call(_context6, function (r) { _Object$defineProperty__default["default"](e, r, _Object$getOwnPropertyDescriptor__default["default"](t, r)); }); } return e; }
103
+ function _objectSpread$4(e) { for (var r = 1; r < arguments.length; r++) { var _context6, _context7; var t = null != arguments[r] ? arguments[r] : {}; r % 2 ? _forEachInstanceProperty__default["default"](_context6 = ownKeys$4(Object(t), !0)).call(_context6, function (r) { _defineProperty(e, r, t[r]); }) : _Object$getOwnPropertyDescriptors__default["default"] ? _Object$defineProperties__default["default"](e, _Object$getOwnPropertyDescriptors__default["default"](t)) : _forEachInstanceProperty__default["default"](_context7 = ownKeys$4(Object(t))).call(_context7, function (r) { _Object$defineProperty__default["default"](e, r, _Object$getOwnPropertyDescriptor__default["default"](t, r)); }); } return e; }
89
104
  function _callSuper(_this, derived, args) {
90
105
  function isNativeReflectConstruct() {
91
106
  if (typeof Reflect === "undefined" || !_Reflect$construct__default["default"]) return false;
@@ -190,6 +205,10 @@ let StorageProvider = /*#__PURE__*/function () {
190
205
  }
191
206
  const storageProvider = storageBucketsConfig.storageProviders[tag];
192
207
  const publicBaseUrl = (_storageProvider$urls3 = storageProvider.urls.public[_classPrivateFieldLooseBase(this, _bucketEnvironment$1)[_bucketEnvironment$1]]) !== null && _storageProvider$urls3 !== void 0 ? _storageProvider$urls3 : storageProvider.urls.public.default;
208
+ if (!publicBaseUrl) {
209
+ var _context5;
210
+ throw new Error(_concatInstanceProperty__default["default"](_context5 = "'publicBaseUrl' is not defined for '".concat(tag, "' storage provider for ")).call(_context5, _classPrivateFieldLooseBase(this, _bucketEnvironment$1)[_bucketEnvironment$1], " or as default."));
211
+ }
193
212
  return publicBaseUrl;
194
213
  }
195
214
  }]);
@@ -418,7 +437,7 @@ let AwsStorageUploadScriptsGenerator = /*#__PURE__*/function () {
418
437
  buildRevision = _ref3.buildRevision,
419
438
  buildNumber = _ref3.buildNumber,
420
439
  applicationIndexOutFile = _ref3.applicationIndexOutFile;
421
- return _concatInstanceProperty__default["default"](_context19 = _concatInstanceProperty__default["default"](_context20 = _concatInstanceProperty__default["default"](_context21 = _concatInstanceProperty__default["default"](_context22 = _concatInstanceProperty__default["default"](_context23 = _concatInstanceProperty__default["default"](_context24 = _concatInstanceProperty__default["default"](_context25 = _concatInstanceProperty__default["default"](_context26 = _concatInstanceProperty__default["default"](_context27 = _concatInstanceProperty__default["default"](_context28 = _concatInstanceProperty__default["default"](_context29 = "\n #!/usr/bin/env bash\n\n echo \"Uploading static assets to Amazon S3 bucket ".concat(bucketUrl, "\"\n\n set -e\n\n aws s3 cp \"$(dirname \"$0\")/")).call(_context29, applicationIndexOutFile, "\" \\\n \"")).call(_context28, bucketUrl, "/\" \\\n --content-type=\"text/html\" \\\n --cache-control=\"public,max-age=0,no-transform\" \\\n --profile ")).call(_context27, _classPrivateFieldLooseBase(this, _bucketRegion2)[_bucketRegion2], "\n\n echo \"Creating version.json and uploading it to bucket ")).call(_context26, bucketUrl, "\"\n\n NODE_ENV=production ")).call(_context25, packageManagerName, " application-cli create-version \\\n --version-url=")).call(_context24, cdnUrl, "/")).call(_context23, _classPrivateFieldLooseBase(this, _bucketEnvironment2)[_bucketEnvironment2], "/version.json \\\n --build-revision=")).call(_context22, buildRevision, " \\\n --build-number=")).call(_context21, buildNumber, " \\\n --out-file=$(dirname \"$0\")/version.json\n\n aws s3 cp \"$(dirname \"$0\")\" \\\n \"")).call(_context20, bucketUrl, "/\" \\\n --exclude \"*\" \\\n --include \"version.json\" \\\n --content-type=\"application/json\" \\\n --cache-control=\"public,max-age=0,no-transform\" \\\n --profile ")).call(_context19, _classPrivateFieldLooseBase(this, _bucketRegion2)[_bucketRegion2], "\n");
440
+ return _concatInstanceProperty__default["default"](_context19 = _concatInstanceProperty__default["default"](_context20 = _concatInstanceProperty__default["default"](_context21 = _concatInstanceProperty__default["default"](_context22 = _concatInstanceProperty__default["default"](_context23 = _concatInstanceProperty__default["default"](_context24 = _concatInstanceProperty__default["default"](_context25 = _concatInstanceProperty__default["default"](_context26 = _concatInstanceProperty__default["default"](_context27 = _concatInstanceProperty__default["default"](_context28 = _concatInstanceProperty__default["default"](_context29 = "\n #!/usr/bin/env bash\n\n echo \"Uploading static assets to Amazon S3 bucket ".concat(bucketUrl, "\"\n\n set -e\n\n aws s3 cp \"$(dirname \"$0\")/")).call(_context29, applicationIndexOutFile, "\" \\\n \"")).call(_context28, bucketUrl, "/\" \\\n --content-type=\"text/html\" \\\n --cache-control=\"public,max-age=0,no-transform\" \\\n --profile ")).call(_context27, _classPrivateFieldLooseBase(this, _bucketRegion2)[_bucketRegion2], "\n\n echo \"Creating version.json and uploading it to bucket ")).call(_context26, bucketUrl, "\"\n\n NODE_ENV=production ")).call(_context25, packageManagerName, " application-cli create-version \\\n --version-url=")).call(_context24, cdnUrl, "/")).call(_context23, _classPrivateFieldLooseBase(this, _bucketEnvironment2)[_bucketEnvironment2], "/version.json \\\n --build-revision=")).call(_context22, buildRevision, " \\\n --build-number=")).call(_context21, buildNumber, " \\\n --out-file=$(dirname \"$0\")/version.json\n\n aws s3 cp \"$(dirname \"$0\")/version.json\" \\\n \"")).call(_context20, bucketUrl, "/\" \\\n --content-type=\"application/json\" \\\n --cache-control=\"public,max-age=0,no-transform\" \\\n --profile ")).call(_context19, _classPrivateFieldLooseBase(this, _bucketRegion2)[_bucketRegion2], "\n");
422
441
  }
423
442
  }, {
424
443
  key: "getProductionBundlesUploadScript",
@@ -607,7 +626,7 @@ async function compileEnvironmentApplicationIndexes(_ref3) {
607
626
  throw new Error(moveResult.stderr);
608
627
  }
609
628
  }
610
- async function command$3(cliFlags, cwd) {
629
+ async function command$4(cliFlags, cwd) {
611
630
  var _context3;
612
631
  const storageBucketConfig = await storageBucketsConfig.loadStorageBucketsConfig();
613
632
  const applicationDirectory = getApplicationDirectory(cwd);
@@ -780,7 +799,7 @@ const mapApplicationMenuConfigToGraqhQLMenuJson = config => {
780
799
  shouldRenderDivider: (_menuLinks$shouldRend = menuLinks.shouldRenderDivider) !== null && _menuLinks$shouldRend !== void 0 ? _menuLinks$shouldRend : false
781
800
  };
782
801
  };
783
- async function command$2(cliFlags, cwd) {
802
+ async function command$3(cliFlags, cwd) {
784
803
  const applicationDirectory = getApplicationDirectory(cwd);
785
804
  const monorepoRoot = findRoot.findRootSync(cwd);
786
805
  const dotenvPath = cliFlags.dotenvFolder && path__default["default"].join(monorepoRoot.rootDir, cliFlags.dotenvFolder);
@@ -811,7 +830,7 @@ async function command$2(cliFlags, cwd) {
811
830
  });
812
831
  }
813
832
 
814
- async function command$1(cliFlags) {
833
+ async function command$2(cliFlags) {
815
834
  const numberOfRollbacks = cliFlags.rollbacks - 1;
816
835
  let nextRollbacks;
817
836
  try {
@@ -847,6 +866,158 @@ async function command$1(cliFlags) {
847
866
  }
848
867
  }
849
868
 
869
+ /**
870
+ * This is heavily inspired by https://circleci.com/developer/orbs/orb/circleci/path-filtering.
871
+ *
872
+ * It detects changed files between `HEAD` and a base revision.
873
+ * To match them against configured RegEx triggers.
874
+ * All matched triggers will be written as a dotenv file.
875
+ * The dotenv file is read in a CircleCI step and evaluated.
876
+ */
877
+ const git = {
878
+ // https://git-scm.com/docs/git-merge-base
879
+ base: (baseBranch, headRevision) => {
880
+ var _context;
881
+ return _concatInstanceProperty__default["default"](_context = "git merge-base ".concat(baseBranch, " ")).call(_context, headRevision);
882
+ },
883
+ // https://git-scm.com/docs/git-diff
884
+ changedFiles: (mergeRevision, headRevision) => {
885
+ var _context2;
886
+ return _concatInstanceProperty__default["default"](_context2 = "git diff --name-only ".concat(mergeRevision, " ")).call(_context2, headRevision);
887
+ },
888
+ commitMessage: headRevision => "git log --format=oneline -n 1 ".concat(headRevision)
889
+ };
890
+ const helpers = {
891
+ async writeOutDotEnvFile(cliFlags, cwd, matchingTriggers) {
892
+ var _context3;
893
+ // If desired read the env file and write out the matching triggers.
894
+ if (!cliFlags.outEnvFile) {
895
+ return;
896
+ }
897
+ const filePath = path__default$1["default"].join(fs__default["default"].realpathSync(cwd), cliFlags.outEnvFile);
898
+ const fileContents = _mapInstanceProperty__default["default"](_context3 = _Object$entries__default["default"](matchingTriggers)).call(_context3, _ref => {
899
+ var _context5;
900
+ let _ref2 = _slicedToArray(_ref, 2),
901
+ triggerName = _ref2[0],
902
+ triggerValue = _ref2[1];
903
+ const triggerNameForEnvFile = "".concat(snakeCase__default["default"](triggerName).toUpperCase());
904
+
905
+ // General pipeline optimization hints are not transformed
906
+ if (_startsWithInstanceProperty__default["default"](triggerName).call(triggerName, 'allowPipelineOptimizations')) {
907
+ var _context4;
908
+ return _concatInstanceProperty__default["default"](_context4 = "".concat(triggerNameForEnvFile, "=")).call(_context4, triggerValue);
909
+ }
910
+ return _concatInstanceProperty__default["default"](_context5 = "DID_".concat(triggerNameForEnvFile, "_CHANGE=")).call(_context5, triggerValue);
911
+ }).join('\n');
912
+ await fs__default["default"].promises.writeFile(filePath, fileContents);
913
+ if (!cliFlags.silent) {
914
+ console.log("\uD83D\uDCDD Wrote out file to '".concat(filePath, "' with contents:"));
915
+ console.log(fileContents);
916
+ }
917
+ },
918
+ async getChangedFiles(cliFlags) {
919
+ var _context6, _context7;
920
+ const baseCmdResult = await execa.command(git.base(cliFlags.baseBranch, cliFlags.headRevision));
921
+ const mergeRevision = baseCmdResult.stdout;
922
+ const changedFilesCmdResult = await execa.command(git.changedFiles(mergeRevision, cliFlags.headRevision));
923
+ const changedFiles = _filterInstanceProperty__default["default"](_context6 = _mapInstanceProperty__default["default"](_context7 = changedFilesCmdResult.stdout.split('\n')).call(_context7, filePath => _trimInstanceProperty__default["default"](filePath).call(filePath))).call(_context6, filePath => filePath.length > 0);
924
+ return changedFiles;
925
+ },
926
+ async matchTriggersAgainstChangedFiles(cliFlags, config, changedFiles) {
927
+ const matchedTriggers = {};
928
+
929
+ // Evaluate each trigger against each file.
930
+ _forEachInstanceProperty__default["default"](config).call(config, async trigger => {
931
+ var _trigger$exclude;
932
+ const hasTriggerBeenInitialized = typeof matchedTriggers[trigger.name] === 'number';
933
+
934
+ // Given the trigger with this name was never evaluated it has to be defaulted to 0.
935
+ // As without any matches we should indicate nothing changed.
936
+ if (!hasTriggerBeenInitialized) {
937
+ matchedTriggers[trigger.name] = 0;
938
+ }
939
+ // Given the trigger was already evaluated to be positive we can skip this evaluation.
940
+ if (matchedTriggers[trigger.name] === 1) {
941
+ return matchedTriggers;
942
+ }
943
+
944
+ // In any other case we evaluate this trigger.
945
+ const anyFileChangedForTrigger = _someInstanceProperty__default["default"](micromatch__default["default"]).call(micromatch__default["default"], changedFiles, trigger.include, {
946
+ ignore: trigger.ignore
947
+ });
948
+ if (!cliFlags.silent && anyFileChangedForTrigger) {
949
+ console.log("\u2139\uFE0F Files for trigger ".concat(trigger.name, " changed."));
950
+ }
951
+ let onlyExcludedFilesChangedForTrigger = false;
952
+ if (((_trigger$exclude = trigger.exclude) === null || _trigger$exclude === void 0 ? void 0 : _trigger$exclude.length) > 0) {
953
+ // NOTE: `micromatch.every` evaluates if every file matches
954
+ // every pattern.
955
+ // We need to evaluate if every file matches some pattern.
956
+ onlyExcludedFilesChangedForTrigger = _everyInstanceProperty__default["default"](changedFiles).call(changedFiles, changedFile => {
957
+ return micromatch__default["default"].isMatch(changedFile, trigger.exclude, {
958
+ ignore: trigger.ignore
959
+ });
960
+ });
961
+ }
962
+ if (!cliFlags.silent && onlyExcludedFilesChangedForTrigger) {
963
+ console.log("\u2139\uFE0F Only excluded files for trigger ".concat(trigger.name, " changed."));
964
+ }
965
+ if (onlyExcludedFilesChangedForTrigger) {
966
+ matchedTriggers[trigger.name] = 0;
967
+ } else {
968
+ matchedTriggers[trigger.name] = Number(anyFileChangedForTrigger);
969
+ }
970
+ return matchedTriggers;
971
+ });
972
+ return matchedTriggers;
973
+ }
974
+ };
975
+ async function command$1(cliFlags, config, cwd) {
976
+ const enablePipelineOptimizations = process.env.ENABLE_PIPELINE_OPTIMIZATIONS === '1';
977
+ const isDevelopmentBranch = cliFlags.branch !== cliFlags.baseBranch;
978
+ const triggersContainingSharedFiles = _filterInstanceProperty__default["default"](config).call(config, trigger => trigger.containsSharedFiles);
979
+ if (!cliFlags.silent) {
980
+ var _context8;
981
+ console.log("\u2139\uFE0F Pipeline optimizations are ".concat(enablePipelineOptimizations ? 'enabled' : 'disabled', "."));
982
+ console.log("\u2139\uFE0F Changes have been commited to the ".concat(isDevelopmentBranch ? 'a development' : 'the main', " branch."));
983
+ console.log(_concatInstanceProperty__default["default"](_context8 = "\uD83D\uDEA7 Comparing '".concat(cliFlags.baseBranch, "' against '")).call(_context8, cliFlags.headRevision, "' to determine changed files."));
984
+ }
985
+
986
+ // Collect and parse changed files from git comparing base and head revision.
987
+ const changedFiles = await helpers.getChangedFiles(cliFlags);
988
+ if (!cliFlags.silent) {
989
+ if (changedFiles.length === 0) {
990
+ console.log("\u2139\uFE0F No changes found.");
991
+ } else {
992
+ console.log("\u2139\uFE0F ".concat(changedFiles.length, " changes found."));
993
+ }
994
+ }
995
+
996
+ // Read the trigger file to match the changed files against.
997
+ const matchedTriggers = await helpers.matchTriggersAgainstChangedFiles(cliFlags, config, changedFiles);
998
+ const commitMessageCmdResult = await execa.command(git.commitMessage(cliFlags.headRevision));
999
+ const commitMessage = commitMessageCmdResult.stdout;
1000
+ const hasCommitMessageTrigger = commitMessage && _includesInstanceProperty__default["default"](commitMessage).call(commitMessage, '[ci all]');
1001
+ const doesSharedTriggerMatch = _someInstanceProperty__default["default"](triggersContainingSharedFiles).call(triggersContainingSharedFiles, triggerContainingSharedFiles => matchedTriggers[triggerContainingSharedFiles.name] === 1);
1002
+ if (!cliFlags.silent) {
1003
+ console.log("\u2139\uFE0F The git commit message ".concat(hasCommitMessageTrigger ? 'does' : 'does not', " contain a [ci all] trigger."));
1004
+ }
1005
+ const doesPackageFolderTriggerMatch = matchedTriggers[cliFlags.triggerName] === 1;
1006
+ if (enablePipelineOptimizations && isDevelopmentBranch && !hasCommitMessageTrigger && !doesSharedTriggerMatch && !doesPackageFolderTriggerMatch) {
1007
+ if (!cliFlags.silent) {
1008
+ console.log("\u2139\uFE0F No relevant changes found for ".concat(cliFlags.triggerName, "."));
1009
+ }
1010
+ matchedTriggers['allowPipelineOptimizationsForTrigger'] = 1;
1011
+ } else {
1012
+ if (!cliFlags.silent) {
1013
+ console.log("\u2139\uFE0F Relevant changes found for ".concat(cliFlags.triggerName, "."));
1014
+ }
1015
+ matchedTriggers['allowPipelineOptimizationsForTrigger'] = 0;
1016
+ }
1017
+ await helpers.writeOutDotEnvFile(cliFlags, cwd, matchedTriggers);
1018
+ return matchedTriggers;
1019
+ }
1020
+
850
1021
  function ownKeys(e, r) { var t = _Object$keys__default["default"](e); if (_Object$getOwnPropertySymbols__default["default"]) { var o = _Object$getOwnPropertySymbols__default["default"](e); r && (o = _filterInstanceProperty__default["default"](o).call(o, function (r) { return _Object$getOwnPropertyDescriptor__default["default"](e, r).enumerable; })), t.push.apply(t, o); } return t; }
851
1022
  function _objectSpread(e) { for (var r = 1; r < arguments.length; r++) { var _context, _context2; var t = null != arguments[r] ? arguments[r] : {}; r % 2 ? _forEachInstanceProperty__default["default"](_context = ownKeys(Object(t), !0)).call(_context, function (r) { _defineProperty(e, r, t[r]); }) : _Object$getOwnPropertyDescriptors__default["default"] ? _Object$defineProperties__default["default"](e, _Object$getOwnPropertyDescriptors__default["default"](t)) : _forEachInstanceProperty__default["default"](_context2 = ownKeys(Object(t))).call(_context2, function (r) { _Object$defineProperty__default["default"](e, r, _Object$getOwnPropertyDescriptor__default["default"](t, r)); }); } return e; }
852
1023
  const baseMenuProperties = {
@@ -967,7 +1138,7 @@ async function command(cliFlags) {
967
1138
 
968
1139
  var pkgJson = {
969
1140
  name: "@commercetools-frontend/application-cli",
970
- version: "2.3.2",
1141
+ version: "2.5.0",
971
1142
  description: "Internal CLI to manage Merchant Center application deployments across various environments.",
972
1143
  keywords: [
973
1144
  "commercetools",
@@ -1002,11 +1173,15 @@ var pkgJson = {
1002
1173
  execa: "5.1.1",
1003
1174
  jsonschema: "^1.4.1",
1004
1175
  listr2: "8.2.0",
1176
+ lodash: "4.17.21",
1177
+ micromatch: "4.0.5",
1005
1178
  "node-fetch": "2.7.0",
1006
1179
  "ts-deepmerge": "7.0.0"
1007
1180
  },
1008
1181
  devDependencies: {
1009
1182
  "@tsconfig/node20": "20.1.4",
1183
+ "@types/lodash": "^4.14.198",
1184
+ "@types/micromatch": "4.0.6",
1010
1185
  "@types/node": "20.12.7",
1011
1186
  typescript: "5.2.2"
1012
1187
  },
@@ -1048,11 +1223,11 @@ const run = async () => {
1048
1223
  }).option('--ci-assets-root-path [path]', '(optional) A replacement value for the scripts root path only used on CI (e.g. "--ci-assets-root-path=/root/") used in generated scripts.').option('--skip-menu', '(optional) If provided, it will skip uploading the `menu.json`.', {
1049
1224
  default: false
1050
1225
  }).action(async options => {
1051
- await command$3(options, cwd);
1226
+ await command$4(options, cwd);
1052
1227
  });
1053
1228
  const usageCompileMenu = 'Compile the menu links of an application into a `menu.json`. This is only required for internal applications';
1054
1229
  cli.command('compile-menu', usageCompileMenu).usage("compile-menu \n\n ".concat(usageCompileMenu)).option('--dotenv-folder [string]', '(optional) The path to a folder containing a dotenv file `.env.production` and a cloud-environment specific dotenv file (for example `.env.gcp-production-eu`). Those values are parsed and merged together to be used by the application config.').action(async options => {
1055
- await command$2(options, cwd);
1230
+ await command$3(options, cwd);
1056
1231
  });
1057
1232
  const usageValidateMenu = 'Validate compiled `menu.json` file';
1058
1233
  cli.command('validate-menu', usageValidateMenu).usage("validate-menu \n\n ".concat(usageValidateMenu)).option('--input-file <path>', '(required) The path to the `menu.json` file to be validated.').option('--navigation [string]', '(optional) Location of the menu navigation. Possible values are `top`.').action(async options => {
@@ -1062,7 +1237,20 @@ const run = async () => {
1062
1237
  cli.command('create-version', usageCreateVersion).usage("create-version \n\n ".concat(usageCreateVersion)).option('--version-url <url>', "(required) The path of an application's current `version.json` within the storage bucket.").option('--rollbacks [int]', '(optional) The number of max rollbacks to keep', {
1063
1238
  default: 15
1064
1239
  }).option('--out-file [path]', '(optional) The path to the file where to write the JSON. If not specified, the JSON is printed to stdout.').action(async options => {
1065
- await command$1(options);
1240
+ await command$2(options);
1241
+ });
1242
+
1243
+ // Command: Evaluate change triggers
1244
+ const usageEvaluateChangeTriggers = 'Evaluates changed files against a base and evaluates them against defined triggers.';
1245
+ cli.command('evaluate-change-triggers', usageEvaluateChangeTriggers).usage("evaluate-change-triggers \n\n ".concat(usageEvaluateChangeTriggers)).option('--branch <string>', 'The branch of the pull request', {
1246
+ default: process.env.CIRCLE_BRANCH
1247
+ }).option('--base-branch <string>', 'The base revision of the git commit compare against (e.g. "main")').option('--head-revision <string>', 'The revision of the git head to compare with', {
1248
+ default: process.env.CIRCLE_SHA1
1249
+ }).option('--trigger-name <string>', 'The trigger to evaluate for.').option('--silent', '(optional) Disable logging', {
1250
+ default: false
1251
+ }).option('--out-env-file [string]', '(optional) A file path where the matched triggers are written as a dotenv file.').action(async options => {
1252
+ const config = await storageBucketsConfig.loadConfig('circleci-change-triggers', []);
1253
+ await command$1(options, config, cwd);
1066
1254
  });
1067
1255
  cli.help();
1068
1256
  cli.version(pkgJson.version);
@@ -14,11 +14,11 @@ import _mapInstanceProperty from '@babel/runtime-corejs3/core-js-stable/instance
14
14
  import _Object$entries from '@babel/runtime-corejs3/core-js-stable/object/entries';
15
15
  import _Set from '@babel/runtime-corejs3/core-js-stable/set';
16
16
  import _flatMapInstanceProperty from '@babel/runtime-corejs3/core-js-stable/instance/flat-map';
17
- import { s as storageProviders, l as loadStorageBucketsConfig, c as clusterContexts } from '../../dist/storage-buckets-config-f41d07bc.esm.js';
17
+ import { s as storageProviders, l as loadStorageBucketsConfig, c as clusterContexts, a as loadConfig } from '../../dist/storage-buckets-config-896b4064.esm.js';
18
18
  import fs from 'node:fs';
19
19
  import path$1 from 'node:path';
20
20
  import { Listr } from 'listr2';
21
- import execa from 'execa';
21
+ import execa, { command as command$5 } from 'execa';
22
22
  import { findRootSync } from '@manypkg/find-root';
23
23
  import path from 'path';
24
24
  import _possibleConstructorReturn from '@babel/runtime-corejs3/helpers/esm/possibleConstructorReturn';
@@ -37,9 +37,17 @@ import _JSON$stringify from '@babel/runtime-corejs3/core-js-stable/json/stringif
37
37
  import { processConfig } from '@commercetools-frontend/application-config';
38
38
  import { getSupportedLocales } from '@commercetools-frontend/l10n';
39
39
  import _sliceInstanceProperty from '@babel/runtime-corejs3/core-js-stable/instance/slice';
40
+ import _startsWithInstanceProperty from '@babel/runtime-corejs3/core-js-stable/instance/starts-with';
41
+ import _trimInstanceProperty from '@babel/runtime-corejs3/core-js-stable/instance/trim';
42
+ import _someInstanceProperty from '@babel/runtime-corejs3/core-js-stable/instance/some';
43
+ import _everyInstanceProperty from '@babel/runtime-corejs3/core-js-stable/instance/every';
44
+ import _includesInstanceProperty from '@babel/runtime-corejs3/core-js-stable/instance/includes';
45
+ import micromatch from 'micromatch';
46
+ import snakeCase from 'lodash/snakeCase';
40
47
  import { Validator } from 'jsonschema';
41
48
  import 'cosmiconfig';
42
49
  import 'ts-deepmerge';
50
+ import 'lodash';
43
51
 
44
52
  function getApplicationDirectory(cwd) {
45
53
  return fs.realpathSync(cwd);
@@ -55,7 +63,7 @@ function isCI() {
55
63
  }
56
64
 
57
65
  function ownKeys$4(e, r) { var t = _Object$keys(e); if (_Object$getOwnPropertySymbols) { var o = _Object$getOwnPropertySymbols(e); r && (o = _filterInstanceProperty(o).call(o, function (r) { return _Object$getOwnPropertyDescriptor(e, r).enumerable; })), t.push.apply(t, o); } return t; }
58
- function _objectSpread$4(e) { for (var r = 1; r < arguments.length; r++) { var _context5, _context6; var t = null != arguments[r] ? arguments[r] : {}; r % 2 ? _forEachInstanceProperty(_context5 = ownKeys$4(Object(t), !0)).call(_context5, function (r) { _defineProperty(e, r, t[r]); }) : _Object$getOwnPropertyDescriptors ? _Object$defineProperties(e, _Object$getOwnPropertyDescriptors(t)) : _forEachInstanceProperty(_context6 = ownKeys$4(Object(t))).call(_context6, function (r) { _Object$defineProperty(e, r, _Object$getOwnPropertyDescriptor(t, r)); }); } return e; }
66
+ function _objectSpread$4(e) { for (var r = 1; r < arguments.length; r++) { var _context6, _context7; var t = null != arguments[r] ? arguments[r] : {}; r % 2 ? _forEachInstanceProperty(_context6 = ownKeys$4(Object(t), !0)).call(_context6, function (r) { _defineProperty(e, r, t[r]); }) : _Object$getOwnPropertyDescriptors ? _Object$defineProperties(e, _Object$getOwnPropertyDescriptors(t)) : _forEachInstanceProperty(_context7 = ownKeys$4(Object(t))).call(_context7, function (r) { _Object$defineProperty(e, r, _Object$getOwnPropertyDescriptor(t, r)); }); } return e; }
59
67
  function _callSuper(_this, derived, args) {
60
68
  function isNativeReflectConstruct() {
61
69
  if (typeof Reflect === "undefined" || !_Reflect$construct) return false;
@@ -160,6 +168,10 @@ let StorageProvider = /*#__PURE__*/function () {
160
168
  }
161
169
  const storageProvider = storageProviders[tag];
162
170
  const publicBaseUrl = (_storageProvider$urls3 = storageProvider.urls.public[_classPrivateFieldLooseBase(this, _bucketEnvironment$1)[_bucketEnvironment$1]]) !== null && _storageProvider$urls3 !== void 0 ? _storageProvider$urls3 : storageProvider.urls.public.default;
171
+ if (!publicBaseUrl) {
172
+ var _context5;
173
+ throw new Error(_concatInstanceProperty(_context5 = "'publicBaseUrl' is not defined for '".concat(tag, "' storage provider for ")).call(_context5, _classPrivateFieldLooseBase(this, _bucketEnvironment$1)[_bucketEnvironment$1], " or as default."));
174
+ }
163
175
  return publicBaseUrl;
164
176
  }
165
177
  }]);
@@ -388,7 +400,7 @@ let AwsStorageUploadScriptsGenerator = /*#__PURE__*/function () {
388
400
  buildRevision = _ref3.buildRevision,
389
401
  buildNumber = _ref3.buildNumber,
390
402
  applicationIndexOutFile = _ref3.applicationIndexOutFile;
391
- return _concatInstanceProperty(_context19 = _concatInstanceProperty(_context20 = _concatInstanceProperty(_context21 = _concatInstanceProperty(_context22 = _concatInstanceProperty(_context23 = _concatInstanceProperty(_context24 = _concatInstanceProperty(_context25 = _concatInstanceProperty(_context26 = _concatInstanceProperty(_context27 = _concatInstanceProperty(_context28 = _concatInstanceProperty(_context29 = "\n #!/usr/bin/env bash\n\n echo \"Uploading static assets to Amazon S3 bucket ".concat(bucketUrl, "\"\n\n set -e\n\n aws s3 cp \"$(dirname \"$0\")/")).call(_context29, applicationIndexOutFile, "\" \\\n \"")).call(_context28, bucketUrl, "/\" \\\n --content-type=\"text/html\" \\\n --cache-control=\"public,max-age=0,no-transform\" \\\n --profile ")).call(_context27, _classPrivateFieldLooseBase(this, _bucketRegion2)[_bucketRegion2], "\n\n echo \"Creating version.json and uploading it to bucket ")).call(_context26, bucketUrl, "\"\n\n NODE_ENV=production ")).call(_context25, packageManagerName, " application-cli create-version \\\n --version-url=")).call(_context24, cdnUrl, "/")).call(_context23, _classPrivateFieldLooseBase(this, _bucketEnvironment2)[_bucketEnvironment2], "/version.json \\\n --build-revision=")).call(_context22, buildRevision, " \\\n --build-number=")).call(_context21, buildNumber, " \\\n --out-file=$(dirname \"$0\")/version.json\n\n aws s3 cp \"$(dirname \"$0\")\" \\\n \"")).call(_context20, bucketUrl, "/\" \\\n --exclude \"*\" \\\n --include \"version.json\" \\\n --content-type=\"application/json\" \\\n --cache-control=\"public,max-age=0,no-transform\" \\\n --profile ")).call(_context19, _classPrivateFieldLooseBase(this, _bucketRegion2)[_bucketRegion2], "\n");
403
+ return _concatInstanceProperty(_context19 = _concatInstanceProperty(_context20 = _concatInstanceProperty(_context21 = _concatInstanceProperty(_context22 = _concatInstanceProperty(_context23 = _concatInstanceProperty(_context24 = _concatInstanceProperty(_context25 = _concatInstanceProperty(_context26 = _concatInstanceProperty(_context27 = _concatInstanceProperty(_context28 = _concatInstanceProperty(_context29 = "\n #!/usr/bin/env bash\n\n echo \"Uploading static assets to Amazon S3 bucket ".concat(bucketUrl, "\"\n\n set -e\n\n aws s3 cp \"$(dirname \"$0\")/")).call(_context29, applicationIndexOutFile, "\" \\\n \"")).call(_context28, bucketUrl, "/\" \\\n --content-type=\"text/html\" \\\n --cache-control=\"public,max-age=0,no-transform\" \\\n --profile ")).call(_context27, _classPrivateFieldLooseBase(this, _bucketRegion2)[_bucketRegion2], "\n\n echo \"Creating version.json and uploading it to bucket ")).call(_context26, bucketUrl, "\"\n\n NODE_ENV=production ")).call(_context25, packageManagerName, " application-cli create-version \\\n --version-url=")).call(_context24, cdnUrl, "/")).call(_context23, _classPrivateFieldLooseBase(this, _bucketEnvironment2)[_bucketEnvironment2], "/version.json \\\n --build-revision=")).call(_context22, buildRevision, " \\\n --build-number=")).call(_context21, buildNumber, " \\\n --out-file=$(dirname \"$0\")/version.json\n\n aws s3 cp \"$(dirname \"$0\")/version.json\" \\\n \"")).call(_context20, bucketUrl, "/\" \\\n --content-type=\"application/json\" \\\n --cache-control=\"public,max-age=0,no-transform\" \\\n --profile ")).call(_context19, _classPrivateFieldLooseBase(this, _bucketRegion2)[_bucketRegion2], "\n");
392
404
  }
393
405
  }, {
394
406
  key: "getProductionBundlesUploadScript",
@@ -577,7 +589,7 @@ async function compileEnvironmentApplicationIndexes(_ref3) {
577
589
  throw new Error(moveResult.stderr);
578
590
  }
579
591
  }
580
- async function command$3(cliFlags, cwd) {
592
+ async function command$4(cliFlags, cwd) {
581
593
  var _context3;
582
594
  const storageBucketConfig = await loadStorageBucketsConfig();
583
595
  const applicationDirectory = getApplicationDirectory(cwd);
@@ -750,7 +762,7 @@ const mapApplicationMenuConfigToGraqhQLMenuJson = config => {
750
762
  shouldRenderDivider: (_menuLinks$shouldRend = menuLinks.shouldRenderDivider) !== null && _menuLinks$shouldRend !== void 0 ? _menuLinks$shouldRend : false
751
763
  };
752
764
  };
753
- async function command$2(cliFlags, cwd) {
765
+ async function command$3(cliFlags, cwd) {
754
766
  const applicationDirectory = getApplicationDirectory(cwd);
755
767
  const monorepoRoot = findRootSync(cwd);
756
768
  const dotenvPath = cliFlags.dotenvFolder && path.join(monorepoRoot.rootDir, cliFlags.dotenvFolder);
@@ -781,7 +793,7 @@ async function command$2(cliFlags, cwd) {
781
793
  });
782
794
  }
783
795
 
784
- async function command$1(cliFlags) {
796
+ async function command$2(cliFlags) {
785
797
  const numberOfRollbacks = cliFlags.rollbacks - 1;
786
798
  let nextRollbacks;
787
799
  try {
@@ -817,6 +829,158 @@ async function command$1(cliFlags) {
817
829
  }
818
830
  }
819
831
 
832
+ /**
833
+ * This is heavily inspired by https://circleci.com/developer/orbs/orb/circleci/path-filtering.
834
+ *
835
+ * It detects changed files between `HEAD` and a base revision.
836
+ * To match them against configured RegEx tr
837
+ * All matched triggers will be written as a dotenv file.
838
+ * The dotenv file is read in a CircleCI step and be evaluated.
839
+ */
840
+ const git = {
841
+ // https://git-scm.com/docs/git-merge-base
842
+ base: (baseBranch, headRevision) => {
843
+ var _context;
844
+ return _concatInstanceProperty(_context = "git merge-base ".concat(baseBranch, " ")).call(_context, headRevision);
845
+ },
846
+ // https://git-scm.com/docs/git-diff
847
+ changedFiles: (mergeRevision, headRevision) => {
848
+ var _context2;
849
+ return _concatInstanceProperty(_context2 = "git diff --name-only ".concat(mergeRevision, " ")).call(_context2, headRevision);
850
+ },
851
+ commitMessage: headRevision => "git log --format=oneline -n 1 ".concat(headRevision)
852
+ };
853
+ const helpers = {
854
+ async writeOutDotEnvFile(cliFlags, cwd, matchingTriggers) {
855
+ var _context3;
856
+ // If desired read the env file and write out the matching triggers.
857
+ if (!cliFlags.outEnvFile) {
858
+ return;
859
+ }
860
+ const filePath = path$1.join(fs.realpathSync(cwd), cliFlags.outEnvFile);
861
+ const fileContents = _mapInstanceProperty(_context3 = _Object$entries(matchingTriggers)).call(_context3, _ref => {
862
+ var _context5;
863
+ let _ref2 = _slicedToArray(_ref, 2),
864
+ triggerName = _ref2[0],
865
+ triggerValue = _ref2[1];
866
+ const triggerNameForEnvFile = "".concat(snakeCase(triggerName).toUpperCase());
867
+
868
+ // General pipeline optimization hints are not transformed
869
+ if (_startsWithInstanceProperty(triggerName).call(triggerName, 'allowPipelineOptimizations')) {
870
+ var _context4;
871
+ return _concatInstanceProperty(_context4 = "".concat(triggerNameForEnvFile, "=")).call(_context4, triggerValue);
872
+ }
873
+ return _concatInstanceProperty(_context5 = "DID_".concat(triggerNameForEnvFile, "_CHANGE=")).call(_context5, triggerValue);
874
+ }).join('\n');
875
+ await fs.promises.writeFile(filePath, fileContents);
876
+ if (!cliFlags.silent) {
877
+ console.log("\uD83D\uDCDD Wrote out file to '".concat(filePath, "' with contents:"));
878
+ console.log(fileContents);
879
+ }
880
+ },
881
+ async getChangedFiles(cliFlags) {
882
+ var _context6, _context7;
883
+ const baseCmdResult = await command$5(git.base(cliFlags.baseBranch, cliFlags.headRevision));
884
+ const mergeRevision = baseCmdResult.stdout;
885
+ const changedFilesCmdResult = await command$5(git.changedFiles(mergeRevision, cliFlags.headRevision));
886
+ const changedFiles = _filterInstanceProperty(_context6 = _mapInstanceProperty(_context7 = changedFilesCmdResult.stdout.split('\n')).call(_context7, filePath => _trimInstanceProperty(filePath).call(filePath))).call(_context6, filePath => filePath.length > 0);
887
+ return changedFiles;
888
+ },
889
+ async matchTriggersAgainstChangedFiles(cliFlags, config, changedFiles) {
890
+ const matchedTriggers = {};
891
+
892
+ // Evaluate each trigger against each file.
893
+ _forEachInstanceProperty(config).call(config, async trigger => {
894
+ var _trigger$exclude;
895
+ const hasTriggerBeenInitialized = typeof matchedTriggers[trigger.name] === 'number';
896
+
897
+ // Given the trigger with this name was never evaluated it has to be defaulted to 0.
898
+ // As without any matches we should indicate nothing changed.
899
+ if (!hasTriggerBeenInitialized) {
900
+ matchedTriggers[trigger.name] = 0;
901
+ }
902
+ // Given the trigger was already evaluated to be positive we can skip this evaluation.
903
+ if (matchedTriggers[trigger.name] === 1) {
904
+ return matchedTriggers;
905
+ }
906
+
907
+ // In any other case we evaluate this trigger.
908
+ const anyFileChangedForTrigger = _someInstanceProperty(micromatch).call(micromatch, changedFiles, trigger.include, {
909
+ ignore: trigger.ignore
910
+ });
911
+ if (!cliFlags.silent && anyFileChangedForTrigger) {
912
+ console.log("\u2139\uFE0F Files for trigger ".concat(trigger.name, " changed."));
913
+ }
914
+ let onlyExcludedFilesChangedForTrigger = false;
915
+ if (((_trigger$exclude = trigger.exclude) === null || _trigger$exclude === void 0 ? void 0 : _trigger$exclude.length) > 0) {
916
+ // NOTE: `micromatch.every` evaluates if every file matches
917
+ // every pattern.
918
+ // We need to evaluate if every file matches some pattern.
919
+ onlyExcludedFilesChangedForTrigger = _everyInstanceProperty(changedFiles).call(changedFiles, changedFile => {
920
+ return micromatch.isMatch(changedFile, trigger.exclude, {
921
+ ignore: trigger.ignore
922
+ });
923
+ });
924
+ }
925
+ if (!cliFlags.silent && onlyExcludedFilesChangedForTrigger) {
926
+ console.log("\u2139\uFE0F Only excluded files for trigger ".concat(trigger.name, " changed."));
927
+ }
928
+ if (onlyExcludedFilesChangedForTrigger) {
929
+ matchedTriggers[trigger.name] = 0;
930
+ } else {
931
+ matchedTriggers[trigger.name] = Number(anyFileChangedForTrigger);
932
+ }
933
+ return matchedTriggers;
934
+ });
935
+ return matchedTriggers;
936
+ }
937
+ };
938
+ async function command$1(cliFlags, config, cwd) {
939
+ const enablePipelineOptimizations = process.env.ENABLE_PIPELINE_OPTIMIZATIONS === '1';
940
+ const isDevelopmentBranch = cliFlags.branch !== cliFlags.baseBranch;
941
+ const triggersContainingSharedFiles = _filterInstanceProperty(config).call(config, trigger => trigger.containsSharedFiles);
942
+ if (!cliFlags.silent) {
943
+ var _context8;
944
+ console.log("\u2139\uFE0F Pipeline optimizations are ".concat(enablePipelineOptimizations ? 'enabled' : 'disabled', "."));
945
+ console.log("\u2139\uFE0F Changes have been commited to the ".concat(isDevelopmentBranch ? 'a development' : 'the main', " branch."));
946
+ console.log(_concatInstanceProperty(_context8 = "\uD83D\uDEA7 Comparing '".concat(cliFlags.baseBranch, "' against '")).call(_context8, cliFlags.headRevision, "' to determine changed files."));
947
+ }
948
+
949
+ // Collect and parse changed files from git comparing base and head revision.
950
+ const changedFiles = await helpers.getChangedFiles(cliFlags);
951
+ if (!cliFlags.silent) {
952
+ if (changedFiles.length === 0) {
953
+ console.log("\u2139\uFE0F No changes found.");
954
+ } else {
955
+ console.log("\u2139\uFE0F ".concat(changedFiles.length, " changes found."));
956
+ }
957
+ }
958
+
959
+ // Read the trigger file to match the changed files against.
960
+ const matchedTriggers = await helpers.matchTriggersAgainstChangedFiles(cliFlags, config, changedFiles);
961
+ const commitMessageCmdResult = await command$5(git.commitMessage(cliFlags.headRevision));
962
+ const commitMessage = commitMessageCmdResult.stdout;
963
+ const hasCommitMessageTrigger = commitMessage && _includesInstanceProperty(commitMessage).call(commitMessage, '[ci all]');
964
+ const doesSharedTriggerMatch = _someInstanceProperty(triggersContainingSharedFiles).call(triggersContainingSharedFiles, triggerContainingSharedFiles => matchedTriggers[triggerContainingSharedFiles.name] === 1);
965
+ if (!cliFlags.silent) {
966
+ console.log("\u2139\uFE0F The git commit message ".concat(hasCommitMessageTrigger ? 'does' : 'does not', " contain a [ci all] trigger."));
967
+ }
968
+ const doesPackageFolderTriggerMatch = matchedTriggers[cliFlags.triggerName] === 1;
969
+ if (enablePipelineOptimizations && isDevelopmentBranch && !hasCommitMessageTrigger && !doesSharedTriggerMatch && !doesPackageFolderTriggerMatch) {
970
+ if (!cliFlags.silent) {
971
+ console.log("\u2139\uFE0F No relevant changes found for ".concat(cliFlags.triggerName, "."));
972
+ }
973
+ matchedTriggers['allowPipelineOptimizationsForTrigger'] = 1;
974
+ } else {
975
+ if (!cliFlags.silent) {
976
+ console.log("\u2139\uFE0F Relevant changes found for ".concat(cliFlags.triggerName, "."));
977
+ }
978
+ matchedTriggers['allowPipelineOptimizationsForTrigger'] = 0;
979
+ }
980
+ await helpers.writeOutDotEnvFile(cliFlags, cwd, matchedTriggers);
981
+ return matchedTriggers;
982
+ }
983
+
820
984
  function ownKeys(e, r) { var t = _Object$keys(e); if (_Object$getOwnPropertySymbols) { var o = _Object$getOwnPropertySymbols(e); r && (o = _filterInstanceProperty(o).call(o, function (r) { return _Object$getOwnPropertyDescriptor(e, r).enumerable; })), t.push.apply(t, o); } return t; }
821
985
  function _objectSpread(e) { for (var r = 1; r < arguments.length; r++) { var _context, _context2; var t = null != arguments[r] ? arguments[r] : {}; r % 2 ? _forEachInstanceProperty(_context = ownKeys(Object(t), !0)).call(_context, function (r) { _defineProperty(e, r, t[r]); }) : _Object$getOwnPropertyDescriptors ? _Object$defineProperties(e, _Object$getOwnPropertyDescriptors(t)) : _forEachInstanceProperty(_context2 = ownKeys(Object(t))).call(_context2, function (r) { _Object$defineProperty(e, r, _Object$getOwnPropertyDescriptor(t, r)); }); } return e; }
822
986
  const baseMenuProperties = {
@@ -937,7 +1101,7 @@ async function command(cliFlags) {
937
1101
 
938
1102
  var pkgJson = {
939
1103
  name: "@commercetools-frontend/application-cli",
940
- version: "2.3.2",
1104
+ version: "2.5.0",
941
1105
  description: "Internal CLI to manage Merchant Center application deployments across various environments.",
942
1106
  keywords: [
943
1107
  "commercetools",
@@ -972,11 +1136,15 @@ var pkgJson = {
972
1136
  execa: "5.1.1",
973
1137
  jsonschema: "^1.4.1",
974
1138
  listr2: "8.2.0",
1139
+ lodash: "4.17.21",
1140
+ micromatch: "4.0.5",
975
1141
  "node-fetch": "2.7.0",
976
1142
  "ts-deepmerge": "7.0.0"
977
1143
  },
978
1144
  devDependencies: {
979
1145
  "@tsconfig/node20": "20.1.4",
1146
+ "@types/lodash": "^4.14.198",
1147
+ "@types/micromatch": "4.0.6",
980
1148
  "@types/node": "20.12.7",
981
1149
  typescript: "5.2.2"
982
1150
  },
@@ -1018,11 +1186,11 @@ const run = async () => {
1018
1186
  }).option('--ci-assets-root-path [path]', '(optional) A replacement value for the scripts root path only used on CI (e.g. "--ci-assets-root-path=/root/") used in generated scripts.').option('--skip-menu', '(optional) If provided, it will skip uploading the `menu.json`.', {
1019
1187
  default: false
1020
1188
  }).action(async options => {
1021
- await command$3(options, cwd);
1189
+ await command$4(options, cwd);
1022
1190
  });
1023
1191
  const usageCompileMenu = 'Compile the menu links of an application into a `menu.json`. This is only required for internal applications';
1024
1192
  cli.command('compile-menu', usageCompileMenu).usage("compile-menu \n\n ".concat(usageCompileMenu)).option('--dotenv-folder [string]', '(optional) The path to a folder containing a dotenv file `.env.production` and a cloud-environment specific dotenv file (for example `.env.gcp-production-eu`). Those values are parsed and merged together to be used by the application config.').action(async options => {
1025
- await command$2(options, cwd);
1193
+ await command$3(options, cwd);
1026
1194
  });
1027
1195
  const usageValidateMenu = 'Validate compiled `menu.json` file';
1028
1196
  cli.command('validate-menu', usageValidateMenu).usage("validate-menu \n\n ".concat(usageValidateMenu)).option('--input-file <path>', '(required) The path to the `menu.json` file to be validated.').option('--navigation [string]', '(optional) Location of the menu navigation. Possible values are `top`.').action(async options => {
@@ -1032,7 +1200,20 @@ const run = async () => {
1032
1200
  cli.command('create-version', usageCreateVersion).usage("create-version \n\n ".concat(usageCreateVersion)).option('--version-url <url>', "(required) The path of an application's current `version.json` within the storage bucket.").option('--rollbacks [int]', '(optional) The number of max rollbacks to keep', {
1033
1201
  default: 15
1034
1202
  }).option('--out-file [path]', '(optional) The path to the file where to write the JSON. If not specified, the JSON is printed to stdout.').action(async options => {
1035
- await command$1(options);
1203
+ await command$2(options);
1204
+ });
1205
+
1206
+ // Command: Evaluate change triggers
1207
+ const usageEvaluateChangeTriggers = 'Evaluates changed files against a base and evaluates them against defined triggers.';
1208
+ cli.command('evaluate-change-triggers', usageEvaluateChangeTriggers).usage("evaluate-change-triggers \n\n ".concat(usageEvaluateChangeTriggers)).option('--branch <string>', 'The branch of the pull request', {
1209
+ default: process.env.CIRCLE_BRANCH
1210
+ }).option('--base-branch <string>', 'The base revision of the git commit compare against (e.g. "main")').option('--head-revision <string>', 'The revision of the git head to compare with', {
1211
+ default: process.env.CIRCLE_SHA1
1212
+ }).option('--trigger-name <string>', 'The trigger to evaluate for.').option('--silent', '(optional) Disable logging', {
1213
+ default: false
1214
+ }).option('--out-env-file [string]', '(optional) A file path where the matched triggers are written as a dotenv file.').action(async options => {
1215
+ const config = await loadConfig('circleci-change-triggers', []);
1216
+ await command$1(options, config, cwd);
1036
1217
  });
1037
1218
  cli.help();
1038
1219
  cli.version(pkgJson.version);