@commercetools-frontend/application-cli 0.0.0-FEC-212-react19-20250122084835

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,1354 @@
1
+ import { cac } from 'cac';
2
+ import _Object$keys from '@babel/runtime-corejs3/core-js-stable/object/keys';
3
+ import _Object$getOwnPropertySymbols from '@babel/runtime-corejs3/core-js-stable/object/get-own-property-symbols';
4
+ import _filterInstanceProperty from '@babel/runtime-corejs3/core-js-stable/instance/filter';
5
+ import _Object$getOwnPropertyDescriptor from '@babel/runtime-corejs3/core-js-stable/object/get-own-property-descriptor';
6
+ import _forEachInstanceProperty from '@babel/runtime-corejs3/core-js-stable/instance/for-each';
7
+ import _Object$getOwnPropertyDescriptors from '@babel/runtime-corejs3/core-js-stable/object/get-own-property-descriptors';
8
+ import _Object$defineProperties from '@babel/runtime-corejs3/core-js-stable/object/define-properties';
9
+ import _Object$defineProperty from '@babel/runtime-corejs3/core-js-stable/object/define-property';
10
+ import _slicedToArray from '@babel/runtime-corejs3/helpers/esm/slicedToArray';
11
+ import _defineProperty from '@babel/runtime-corejs3/helpers/esm/defineProperty';
12
+ import _mapInstanceProperty from '@babel/runtime-corejs3/core-js-stable/instance/map';
13
+ import _Object$entries from '@babel/runtime-corejs3/core-js-stable/object/entries';
14
+ import _Set from '@babel/runtime-corejs3/core-js-stable/set';
15
+ import _flatMapInstanceProperty from '@babel/runtime-corejs3/core-js-stable/instance/flat-map';
16
+ import { s as storageProviders, l as loadStorageBucketsConfig, c as clusterContexts, a as loadConfig } from '../../dist/storage-buckets-config-fa564d15.esm.js';
17
+ import fs from 'node:fs';
18
+ import path$1 from 'node:path';
19
+ import { Listr } from 'listr2';
20
+ import execa, { command as command$5 } from 'execa';
21
+ import { findRootSync } from '@manypkg/find-root';
22
+ import path from 'path';
23
+ import _possibleConstructorReturn from '@babel/runtime-corejs3/helpers/esm/possibleConstructorReturn';
24
+ import _get from '@babel/runtime-corejs3/helpers/esm/get';
25
+ import _getPrototypeOf from '@babel/runtime-corejs3/helpers/esm/getPrototypeOf';
26
+ import _inherits from '@babel/runtime-corejs3/helpers/esm/inherits';
27
+ import _classCallCheck from '@babel/runtime-corejs3/helpers/esm/classCallCheck';
28
+ import _createClass from '@babel/runtime-corejs3/helpers/esm/createClass';
29
+ import _classPrivateFieldLooseBase from '@babel/runtime-corejs3/helpers/esm/classPrivateFieldLooseBase';
30
+ import _classPrivateFieldLooseKey from '@babel/runtime-corejs3/helpers/esm/classPrivateFieldLooseKey';
31
+ import _Reflect$construct from '@babel/runtime-corejs3/core-js-stable/reflect/construct';
32
+ import dotenv from 'dotenv';
33
+ import fs$1 from 'fs';
34
+ import _findInstanceProperty from '@babel/runtime-corejs3/core-js-stable/instance/find';
35
+ import _JSON$stringify from '@babel/runtime-corejs3/core-js-stable/json/stringify';
36
+ import { processConfig } from '@commercetools-frontend/application-config';
37
+ import { getSupportedLocales } from '@commercetools-frontend/l10n';
38
+ import _sliceInstanceProperty from '@babel/runtime-corejs3/core-js-stable/instance/slice';
39
+ import _startsWithInstanceProperty from '@babel/runtime-corejs3/core-js-stable/instance/starts-with';
40
+ import _trimInstanceProperty from '@babel/runtime-corejs3/core-js-stable/instance/trim';
41
+ import _someInstanceProperty from '@babel/runtime-corejs3/core-js-stable/instance/some';
42
+ import _everyInstanceProperty from '@babel/runtime-corejs3/core-js-stable/instance/every';
43
+ import _includesInstanceProperty from '@babel/runtime-corejs3/core-js-stable/instance/includes';
44
+ import micromatch from 'micromatch';
45
+ import snakeCase from 'lodash/snakeCase';
46
+ import { Validator } from 'jsonschema';
47
+ import 'cosmiconfig';
48
+ import 'ts-deepmerge';
49
+ import 'lodash';
50
+
51
+ function getApplicationDirectory(cwd) {
52
+ return fs.realpathSync(cwd);
53
+ }
54
+
55
+ function resolveInApplication(relativePath, cwd) {
56
+ return path.resolve(getApplicationDirectory(cwd), relativePath);
57
+ }
58
+
59
+ function isCI() {
60
+ // @ts-expect-error The env is sometimes overwritten by code to a boolean
61
+ return process.env.CI === true || process.env.CI === 'true';
62
+ }
63
+
64
+ function ownKeys$4(e, r) { var t = _Object$keys(e); if (_Object$getOwnPropertySymbols) { var o = _Object$getOwnPropertySymbols(e); r && (o = _filterInstanceProperty(o).call(o, function (r) { return _Object$getOwnPropertyDescriptor(e, r).enumerable; })), t.push.apply(t, o); } return t; }
65
+ function _objectSpread$4(e) { for (var r = 1; r < arguments.length; r++) { var _context3, _context4; var t = null != arguments[r] ? arguments[r] : {}; r % 2 ? _forEachInstanceProperty(_context3 = ownKeys$4(Object(t), !0)).call(_context3, function (r) { _defineProperty(e, r, t[r]); }) : _Object$getOwnPropertyDescriptors ? _Object$defineProperties(e, _Object$getOwnPropertyDescriptors(t)) : _forEachInstanceProperty(_context4 = ownKeys$4(Object(t))).call(_context4, function (r) { _Object$defineProperty(e, r, _Object$getOwnPropertyDescriptor(t, r)); }); } return e; }
66
+ function _callSuper(t, o, e) { return o = _getPrototypeOf(o), _possibleConstructorReturn(t, _isNativeReflectConstruct() ? _Reflect$construct(o, e || [], _getPrototypeOf(t).constructor) : o.apply(t, e)); }
67
+ function _isNativeReflectConstruct() { try { var t = !Boolean.prototype.valueOf.call(_Reflect$construct(Boolean, [], function () {})); } catch (t) {} return (_isNativeReflectConstruct = function () { return !!t; })(); }
68
+ var _bucketRegion$1 = /*#__PURE__*/_classPrivateFieldLooseKey("bucketRegion");
69
+ var _bucketEnvironment$1 = /*#__PURE__*/_classPrivateFieldLooseKey("bucketEnvironment");
70
+ let StorageProvider = /*#__PURE__*/function () {
71
+ function StorageProvider(config) {
72
+ _classCallCheck(this, StorageProvider);
73
+ _Object$defineProperty(this, _bucketRegion$1, {
74
+ writable: true,
75
+ value: void 0
76
+ });
77
+ _Object$defineProperty(this, _bucketEnvironment$1, {
78
+ writable: true,
79
+ value: void 0
80
+ });
81
+ _classPrivateFieldLooseBase(this, _bucketRegion$1)[_bucketRegion$1] = config.bucketRegion;
82
+ _classPrivateFieldLooseBase(this, _bucketEnvironment$1)[_bucketEnvironment$1] = config.bucketEnvironment;
83
+ }
84
+ /**
85
+ * Construct the storage bucket URL for the specific application and cloud environment.
86
+ *
87
+ * 1. Static assets are uploaded to `:bucketRegion/:prNumber?/:applicationName`
88
+ * 2. The application index is uploaded to `:bucketRegion/:prNumber?/:applicationName/:cloudEnvironment`
89
+ *
90
+ * This allows all cloud environments sharing the same static assets while each application's index
91
+ * is uploaded with different headers (e.g. CSP rules).
92
+ */
93
+ return _createClass(StorageProvider, [{
94
+ key: "getBucketNamespace",
95
+ value: function getBucketNamespace(prNumber) {
96
+ if (!prNumber) return;
97
+ if (prNumber === 'merchant-center-preview') return prNumber;
98
+ return `mc-${prNumber}`;
99
+ }
100
+
101
+ /**
102
+ * Construct the storage bucket URL for the specific application and cloud environment.
103
+ */
104
+ }, {
105
+ key: "getAssetsBucketUrl",
106
+ value: function getAssetsBucketUrl(_ref) {
107
+ var _context;
108
+ let applicationName = _ref.applicationName,
109
+ bucketProtocol = _ref.bucketProtocol,
110
+ bucketNamespace = _ref.bucketNamespace,
111
+ tag = _ref.tag;
112
+ if (!_classPrivateFieldLooseBase(this, _bucketRegion$1)[_bucketRegion$1]) {
113
+ throw new Error("'bucketRegion' is not defined. Required to determine 'assetsBucketUrl'.");
114
+ }
115
+ const storageProvider = storageProviders[tag];
116
+ const assetBuketUrl = storageProvider.urls.bucket?.[_classPrivateFieldLooseBase(this, _bucketRegion$1)[_bucketRegion$1]] ?? _classPrivateFieldLooseBase(this, _bucketRegion$1)[_bucketRegion$1];
117
+ const assetsBucketUrl = _filterInstanceProperty(_context = [assetBuketUrl, bucketNamespace, applicationName]).call(_context, Boolean).join('/');
118
+ return `${bucketProtocol}${assetsBucketUrl}`;
119
+ }
120
+ }, {
121
+ key: "getApplicationIndexBucketUrl",
122
+ value: function getApplicationIndexBucketUrl(_ref2) {
123
+ let tag = _ref2.tag,
124
+ prNumber = _ref2.prNumber,
125
+ applicationName = _ref2.applicationName,
126
+ bucketProtocol = _ref2.bucketProtocol,
127
+ bucketNamespace = _ref2.bucketNamespace;
128
+ const applicationAssetsBucketUrl = this.getAssetsBucketUrl({
129
+ tag,
130
+ applicationName,
131
+ prNumber,
132
+ bucketProtocol,
133
+ bucketNamespace
134
+ });
135
+ const applicationIndexBucketUrl = `${applicationAssetsBucketUrl}/${_classPrivateFieldLooseBase(this, _bucketEnvironment$1)[_bucketEnvironment$1]}`;
136
+ return applicationIndexBucketUrl;
137
+ }
138
+ }, {
139
+ key: "getCdnUrl",
140
+ value: function getCdnUrl(_ref3) {
141
+ var _context2;
142
+ let applicationName = _ref3.applicationName,
143
+ prNumber = _ref3.prNumber,
144
+ publicBaseUrl = _ref3.publicBaseUrl,
145
+ excludeBucketRegion = _ref3.excludeBucketRegion;
146
+ return _filterInstanceProperty(_context2 = [publicBaseUrl, excludeBucketRegion ? null : _classPrivateFieldLooseBase(this, _bucketRegion$1)[_bucketRegion$1], this.getBucketNamespace(prNumber), applicationName]).call(_context2, Boolean).join('/');
147
+ }
148
+ }, {
149
+ key: "getPublicBaseUrl",
150
+ value: function getPublicBaseUrl(tag) {
151
+ if (!_classPrivateFieldLooseBase(this, _bucketEnvironment$1)[_bucketEnvironment$1]) {
152
+ throw new Error("'bucketEnvironment' is not defined. Required to determine 'publicBaseUrl'.");
153
+ }
154
+ const storageProvider = storageProviders[tag];
155
+ const publicBaseUrl = storageProvider.urls.public[_classPrivateFieldLooseBase(this, _bucketEnvironment$1)[_bucketEnvironment$1]] ?? storageProvider.urls.public.default;
156
+ if (!publicBaseUrl) {
157
+ throw new Error(`'publicBaseUrl' is not defined for '${tag}' storage provider for ${_classPrivateFieldLooseBase(this, _bucketEnvironment$1)[_bucketEnvironment$1]} or as default.`);
158
+ }
159
+ return publicBaseUrl;
160
+ }
161
+ }]);
162
+ }();
163
+ var _bucketRegion2$1 = /*#__PURE__*/_classPrivateFieldLooseKey("bucketRegion");
164
+ var _bucketEnvironment2$1 = /*#__PURE__*/_classPrivateFieldLooseKey("bucketEnvironment");
165
+ let GoogleStorageProvider = /*#__PURE__*/function (_StorageProvider2) {
166
+ function GoogleStorageProvider(config) {
167
+ var _this;
168
+ _classCallCheck(this, GoogleStorageProvider);
169
+ _this = _callSuper(this, GoogleStorageProvider, [config]);
170
+ _Object$defineProperty(_this, _bucketRegion2$1, {
171
+ writable: true,
172
+ value: void 0
173
+ });
174
+ _Object$defineProperty(_this, _bucketEnvironment2$1, {
175
+ writable: true,
176
+ value: void 0
177
+ });
178
+ _classPrivateFieldLooseBase(_this, _bucketRegion2$1)[_bucketRegion2$1] = config.bucketRegion;
179
+ _classPrivateFieldLooseBase(_this, _bucketEnvironment2$1)[_bucketEnvironment2$1] = config.bucketEnvironment;
180
+ return _this;
181
+ }
182
+ _inherits(GoogleStorageProvider, _StorageProvider2);
183
+ return _createClass(GoogleStorageProvider, [{
184
+ key: "getTag",
185
+ value: function getTag() {
186
+ return 'gs';
187
+ }
188
+ }, {
189
+ key: "getBucketRegion",
190
+ value: function getBucketRegion() {
191
+ return _classPrivateFieldLooseBase(this, _bucketRegion2$1)[_bucketRegion2$1];
192
+ }
193
+ }, {
194
+ key: "getBucketEnvironment",
195
+ value: function getBucketEnvironment() {
196
+ return _classPrivateFieldLooseBase(this, _bucketEnvironment2$1)[_bucketEnvironment2$1];
197
+ }
198
+ }, {
199
+ key: "getProtocol",
200
+ value: function getProtocol() {
201
+ return 'gs://';
202
+ }
203
+ }, {
204
+ key: "getPublicBaseUrl",
205
+ value: function getPublicBaseUrl() {
206
+ return _get(_getPrototypeOf(GoogleStorageProvider.prototype), "getPublicBaseUrl", this).call(this, this.getTag());
207
+ }
208
+ }, {
209
+ key: "getCdnUrl",
210
+ value: function getCdnUrl(config) {
211
+ return _get(_getPrototypeOf(GoogleStorageProvider.prototype), "getCdnUrl", this).call(this, _objectSpread$4({
212
+ publicBaseUrl: this.getPublicBaseUrl()
213
+ }, config));
214
+ }
215
+ }, {
216
+ key: "getAssetsBucketUrl",
217
+ value: function getAssetsBucketUrl(config) {
218
+ return _get(_getPrototypeOf(GoogleStorageProvider.prototype), "getAssetsBucketUrl", this).call(this, _objectSpread$4({
219
+ tag: this.getTag(),
220
+ bucketProtocol: this.getProtocol(),
221
+ bucketNamespace: _get(_getPrototypeOf(GoogleStorageProvider.prototype), "getBucketNamespace", this).call(this, config.prNumber)
222
+ }, config));
223
+ }
224
+ }, {
225
+ key: "getApplicationIndexBucketUrl",
226
+ value: function getApplicationIndexBucketUrl(config) {
227
+ return _get(_getPrototypeOf(GoogleStorageProvider.prototype), "getApplicationIndexBucketUrl", this).call(this, _objectSpread$4({
228
+ tag: this.getTag(),
229
+ bucketProtocol: this.getProtocol(),
230
+ bucketNamespace: _get(_getPrototypeOf(GoogleStorageProvider.prototype), "getBucketNamespace", this).call(this, config.prNumber)
231
+ }, config));
232
+ }
233
+ }]);
234
+ }(StorageProvider);
235
+ var _bucketRegion3 = /*#__PURE__*/_classPrivateFieldLooseKey("bucketRegion");
236
+ var _bucketEnvironment3 = /*#__PURE__*/_classPrivateFieldLooseKey("bucketEnvironment");
237
+ let AwsStorageProvider = /*#__PURE__*/function (_StorageProvider3) {
238
+ function AwsStorageProvider(config) {
239
+ var _this2;
240
+ _classCallCheck(this, AwsStorageProvider);
241
+ _this2 = _callSuper(this, AwsStorageProvider, [config]);
242
+ _Object$defineProperty(_this2, _bucketRegion3, {
243
+ writable: true,
244
+ value: void 0
245
+ });
246
+ _Object$defineProperty(_this2, _bucketEnvironment3, {
247
+ writable: true,
248
+ value: void 0
249
+ });
250
+ _classPrivateFieldLooseBase(_this2, _bucketRegion3)[_bucketRegion3] = config.bucketRegion;
251
+ _classPrivateFieldLooseBase(_this2, _bucketEnvironment3)[_bucketEnvironment3] = config.bucketEnvironment;
252
+ return _this2;
253
+ }
254
+ _inherits(AwsStorageProvider, _StorageProvider3);
255
+ return _createClass(AwsStorageProvider, [{
256
+ key: "getTag",
257
+ value: function getTag() {
258
+ return 's3';
259
+ }
260
+ }, {
261
+ key: "getBucketRegion",
262
+ value: function getBucketRegion() {
263
+ return _classPrivateFieldLooseBase(this, _bucketRegion3)[_bucketRegion3];
264
+ }
265
+ }, {
266
+ key: "getBucketEnvironment",
267
+ value: function getBucketEnvironment() {
268
+ return _classPrivateFieldLooseBase(this, _bucketEnvironment3)[_bucketEnvironment3];
269
+ }
270
+ }, {
271
+ key: "getProtocol",
272
+ value: function getProtocol() {
273
+ return 's3://';
274
+ }
275
+ }, {
276
+ key: "getPublicBaseUrl",
277
+ value: function getPublicBaseUrl() {
278
+ return _get(_getPrototypeOf(AwsStorageProvider.prototype), "getPublicBaseUrl", this).call(this, this.getTag());
279
+ }
280
+ }, {
281
+ key: "getCdnUrl",
282
+ value: function getCdnUrl(config) {
283
+ return _get(_getPrototypeOf(AwsStorageProvider.prototype), "getCdnUrl", this).call(this, _objectSpread$4({
284
+ publicBaseUrl: this.getPublicBaseUrl(),
285
+ excludeBucketRegion: true
286
+ }, config));
287
+ }
288
+ }, {
289
+ key: "getAssetsBucketUrl",
290
+ value: function getAssetsBucketUrl(config) {
291
+ return _get(_getPrototypeOf(AwsStorageProvider.prototype), "getAssetsBucketUrl", this).call(this, _objectSpread$4({
292
+ tag: this.getTag(),
293
+ bucketProtocol: this.getProtocol(),
294
+ bucketNamespace: _get(_getPrototypeOf(AwsStorageProvider.prototype), "getBucketNamespace", this).call(this, config.prNumber)
295
+ }, config));
296
+ }
297
+ }, {
298
+ key: "getApplicationIndexBucketUrl",
299
+ value: function getApplicationIndexBucketUrl(config) {
300
+ return _get(_getPrototypeOf(AwsStorageProvider.prototype), "getApplicationIndexBucketUrl", this).call(this, _objectSpread$4({
301
+ tag: this.getTag(),
302
+ bucketProtocol: this.getProtocol(),
303
+ bucketNamespace: _get(_getPrototypeOf(AwsStorageProvider.prototype), "getBucketNamespace", this).call(this, config.prNumber)
304
+ }, config));
305
+ }
306
+ }]);
307
+ }(StorageProvider);
308
+ function getStorageProvider(storageProvider, config) {
309
+ switch (storageProvider) {
310
+ case 'gs':
311
+ return new GoogleStorageProvider(config);
312
+ case 's3':
313
+ return new AwsStorageProvider(config);
314
+ default:
315
+ throw new Error(`Storage provider ${storageProvider} not supported`);
316
+ }
317
+ }
318
+
319
+ var _bucketEnvironment = /*#__PURE__*/_classPrivateFieldLooseKey("bucketEnvironment");
320
+ var _bucketRegion = /*#__PURE__*/_classPrivateFieldLooseKey("bucketRegion");
321
+ let GoogleStorageUploadScriptsGenerator = /*#__PURE__*/function () {
322
+ function GoogleStorageUploadScriptsGenerator(config) {
323
+ _classCallCheck(this, GoogleStorageUploadScriptsGenerator);
324
+ _Object$defineProperty(this, _bucketEnvironment, {
325
+ writable: true,
326
+ value: void 0
327
+ });
328
+ _Object$defineProperty(this, _bucketRegion, {
329
+ writable: true,
330
+ value: void 0
331
+ });
332
+ _classPrivateFieldLooseBase(this, _bucketRegion)[_bucketRegion] = config.bucketRegion;
333
+ _classPrivateFieldLooseBase(this, _bucketEnvironment)[_bucketEnvironment] = config.bucketEnvironment;
334
+ }
335
+ return _createClass(GoogleStorageUploadScriptsGenerator, [{
336
+ key: "getApplicationIndexUploadScript",
337
+ value: function getApplicationIndexUploadScript(_ref) {
338
+ let packageManagerName = _ref.packageManagerName,
339
+ bucketUrl = _ref.bucketUrl,
340
+ cdnUrl = _ref.cdnUrl,
341
+ buildRevision = _ref.buildRevision,
342
+ buildNumber = _ref.buildNumber,
343
+ applicationIndexOutFile = _ref.applicationIndexOutFile;
344
+ if (!_classPrivateFieldLooseBase(this, _bucketEnvironment)[_bucketEnvironment]) {
345
+ throw new Error("Missing 'bucketEnvironment' when generating application index.");
346
+ }
347
+ return `
348
+ #!/usr/bin/env bash
349
+
350
+ set -e
351
+
352
+ echo "Uploading compiled ${applicationIndexOutFile} to Google Storage bucket ${bucketUrl}"
353
+
354
+ gcloud storage cp \\
355
+ "$(dirname "$0")/${applicationIndexOutFile}" \\
356
+ "${bucketUrl}/" \\
357
+ -z html \\
358
+ --content-type="text/html" \\
359
+ --cache-control="public,max-age=0,no-transform"
360
+
361
+ echo "Creating version.json and uploading it to bucket ${bucketUrl}"
362
+
363
+ NODE_ENV=production ${packageManagerName} application-cli create-version \\
364
+ --version-url=${cdnUrl}/${_classPrivateFieldLooseBase(this, _bucketEnvironment)[_bucketEnvironment]}/version.json \\
365
+ --build-revision=${buildRevision} \\
366
+ --build-number=${buildNumber} \\
367
+ --out-file=$(dirname "$0")/version.json
368
+
369
+ gcloud storage cp \\
370
+ "$(dirname "$0")/version.json" \\
371
+ "${bucketUrl}/" \\
372
+ -z json \\
373
+ --content-type="application/json" \\
374
+ --cache-control="public,max-age=0,no-transform"
375
+ `;
376
+ }
377
+ }, {
378
+ key: "getProductionBundlesUploadScript",
379
+ value: function getProductionBundlesUploadScript(_ref2) {
380
+ let bucketUrl = _ref2.bucketUrl,
381
+ assetsPath = _ref2.assetsPath,
382
+ skipMenu = _ref2.skipMenu;
383
+ return `
384
+ #!/usr/bin/env bash
385
+
386
+ set -e
387
+
388
+ # NOTES:
389
+ # https://cloud.google.com/sdk/gcloud/reference/storage/cp
390
+ # 1. The '-z' option triggers compressing the assets before
391
+ # uploading them and sets the 'Content-Encoding' to 'gzip'.
392
+ # 2. The 'Accept-encoding: gzip' is set automatically by the 'gcloud storage'.
393
+ # 3. The 'max-age' is set to 1 year which is considered the maximum
394
+ # "valid" lifetime of an asset to be cached.
395
+ # 4. The '-n' will skip uploading existing files and prevents them to
396
+ # be overwritten
397
+ echo "Uploading static assets to Google Storage bucket ${bucketUrl}"
398
+
399
+ gcloud storage cp \\
400
+ ${assetsPath}/public/{*.css,*.js,*.js.map,*.html} \\
401
+ "${bucketUrl}" \\
402
+ -n \\
403
+ -z js,css \\
404
+ --cache-control="public,max-age=31536000,no-transform"
405
+
406
+ # We need to upload the PNG and HTML files separately because we want them
407
+ # to be able to overwrite the existing files (if any). For instance, the
408
+ # file or the favicons.
409
+ # This is controlled with the '-n' option (which is used for the JS and CSS
410
+ # as we don't want to overwrite them)
411
+ gcloud storage cp \\
412
+ ${assetsPath}/public/{*.png,robots.txt} \\
413
+ "${bucketUrl}" \\
414
+ -z txt \\
415
+ --cache-control="public,max-age=31536000,no-transform"
416
+
417
+ if ${skipMenu}; then
418
+ echo "Skipping menu.json upload"
419
+ else
420
+ echo "Uploading menu.json to bucket ${bucketUrl}"
421
+ # NOTE: somehow the 'cache-control:private' doesn't work.
422
+ # I mean, the file is uploaded with the correct metadata but when I fetch
423
+ # the file the response contains the header
424
+ # 'cache-control: public,max-age=31536000,no-transform', even though the
425
+ # documentation clearly states that by marking the header as 'private' will
426
+ # disable the cache (for publicly readable objects).
427
+ # https://cloud.google.com/storage/docs/gsutil/addlhelp/WorkingWithObjectMetadata#cache-control
428
+ # However, I found out that, by requesting the file with any RANDOM
429
+ # query parameter, will instruct the storage to return a 'fresh' object
430
+ # (without any cache control).
431
+ # Unofficial source: https://stackoverflow.com/a/49052895
432
+ # This seems to be the 'easiest' option to 'disable' the cache for public
433
+ # objects. Other alternative approaces are:
434
+ # * make the object private with some simple ACL (private objects are not cached)
435
+ # * suffix the file name with e.g. the git SHA, so we have different files
436
+ # for each upload ('index.html.template-\${CIRCLE_SHA1}'). The server knows
437
+ # the git SHA on runtime and can get the correct file when it starts.
438
+ # * find out why the 'private' cache control does not work
439
+ gcloud storage cp \\
440
+ ${assetsPath}/menu.json \\
441
+ ${bucketUrl} \\
442
+ -z json \\
443
+ --content-type="application/json" \\
444
+ --cache-control="public,max-age=0,no-transform"
445
+ fi
446
+ `;
447
+ }
448
+ }]);
449
+ }();
450
+ var _bucketEnvironment2 = /*#__PURE__*/_classPrivateFieldLooseKey("bucketEnvironment");
451
+ var _bucketRegion2 = /*#__PURE__*/_classPrivateFieldLooseKey("bucketRegion");
452
+ let AwsStorageUploadScriptsGenerator = /*#__PURE__*/function () {
453
+ function AwsStorageUploadScriptsGenerator(config) {
454
+ _classCallCheck(this, AwsStorageUploadScriptsGenerator);
455
+ _Object$defineProperty(this, _bucketEnvironment2, {
456
+ writable: true,
457
+ value: void 0
458
+ });
459
+ _Object$defineProperty(this, _bucketRegion2, {
460
+ writable: true,
461
+ value: void 0
462
+ });
463
+ _classPrivateFieldLooseBase(this, _bucketRegion2)[_bucketRegion2] = config.bucketRegion;
464
+ _classPrivateFieldLooseBase(this, _bucketEnvironment2)[_bucketEnvironment2] = config.bucketEnvironment;
465
+ }
466
+ return _createClass(AwsStorageUploadScriptsGenerator, [{
467
+ key: "getApplicationIndexUploadScript",
468
+ value: function getApplicationIndexUploadScript(_ref3) {
469
+ let packageManagerName = _ref3.packageManagerName,
470
+ bucketUrl = _ref3.bucketUrl,
471
+ cdnUrl = _ref3.cdnUrl,
472
+ buildRevision = _ref3.buildRevision,
473
+ buildNumber = _ref3.buildNumber,
474
+ applicationIndexOutFile = _ref3.applicationIndexOutFile;
475
+ return `
476
+ #!/usr/bin/env bash
477
+
478
+ echo "Uploading static assets to Amazon S3 bucket ${bucketUrl}"
479
+
480
+ set -e
481
+
482
+ aws s3 cp "$(dirname "$0")/${applicationIndexOutFile}" \\
483
+ "${bucketUrl}/" \\
484
+ --content-type="text/html" \\
485
+ --cache-control="public,max-age=0,no-transform" \\
486
+ --profile ${_classPrivateFieldLooseBase(this, _bucketRegion2)[_bucketRegion2]}
487
+
488
+ echo "Creating version.json and uploading it to bucket ${bucketUrl}"
489
+
490
+ NODE_ENV=production ${packageManagerName} application-cli create-version \\
491
+ --version-url=${cdnUrl}/${_classPrivateFieldLooseBase(this, _bucketEnvironment2)[_bucketEnvironment2]}/version.json \\
492
+ --build-revision=${buildRevision} \\
493
+ --build-number=${buildNumber} \\
494
+ --out-file=$(dirname "$0")/version.json
495
+
496
+ aws s3 cp "$(dirname "$0")/version.json" \\
497
+ "${bucketUrl}/" \\
498
+ --content-type="application/json" \\
499
+ --cache-control="public,max-age=0,no-transform" \\
500
+ --profile ${_classPrivateFieldLooseBase(this, _bucketRegion2)[_bucketRegion2]}
501
+ `;
502
+ }
503
+ }, {
504
+ key: "getProductionBundlesUploadScript",
505
+ value: function getProductionBundlesUploadScript(_ref4) {
506
+ let bucketUrl = _ref4.bucketUrl,
507
+ assetsPath = _ref4.assetsPath,
508
+ skipMenu = _ref4.skipMenu;
509
+ return `
510
+ #!/usr/bin/env bash
511
+
512
+ echo "Uploading static assets to Amazon S3 bucket ${bucketUrl}"
513
+
514
+ set -e
515
+
516
+ # NOTE:
517
+ # The sync command on the AWS CLI is different to the -n option on the gcloud CLI.
518
+ # Sync will only upload files that are not already in the bucket, but it will skip existing ones
519
+ # that have been changed locally.
520
+ # The -n option on the gcloud CLI will skip uploading existing files and prevents them to be overwritten.
521
+ # https://docs.aws.amazon.com/cli/latest/reference/s3/sync.html
522
+ # https://cloud.google.com/sdk/gcloud/reference/storage/cp
523
+ #
524
+ # Compression (gzip) is enabled on CloudFront by default. Hence compression does happing while uploading.
525
+ # https://docs.aws.amazon.com/AmazonCloudFront/latest/DeveloperGuide/ServingCompressedFiles.html#compressed-content-cloudfront-configuring
526
+ aws s3 sync ${assetsPath}/public \\
527
+ "${bucketUrl}" \\
528
+ --exclude "*" \\
529
+ --include "*.css" \\
530
+ --include "*.js" \\
531
+ --include "*.js.map" \\
532
+ --include "*.html" \\
533
+ --cache-control="public,max-age=31536000,no-transform" \\
534
+ --profile ${_classPrivateFieldLooseBase(this, _bucketRegion2)[_bucketRegion2]}
535
+
536
+ # We need to upload the PNG and HTML files separately because we want them
537
+ # to be able to overwrite the existing files (if any). For instance, the
538
+ # file or the favicons.
539
+ aws s3 cp ${assetsPath}/public \\
540
+ "${bucketUrl}" \\
541
+ --recursive \\
542
+ --exclude "*" \\
543
+ --include "*.png" \\
544
+ --include "robots.txt" \\
545
+ --cache-control="public,max-age=31536000,no-transform" \\
546
+ --profile ${_classPrivateFieldLooseBase(this, _bucketRegion2)[_bucketRegion2]}
547
+
548
+ if ${skipMenu}; then
549
+ echo "Skipping menu.json upload"
550
+ else
551
+ echo "Uploading menu.json to bucket ${bucketUrl}"
552
+
553
+ aws s3 cp ${assetsPath}/menu.json \\
554
+ "${bucketUrl}/" \\
555
+ --content-type="application/json" \\
556
+ --cache-control="public,max-age=0,no-transform" \\
557
+ --profile ${_classPrivateFieldLooseBase(this, _bucketRegion2)[_bucketRegion2]}
558
+ fi
559
+ `;
560
+ }
561
+ }]);
562
+ }();
563
+ function getUploadScriptsGenerator(storageProvider, config) {
564
+ switch (storageProvider) {
565
+ case 'gs':
566
+ return new GoogleStorageUploadScriptsGenerator(config);
567
+ case 's3':
568
+ return new AwsStorageUploadScriptsGenerator(config);
569
+ default:
570
+ throw new Error(`Storage provider ${storageProvider} not supported`);
571
+ }
572
+ }
573
+
574
+ function doesFileExist(filePath) {
575
+ try {
576
+ fs$1.accessSync(filePath);
577
+ return true;
578
+ } catch (e) {
579
+ return false;
580
+ }
581
+ }
582
+
583
+ function ownKeys$3(e, r) { var t = _Object$keys(e); if (_Object$getOwnPropertySymbols) { var o = _Object$getOwnPropertySymbols(e); r && (o = _filterInstanceProperty(o).call(o, function (r) { return _Object$getOwnPropertyDescriptor(e, r).enumerable; })), t.push.apply(t, o); } return t; }
584
+ function _objectSpread$3(e) { for (var r = 1; r < arguments.length; r++) { var _context, _context2; var t = null != arguments[r] ? arguments[r] : {}; r % 2 ? _forEachInstanceProperty(_context = ownKeys$3(Object(t), !0)).call(_context, function (r) { _defineProperty(e, r, t[r]); }) : _Object$getOwnPropertyDescriptors ? _Object$defineProperties(e, _Object$getOwnPropertyDescriptors(t)) : _forEachInstanceProperty(_context2 = ownKeys$3(Object(t))).call(_context2, function (r) { _Object$defineProperty(e, r, _Object$getOwnPropertyDescriptor(t, r)); }); } return e; }
585
+ function loadDotenvFiles(_ref) {
586
+ let dotenvPath = _ref.dotenvPath,
587
+ cloudEnvironment = _ref.cloudEnvironment;
588
+ // No path requested, skip.
589
+ if (!dotenvPath) {
590
+ return {};
591
+ }
592
+
593
+ // Check if the given path exists.
594
+ if (!doesFileExist(dotenvPath)) {
595
+ throw new Error(`The dotenv folder path does not exist: "${dotenvPath}".`);
596
+ }
597
+
598
+ // Load the environment values
599
+ const sharedDotenvFile = '.env.production';
600
+ const cloudDotenvFile = `.env.${cloudEnvironment}`;
601
+
602
+ // The shared dotenv file across environments is optional
603
+ const sharedProductionEnvironment = dotenv.config({
604
+ encoding: 'utf8',
605
+ path: path.join(dotenvPath, sharedDotenvFile)
606
+ });
607
+ const cloudSpecificProductionEnvironment = dotenv.config({
608
+ encoding: 'utf8',
609
+ path: path.join(dotenvPath, cloudDotenvFile)
610
+ });
611
+ if (cloudSpecificProductionEnvironment.error) {
612
+ throw new Error(`Failed loading '${cloudDotenvFile}' in '${dotenvPath}'. Make sure it exists.`);
613
+ }
614
+ if (sharedProductionEnvironment.error) {
615
+ throw new Error(`Failed loading '${sharedDotenvFile}' in '${dotenvPath}'. Make sure it exists.`);
616
+ }
617
+ return _objectSpread$3(_objectSpread$3({}, sharedProductionEnvironment.parsed), cloudSpecificProductionEnvironment.parsed);
618
+ }
619
+
620
+ function ownKeys$2(e, r) { var t = _Object$keys(e); if (_Object$getOwnPropertySymbols) { var o = _Object$getOwnPropertySymbols(e); r && (o = _filterInstanceProperty(o).call(o, function (r) { return _Object$getOwnPropertyDescriptor(e, r).enumerable; })), t.push.apply(t, o); } return t; }
621
+ function _objectSpread$2(e) { for (var r = 1; r < arguments.length; r++) { var _context3, _context4; var t = null != arguments[r] ? arguments[r] : {}; r % 2 ? _forEachInstanceProperty(_context3 = ownKeys$2(Object(t), !0)).call(_context3, function (r) { _defineProperty(e, r, t[r]); }) : _Object$getOwnPropertyDescriptors ? _Object$defineProperties(e, _Object$getOwnPropertyDescriptors(t)) : _forEachInstanceProperty(_context4 = ownKeys$2(Object(t))).call(_context4, function (r) { _Object$defineProperty(e, r, _Object$getOwnPropertyDescriptor(t, r)); }); } return e; }
622
+ function writeUploadScriptFile(_ref) {
623
+ let fileName = _ref.fileName,
624
+ fileContent = _ref.fileContent,
625
+ filePath = _ref.filePath;
626
+ fs.writeFileSync(path$1.join(filePath, fileName), fileContent, {
627
+ // Make the script executable
628
+ mode: 0o755,
629
+ encoding: 'utf8'
630
+ });
631
+ }
632
/**
 * Generate the production-assets upload script for one storage provider and
 * write it into `deployments/<provider-tag>/`. The written file name embeds
 * the provider's bucket region: `<name>-<region><ext>`.
 */
async function compileApplicationAssets(_ref2) {
  const { cliFlags, storageProvider, uploadScriptsGenerator, paths } = _ref2;
  // Where the compiled assets should be uploaded to for this provider.
  const bucketUrl = storageProvider.getAssetsBucketUrl({
    prNumber: cliFlags.prNumber,
    applicationName: cliFlags.applicationName
  });
  const fileContent = uploadScriptsGenerator.getProductionBundlesUploadScript({
    storageProvider,
    bucketUrl,
    assetsPath: paths.assetsPath,
    skipMenu: cliFlags.skipMenu
  });
  // Suffix the configured script name with the bucket region so scripts for
  // different regions do not collide in the same folder.
  const parsedOutFile = path$1.parse(cliFlags.applicationAssetsUploadScriptOutFile);
  const fileName = `${parsedOutFile.name}-${storageProvider.getBucketRegion()}${parsedOutFile.ext}`;
  writeUploadScriptFile({
    fileName,
    fileContent,
    filePath: path$1.join(paths.deploymentsPath, storageProvider.getTag())
  });
}
654
/**
 * Compile the application's `index.html` for a single cloud environment and
 * write both the compiled index and its upload script into
 * `deployments/<storage-provider-tag>/<cloud-environment>/`.
 *
 * Steps, in order:
 *   1. Create the per-environment deployments folder (`mkdir -p`).
 *   2. Assemble the env-var set for the compilation from dotenv files, the
 *      provider CDN URL, optional CLI URL overrides, and the build revision.
 *   3. Run `mc-scripts compile-html` with that environment.
 *   4. Generate and write the index upload script.
 *   5. Move the compiled `index.html` into the deployments folder under the
 *      configured out-file name.
 */
async function compileEnvironmentApplicationIndexes(_ref3) {
  let cliFlags = _ref3.cliFlags,
    storageProvider = _ref3.storageProvider,
    uploadScriptsGenerator = _ref3.uploadScriptsGenerator,
    paths = _ref3.paths,
    cloudEnvironment = _ref3.cloudEnvironment;
  const cloudEnvironmentDeploymentPath = path$1.join(paths.deploymentsPath, storageProvider.getTag(), cloudEnvironment);
  // Ensure the folder exists
  const createDeploymentsFolderResult = await execa('mkdir', ['-p', cloudEnvironmentDeploymentPath], {
    encoding: 'utf8'
  });
  if (createDeploymentsFolderResult.failed) {
    throw new Error(createDeploymentsFolderResult.stderr);
  }

  // Construct the proper CDN URL for the specific application
  const cdnUrl = storageProvider.getCdnUrl({
    prNumber: cliFlags.prNumber,
    applicationName: cliFlags.applicationName
  });
  // Merge order (later wins): dotenv values < MC_CDN_URL < optional CLI
  // overrides (--mc-url / --mc-api-url) < REVISION.
  const environmentVariablesForCompilation = _objectSpread$2(_objectSpread$2(_objectSpread$2(_objectSpread$2({}, loadDotenvFiles({
    dotenvPath: paths.dotenvPath,
    cloudEnvironment
  })), {}, {
    // The trailing slash is important to indicate to the CSP directive that all the resources
    // under that path should be allowed.
    MC_CDN_URL: `${cdnUrl}/`
  }, cliFlags.mcUrl ? {
    MC_URL: cliFlags.mcUrl
  } : {}), cliFlags.mcApiUrl ? {
    MC_API_URL: cliFlags.mcApiUrl
  } : {}), {}, {
    // Will be used by the Application Kit for Sentry and exposed on `window.app.revision`.
    REVISION: cliFlags.buildRevision
  });

  // Sentry and GTM are disabled on branch (PR) deployments.
  // NOTE(review): mutating `process.env` here leaks into every subsequent
  // task run in the same process — presumably intentional since all tasks of
  // a PR build should disable tracking; confirm.
  if (cliFlags.prNumber) {
    // @ts-expect-error The env is sometimes overwritten by code to a boolean
    process.env.TRACKING_SENTRY = null;
    // @ts-expect-error The env is sometimes overwritten by code to a boolean
    process.env.TRACKING_GTM = null;
    // @ts-expect-error
    environmentVariablesForCompilation.TRACKING_SENTRY = null;
    // @ts-expect-error
    environmentVariablesForCompilation.TRACKING_GTM = null;
  }

  // Compile the application using the loaded environment values
  const compileResult = await execa('mc-scripts', ['compile-html'], {
    encoding: 'utf8',
    preferLocal: true,
    extendEnv: true,
    env: environmentVariablesForCompilation
  });
  if (compileResult.failed) {
    throw new Error(compileResult.stderr);
  }
  const applicationIndexUploadScriptContent = uploadScriptsGenerator.getApplicationIndexUploadScript({
    storageProvider,
    packageManagerName: cliFlags.packageManagerName,
    bucketUrl: storageProvider.getApplicationIndexBucketUrl({
      prNumber: cliFlags.prNumber,
      applicationName: cliFlags.applicationName
    }),
    cdnUrl,
    buildRevision: cliFlags.buildRevision,
    buildNumber: cliFlags.buildNumber,
    applicationIndexOutFile: cliFlags.applicationIndexOutFile
  });
  writeUploadScriptFile({
    fileName: cliFlags.applicationIndexUploadScriptOutFile,
    fileContent: applicationIndexUploadScriptContent,
    filePath: cloudEnvironmentDeploymentPath
  });

  // Move the compiled `index.html` to the deployments folder of the related cloud environment.
  const moveResult = await execa('mv', [path$1.join(paths.publicAssetsPath, 'index.html'), path$1.join(cloudEnvironmentDeploymentPath, cliFlags.applicationIndexOutFile)]);
  if (moveResult.failed) {
    throw new Error(moveResult.stderr);
  }
}
736
/**
 * `compile-deployments` command: for every configured bucket region, compile
 * the application assets once per storage provider, and the application index
 * once per (cloud environment x storage provider), orchestrated as a nested
 * Listr task tree.
 *
 * @param {object} cliFlags - parsed CLI options for `compile-deployments`.
 * @param {string} cwd - directory the CLI was invoked from.
 */
async function command$4(cliFlags, cwd) {
  var _context;
  const storageBucketConfig = await loadStorageBucketsConfig();
  const applicationDirectory = getApplicationDirectory(cwd);
  let assetsPath;
  if (cliFlags.ciAssetsRootPath && isCI()) {
    // On CI the assets live under a different root; rewrite the CircleCI home
    // prefix with the configured root.
    // NOTE(review): assumes the CI checkout path starts with
    // '/home/circleci/' — TODO confirm this holds for all CI executors.
    assetsPath = applicationDirectory.replace('/home/circleci/', cliFlags.ciAssetsRootPath);
  } else {
    assetsPath = applicationDirectory;
  }
  const monorepoRoot = findRootSync(cwd);
  const paths = {
    publicAssetsPath: resolveInApplication('public', cwd),
    deploymentsPath: resolveInApplication('deployments', cwd),
    dotenvPath: cliFlags.dotenvFolder && path$1.join(monorepoRoot.rootDir, cliFlags.dotenvFolder),
    assetsPath
  };
  // Fallback when a bucket environment config does not list storage providers.
  const defaultStorageProviders = [storageProviders.gs.tag];
  const taskList = new Listr(_mapInstanceProperty(_context = _Object$entries(storageBucketConfig)).call(_context, _ref4 => {
    let _ref5 = _slicedToArray(_ref4, 2),
      bucketRegion = _ref5[0],
      bucketEnvironmentConfigs = _ref5[1];
    return {
      title: `Compiling for bucket region ${bucketRegion}`,
      task: (_bucketRegionCtx, bucketRegionTask) => {
        // NOTE: Application assets need to be compiled
        // for all storage providers once per region.
        const allStorageProvidersForBucketRegion = [...new _Set(_flatMapInstanceProperty(bucketEnvironmentConfigs).call(bucketEnvironmentConfigs, bucketEnvironmentConfig => bucketEnvironmentConfig.storageProviders || defaultStorageProviders))];
        const allApplicationAssetTasks = _mapInstanceProperty(allStorageProvidersForBucketRegion).call(allStorageProvidersForBucketRegion, storageProviderTag => {
          const uploadScriptsGeneratorConfig = {
            bucketRegion: bucketRegion
          };
          const storageProviderConfig = {
            bucketRegion: bucketRegion
          };
          const storageProvider = getStorageProvider(storageProviderTag, storageProviderConfig);
          const uploadScriptsGenerator = getUploadScriptsGenerator(storageProviderTag, uploadScriptsGeneratorConfig);
          return {
            title: `Compiling application assets for '${storageProviderTag}'`,
            task: () => compileApplicationAssets({
              cliFlags,
              storageProvider,
              uploadScriptsGenerator,
              paths
            })
          };
        });
        const allApplicationIndexTasks = _mapInstanceProperty(bucketEnvironmentConfigs).call(bucketEnvironmentConfigs, bucketEnvironmentConfig => {
          // NOTE(review): `storageProviders` here shadows the imported
          // module-level `storageProviders` map within this callback.
          const cloudEnvironment = bucketEnvironmentConfig.cloudEnvironment,
            bucketEnvironment = bucketEnvironmentConfig.bucketEnvironment,
            storageProviders = bucketEnvironmentConfig.storageProviders;
          const storageProviderConfig = {
            bucketRegion: bucketRegion,
            bucketEnvironment
          };
          return {
            title: `Compiling for cloud environment '${cloudEnvironment}'`,
            task: (_storageProviderCtx, storageProviderTask) => {
              var _context2;
              const applicationIndexTasksForStorageProviders = _mapInstanceProperty(_context2 = storageProviders || defaultStorageProviders).call(_context2, storageProviderTag => {
                const storageProvider = getStorageProvider(storageProviderTag, storageProviderConfig);
                // NOTE(review): unlike the assets tasks above, the index tasks
                // pass `storageProviderConfig` (including `bucketEnvironment`)
                // as the upload-scripts generator config — confirm this
                // asymmetry is intended.
                const uploadScriptsGenerator = getUploadScriptsGenerator(storageProviderTag, storageProviderConfig);
                return {
                  title: `Compiling application index for storage provider '${storageProviderTag}'`,
                  task: () => {
                    return compileEnvironmentApplicationIndexes({
                      cliFlags,
                      storageProvider,
                      uploadScriptsGenerator,
                      paths,
                      cloudEnvironment
                    });
                  }
                };
              });
              return storageProviderTask.newListr(applicationIndexTasksForStorageProviders);
            }
          };
        });
        return bucketRegionTask.newListr([...allApplicationIndexTasks, ...allApplicationAssetTasks]);
      }
    };
  }), {
    // @ts-ignore
    renderer: isCI() ? 'verbose' : 'default'
  });
  await taskList.run();
}
824
+
825
+ function ownKeys$1(e, r) { var t = _Object$keys(e); if (_Object$getOwnPropertySymbols) { var o = _Object$getOwnPropertySymbols(e); r && (o = _filterInstanceProperty(o).call(o, function (r) { return _Object$getOwnPropertyDescriptor(e, r).enumerable; })), t.push.apply(t, o); } return t; }
826
+ function _objectSpread$1(e) { for (var r = 1; r < arguments.length; r++) { var _context3, _context4; var t = null != arguments[r] ? arguments[r] : {}; r % 2 ? _forEachInstanceProperty(_context3 = ownKeys$1(Object(t), !0)).call(_context3, function (r) { _defineProperty(e, r, t[r]); }) : _Object$getOwnPropertyDescriptors ? _Object$defineProperties(e, _Object$getOwnPropertyDescriptors(t)) : _forEachInstanceProperty(_context4 = ownKeys$1(Object(t))).call(_context4, function (r) { _Object$defineProperty(e, r, _Object$getOwnPropertyDescriptor(t, r)); }); } return e; }
827
+
828
// The menu links are only parsed from the config in development mode.
// NOTE(review): this mutates NODE_ENV for the whole CLI process at module
// load time — presumably intentional so `processConfig` exposes the
// `__DEVELOPMENT__` section below; confirm it does not affect other commands.
process.env.NODE_ENV = 'development';
830
/**
 * Ensure a localized-label list covers every supported locale.
 *
 * When a `defaultLabel` is given, every supported locale gets an entry:
 * locales already present in `labelAllLocales` keep their value, missing
 * locales fall back to the default label. Without a default label the input
 * is returned unchanged. This mapping is only needed for development, where
 * two different config schemas have to be reconciled.
 */
const mapLabelAllLocalesWithDefaults = (labelAllLocales, defaultLabel) => {
  if (!defaultLabel) {
    return labelAllLocales;
  }
  return getSupportedLocales().map(supportedLocale => {
    const existingField = labelAllLocales.find(field => field.locale === supportedLocale);
    return existingField ?? {
      locale: supportedLocale,
      value: defaultLabel
    };
  });
};
848
+
849
+ /**
850
+ * Transform menu links defined in the `custom-application-config.json` to the format
851
+ * used by the HTTP Proxy GraphQL API.
852
+ */
853
+
854
/**
 * Map the `__DEVELOPMENT__` menu section of the processed application config
 * to the JSON shape consumed by the HTTP Proxy GraphQL API.
 *
 * Two shapes are produced:
 *  - when `accountLinks` are configured (account app only), a flat array of
 *    menu link objects is returned;
 *  - otherwise a single navbar menu object (with `submenu`) is returned.
 */
const mapApplicationMenuConfigToGraqhQLMenuJson = config => {
  var _context2;
  const entryPointUriPath = config.env.entryPointUriPath;

  // @ts-expect-error: the `accountLinks` is not explicitly typed as it's only used by the account app.
  const accountLinks = config.env.__DEVELOPMENT__?.accountLinks ?? [];
  if (accountLinks.length > 0) {
    return _mapInstanceProperty(accountLinks).call(accountLinks, menuLink => ({
      key: menuLink.uriPath,
      uriPath: menuLink.uriPath,
      labelAllLocales: mapLabelAllLocalesWithDefaults(menuLink.labelAllLocales, menuLink.defaultLabel),
      permissions: menuLink.permissions ?? [],
      // @ts-ignore: not defined in schema, as it's only used internally.
      featureToggle: menuLink.featureToggle ?? null
    }));
  }
  // NOTE(review): `menuLinks` is read with `?.` here and on `labelAllLocales`
  // below, but accessed unguarded everywhere else (`menuLinks.icon`, …) — if
  // the config lacks a menu section this throws; confirm whether that is the
  // intended failure mode.
  const menuLinks = config.env.__DEVELOPMENT__?.menuLinks;
  return {
    key: entryPointUriPath,
    uriPath: entryPointUriPath,
    icon: menuLinks.icon,
    labelAllLocales: mapLabelAllLocalesWithDefaults(menuLinks?.labelAllLocales, menuLinks?.defaultLabel),
    permissions: menuLinks.permissions,
    // @ts-ignore: not defined in schema, as it's only used internally.
    featureToggle: menuLinks.featureToggle ?? null,
    // @ts-ignore: not defined in schema, as it's only used internally.
    menuVisibility: menuLinks.menuVisibility ?? null,
    // @ts-ignore: not defined in schema, as it's only used internally.
    actionRights: menuLinks.actionRights ?? null,
    // @ts-ignore: not defined in schema, as it's only used internally.
    dataFences: menuLinks.dataFences ?? null,
    submenu: _mapInstanceProperty(_context2 = menuLinks.submenuLinks).call(_context2, submenuLink => ({
      // NOTE(review): `.replace('/', '-')` only replaces the FIRST slash of
      // the uriPath — presumably uriPaths have a single slash; confirm.
      key: submenuLink.uriPath.replace('/', '-'),
      uriPath: submenuLink.uriPath,
      labelAllLocales: mapLabelAllLocalesWithDefaults(submenuLink.labelAllLocales, submenuLink.defaultLabel),
      permissions: submenuLink.permissions,
      // @ts-ignore: not defined in schema, as it's only used internally.
      featureToggle: submenuLink.featureToggle ?? null,
      // @ts-ignore: not defined in schema, as it's only used internally.
      menuVisibility: submenuLink.menuVisibility ?? null,
      // @ts-ignore: not defined in schema, as it's only used internally.
      actionRights: submenuLink.actionRights ?? null,
      // @ts-ignore: not defined in schema, as it's only used internally.
      dataFences: submenuLink.dataFences ?? null
    })),
    // @ts-ignore: not defined in schema, as it's only used internally.
    shouldRenderDivider: menuLinks.shouldRenderDivider ?? false
  };
};
903
/**
 * `compile-menu` command: process the application config in development mode
 * and write the resulting menu links as `menu.json` into the application
 * directory.
 *
 * @param {object} cliFlags - parsed CLI options (`dotenvFolder`).
 * @param {string} cwd - directory the CLI was invoked from.
 */
async function command$3(cliFlags, cwd) {
  const applicationDirectory = getApplicationDirectory(cwd);
  const monorepoRoot = findRootSync(cwd);
  const dotenvPath = cliFlags.dotenvFolder && path.join(monorepoRoot.rootDir, cliFlags.dotenvFolder);

  // The env itself is not important for the menu. However, the application config
  // uses environment placeholders and therefore we need to provide the variables for it.
  const cloudEnvironment = clusterContexts['ctp_staging_gcp_europe-west1_v1'];
  const processEnv = _objectSpread$1(_objectSpread$1({}, loadDotenvFiles({
    dotenvPath,
    cloudEnvironment
  })), {}, {
    // Again, make sure that the environment is "development", otherwise
    // the menu config won't be available.
    NODE_ENV: 'development',
    MC_APP_ENV: 'development',
    // Something random, just to have environment variable defined.
    REVISION: '123'
  });
  const applicationRuntimeConfig = await processConfig({
    disableCache: true,
    applicationPath: applicationDirectory,
    processEnv
  });
  const applicationMenu = mapApplicationMenuConfigToGraqhQLMenuJson(applicationRuntimeConfig);
  // Pretty-print so the generated file is reviewable in diffs.
  const formattedJson = _JSON$stringify(applicationMenu, null, 2);
  fs$1.writeFileSync(path.join(applicationDirectory, 'menu.json'), formattedJson, {
    encoding: 'utf8'
  });
}
933
+
934
/**
 * `create-version` command: build the next `version.json` payload for a
 * deployment, carrying over the previous deployment as the newest rollback.
 *
 * The result is written to `--out-file` when given, otherwise printed to
 * stdout (from where a calling bash script picks it up).
 *
 * @param {object} cliFlags - parsed CLI options (versionUrl, rollbacks, buildNumber, buildRevision, outFile).
 */
async function command$2(cliFlags) {
  // NOTE(review): `rollbacks - 1` keeps one slot for the previous build that
  // is prepended below, so the total history stays at `--rollbacks` entries —
  // presumably intentional; confirm.
  const numberOfRollbacks = cliFlags.rollbacks - 1;
  let nextRollbacks;
  try {
    var _context, _context2;
    // The last build's JSON becomes the first rollback
    // while all previous rollbacks remain but are sliced.
    const lastVersionResponse = await fetch(cliFlags.versionUrl);
    const lastVersionJson = await lastVersionResponse.json();
    const previousBuild = lastVersionJson && {
      buildNumber: lastVersionJson.buildNumber,
      revision: lastVersionJson.revision,
      deployedAt: lastVersionJson.deployedAt
    };
    nextRollbacks = _sliceInstanceProperty(_context = _filterInstanceProperty(_context2 = [previousBuild, ...lastVersionJson.rollbacks]).call(_context2, Boolean)).call(_context, 0, numberOfRollbacks);
  } catch (error) {
    // Any failure fetching/parsing the previous version (e.g. first-ever
    // deployment) starts with an empty rollback history.
    nextRollbacks = [];
  }
  const nextBuild = {
    buildNumber: cliFlags.buildNumber,
    revision: cliFlags.buildRevision,
    deployedAt: new Date().toISOString(),
    rollbacks: nextRollbacks
  };
  const formattedJson = _JSON$stringify(nextBuild, null, 2);
  // Logging to stdout which is from where it will be picked
  // up by the caller (a bash script).
  if (cliFlags.outFile) {
    fs$1.writeFileSync(cliFlags.outFile, formattedJson, {
      encoding: 'utf8'
    });
  } else {
    console.log(formattedJson);
  }
}
969
+
970
+ /**
971
+ * This is heavily inspired by https://circleci.com/developer/orbs/orb/circleci/path-filtering.
972
+ *
973
 * It detects changed files between `HEAD` and a base revision
 * to match them against configured RegEx triggers.
975
+ * All matched triggers will be written as a dotenv file.
976
+ * The dotenv file is read in a CircleCI step and be evaluated.
977
+ */
978
/**
 * Templates for the git commands used to detect changed files between a base
 * branch and the current head revision.
 */
const git = {
  // https://git-scm.com/docs/git-merge-base
  base: (baseBranch, headRevision) => ['git merge-base', baseBranch, headRevision].join(' '),
  // https://git-scm.com/docs/git-diff
  changedFiles: (mergeRevision, headRevision) => ['git diff --name-only', mergeRevision, headRevision].join(' '),
  // https://git-scm.com/docs/git-log
  commitMessage: (headRevision) => ['git log --format=oneline -n 1', headRevision].join(' ')
};
985
const helpers = {
  /**
   * Write the matched triggers as a dotenv file (only when `--out-env-file`
   * is set). Trigger names become `DID_<SNAKE_CASE_NAME>_CHANGE=<0|1>`,
   * except `allowPipelineOptimizations*` hints, which keep their name.
   */
  async writeOutDotEnvFile(cliFlags, cwd, matchingTriggers) {
    var _context;
    // If desired read the env file and write out the matching triggers.
    if (!cliFlags.outEnvFile) {
      return;
    }
    const filePath = path$1.join(fs.realpathSync(cwd), cliFlags.outEnvFile);
    const fileContents = _mapInstanceProperty(_context = _Object$entries(matchingTriggers)).call(_context, _ref => {
      let _ref2 = _slicedToArray(_ref, 2),
        triggerName = _ref2[0],
        triggerValue = _ref2[1];
      const triggerNameForEnvFile = `${snakeCase(triggerName).toUpperCase()}`;

      // General pipeline optimization hints are not transformed
      if (_startsWithInstanceProperty(triggerName).call(triggerName, 'allowPipelineOptimizations')) {
        return `${triggerNameForEnvFile}=${triggerValue}`;
      }
      return `DID_${triggerNameForEnvFile}_CHANGE=${triggerValue}`;
    }).join('\n');
    await fs.promises.writeFile(filePath, fileContents);
    if (!cliFlags.silent) {
      console.log(`📝 Wrote out file to '${filePath}' with contents:`);
      console.log(fileContents);
    }
  },
  /**
   * Return the list of non-empty file paths changed between the merge-base of
   * `--base-branch`/`--head-revision` and the head revision.
   */
  async getChangedFiles(cliFlags) {
    var _context2, _context3;
    const baseCmdResult = await command$5(git.base(cliFlags.baseBranch, cliFlags.headRevision));
    const mergeRevision = baseCmdResult.stdout;
    const changedFilesCmdResult = await command$5(git.changedFiles(mergeRevision, cliFlags.headRevision));
    // `git diff --name-only` prints one path per line; drop blank lines.
    const changedFiles = _filterInstanceProperty(_context2 = _mapInstanceProperty(_context3 = changedFilesCmdResult.stdout.split('\n')).call(_context3, filePath => _trimInstanceProperty(filePath).call(filePath))).call(_context2, filePath => filePath.length > 0);
    return changedFiles;
  },
  /**
   * Evaluate every trigger definition against the changed files and return a
   * map of trigger name -> 0/1 (1 = relevant files changed).
   */
  async matchTriggersAgainstChangedFiles(cliFlags, config, changedFiles) {
    const matchedTriggers = {};

    // Evaluate each trigger against each file.
    // NOTE(review): the forEach callback is marked `async` but contains no
    // `await`, so it still runs to completion synchronously before the return
    // below — works, but the `async` is misleading; consider `for...of`.
    _forEachInstanceProperty(config).call(config, async trigger => {
      const hasTriggerBeenInitialized = typeof matchedTriggers[trigger.name] === 'number';

      // Given the trigger with this name was never evaluated it has to be defaulted to 0.
      // As without any matches we should indicate nothing changed.
      if (!hasTriggerBeenInitialized) {
        matchedTriggers[trigger.name] = 0;
      }
      // Given the trigger was already evaluated to be positive we can skip this evaluation.
      if (matchedTriggers[trigger.name] === 1) {
        return matchedTriggers;
      }

      // In any other case we evaluate this trigger.
      const anyFileChangedForTrigger = _someInstanceProperty(micromatch).call(micromatch, changedFiles, trigger.include, {
        ignore: trigger.ignore
      });
      if (!cliFlags.silent && anyFileChangedForTrigger) {
        console.log(`ℹ️ Files for trigger ${trigger.name} changed.`);
      }
      let onlyExcludedFilesChangedForTrigger = false;
      if (trigger.exclude?.length > 0) {
        // NOTE: `micromatch.every` evaluates if every file matches
        // every pattern.
        // We need to evaluate if every file matches some pattern.
        onlyExcludedFilesChangedForTrigger = _everyInstanceProperty(changedFiles).call(changedFiles, changedFile => {
          return micromatch.isMatch(changedFile, trigger.exclude, {
            ignore: trigger.ignore
          });
        });
      }
      if (!cliFlags.silent && onlyExcludedFilesChangedForTrigger) {
        console.log(`ℹ️ Only excluded files for trigger ${trigger.name} changed.`);
      }
      if (onlyExcludedFilesChangedForTrigger) {
        matchedTriggers[trigger.name] = 0;
      } else {
        matchedTriggers[trigger.name] = Number(anyFileChangedForTrigger);
      }
      return matchedTriggers;
    });
    return matchedTriggers;
  }
};
1067
/**
 * `evaluate-change-triggers` command: determine which configured triggers
 * match the files changed between `--base-branch` and `--head-revision`.
 *
 * The `allowPipelineOptimizationsForTrigger` hint is set to 1 only when ALL
 * of the following hold:
 *   - the `ENABLE_PIPELINE_OPTIMIZATIONS` env toggle is "1",
 *   - the build is for a development branch (branch !== base branch),
 *   - the commit message does not contain the `[ci all]` escape hatch,
 *   - no trigger marked `containsSharedFiles` matched, and
 *   - the trigger under evaluation (`--trigger-name`) did not match.
 *
 * @param {object} cliFlags - parsed CLI options (branch, baseBranch, headRevision, triggerName, silent, outEnvFile).
 * @param {Array} config - the trigger definitions to evaluate.
 * @param {string} cwd - directory used to resolve the optional dotenv out file.
 * @returns {Promise<object>} map of trigger name -> 0/1, plus the optimization hint.
 */
async function command$1(cliFlags, config, cwd) {
  const enablePipelineOptimizations = process.env.ENABLE_PIPELINE_OPTIMIZATIONS === '1';
  const isDevelopmentBranch = cliFlags.branch !== cliFlags.baseBranch;
  const triggersContainingSharedFiles = _filterInstanceProperty(config).call(config, trigger => trigger.containsSharedFiles);
  if (!cliFlags.silent) {
    console.log(`ℹ️ Pipeline optimizations are ${enablePipelineOptimizations ? 'enabled' : 'disabled'}.`);
    // Fixed wording: previously rendered as "commited to the a development
    // branch" / "commited to the the main branch".
    console.log(`ℹ️ Changes have been committed to ${isDevelopmentBranch ? 'a development' : 'the main'} branch.`);
    console.log(`🚧 Comparing '${cliFlags.baseBranch}' against '${cliFlags.headRevision}' to determine changed files.`);
  }

  // Collect and parse changed files from git comparing base and head revision.
  const changedFiles = await helpers.getChangedFiles(cliFlags);
  if (!cliFlags.silent) {
    if (changedFiles.length === 0) {
      console.log(`ℹ️ No changes found.`);
    } else {
      console.log(`ℹ️ ${changedFiles.length} changes found.`);
    }
  }

  // Read the trigger file to match the changed files against.
  const matchedTriggers = await helpers.matchTriggersAgainstChangedFiles(cliFlags, config, changedFiles);
  const commitMessageCmdResult = await command$5(git.commitMessage(cliFlags.headRevision));
  const commitMessage = commitMessageCmdResult.stdout;
  // `[ci all]` in the commit message forces all work to run regardless of triggers.
  const hasCommitMessageTrigger = commitMessage && _includesInstanceProperty(commitMessage).call(commitMessage, '[ci all]');
  const doesSharedTriggerMatch = _someInstanceProperty(triggersContainingSharedFiles).call(triggersContainingSharedFiles, triggerContainingSharedFiles => matchedTriggers[triggerContainingSharedFiles.name] === 1);
  if (!cliFlags.silent) {
    console.log(`ℹ️ The git commit message ${hasCommitMessageTrigger ? 'does' : 'does not'} contain a [ci all] trigger.`);
  }
  const doesPackageFolderTriggerMatch = matchedTriggers[cliFlags.triggerName] === 1;
  if (enablePipelineOptimizations && isDevelopmentBranch && !hasCommitMessageTrigger && !doesSharedTriggerMatch && !doesPackageFolderTriggerMatch) {
    if (!cliFlags.silent) {
      console.log(`ℹ️ No relevant changes found for ${cliFlags.triggerName}.`);
    }
    matchedTriggers['allowPipelineOptimizationsForTrigger'] = 1;
  } else {
    if (!cliFlags.silent) {
      console.log(`ℹ️ Relevant changes found for ${cliFlags.triggerName}.`);
    }
    matchedTriggers['allowPipelineOptimizationsForTrigger'] = 0;
  }
  await helpers.writeOutDotEnvFile(cliFlags, cwd, matchedTriggers);
  return matchedTriggers;
}
1111
+
1112
+ function ownKeys(e, r) { var t = _Object$keys(e); if (_Object$getOwnPropertySymbols) { var o = _Object$getOwnPropertySymbols(e); r && (o = _filterInstanceProperty(o).call(o, function (r) { return _Object$getOwnPropertyDescriptor(e, r).enumerable; })), t.push.apply(t, o); } return t; }
1113
+ function _objectSpread(e) { for (var r = 1; r < arguments.length; r++) { var _context, _context2; var t = null != arguments[r] ? arguments[r] : {}; r % 2 ? _forEachInstanceProperty(_context = ownKeys(Object(t), !0)).call(_context, function (r) { _defineProperty(e, r, t[r]); }) : _Object$getOwnPropertyDescriptors ? _Object$defineProperties(e, _Object$getOwnPropertyDescriptors(t)) : _forEachInstanceProperty(_context2 = ownKeys(Object(t))).call(_context2, function (r) { _Object$defineProperty(e, r, _Object$getOwnPropertyDescriptor(t, r)); }); } return e; }
1114
// JSON-Schema property definitions shared by the navbar and appbar menu
// schemas below.
// NOTE(review): the array form of `items` (tuple validation) was replaced by
// `prefixItems` in JSON Schema draft 2020-12 (the draft declared in the
// schemas below) — the `jsonschema` validator appears to accept the legacy
// form, but confirm the intended semantics (validate EVERY item, not just
// the first).
const baseMenuProperties = {
  key: {
    type: 'string'
  },
  uriPath: {
    type: 'string'
  },
  icon: {
    type: 'string'
  },
  featureToggle: {
    type: ['string', 'null']
  },
  labelAllLocales: {
    type: 'array',
    items: [{
      type: 'object',
      properties: {
        locale: {
          type: 'string'
        },
        value: {
          type: 'string'
        }
      },
      required: ['locale', 'value']
    }]
  },
  menuVisibility: {
    type: ['string', 'null']
  },
  permissions: {
    type: 'array',
    items: {
      type: 'string'
    }
  },
  dataFences: {
    type: ['array', 'null'],
    items: [{
      type: ['object'],
      properties: {
        group: {
          type: 'string'
        },
        name: {
          type: 'string'
        },
        type: {
          type: 'string'
        }
      }
    }]
  },
  actionRights: {
    type: ['array', 'null'],
    items: [{
      type: ['object'],
      properties: {
        group: {
          type: 'string'
        },
        name: {
          type: 'string'
        }
      }
    }]
  }
};
1183
// Schema for the (left) navbar menu: a single menu object with a `submenu`
// array of entries sharing the base properties.
const navbarMenuSchema = {
  $schema: 'https://json-schema.org/draft/2020-12/schema',
  // "$id":""
  title: 'NavbarMenu',
  type: 'object',
  properties: _objectSpread(_objectSpread({}, baseMenuProperties), {}, {
    submenu: {
      type: 'array',
      items: [{
        type: 'object',
        properties: baseMenuProperties
      }]
    }
  }),
  required: ['icon', 'key', 'labelAllLocales', 'permissions', 'submenu', 'uriPath']
};
1199
// Schema for the (top) appbar menu: a flat array of menu entries without a
// submenu (used when `--navigation top` is passed to `validate-menu`).
const appbarMenuSchema = {
  $schema: 'https://json-schema.org/draft/2020-12/schema',
  // "$id":""
  title: 'AppbarMenu',
  type: 'array',
  items: [{
    type: 'object',
    properties: baseMenuProperties,
    required: ['key', 'labelAllLocales', 'permissions', 'uriPath']
  }]
};
1210
+
1211
/**
 * Validate a parsed `menu.json` document against a JSON schema.
 *
 * @param {object|Array} menuJson - the parsed menu document.
 * @param {object} [schema=navbarMenuSchema] - schema to validate against
 *   (`appbarMenuSchema` for top-navigation menus).
 * @returns the unchanged `menuJson` when validation passes.
 * @throws {Error} listing every schema violation, one per line.
 */
function validateMenu(menuJson, schema = navbarMenuSchema) {
  const validator = new Validator();
  const result = validator.validate(menuJson, schema);
  if (result.valid) {
    return menuJson;
  }
  // `result.errors` is an array of `ValidationError` objects; format one
  // violation per line via `error.stack` (the full human-readable message)
  // instead of relying on implicit array-to-string coercion, which joined
  // all errors with commas.
  const details = result.errors.map(validationError => validationError.stack).join('\n');
  throw new Error(`menu.json validation failed\n${details}`);
}
1221
/**
 * `validate-menu` command: read a compiled `menu.json` from disk and validate
 * it against the navbar schema (or the appbar schema for `--navigation top`).
 */
async function command(cliFlags) {
  const menuJsonPath = cliFlags.inputFile;
  if (!menuJsonPath) {
    throw new Error(`--input-file cannot be empty. please provide the path of compiled menu.json`);
  }
  if (!doesFileExist(menuJsonPath)) {
    throw new Error(`The menu.json file doesn't exist: ${menuJsonPath}`);
  }
  const isAppbarMenu = cliFlags.navigation === 'top';
  const schema = isAppbarMenu ? appbarMenuSchema : navbarMenuSchema;
  const rawMenuJson = fs$1.readFileSync(menuJsonPath, 'utf-8');
  return validateMenu(JSON.parse(rawMenuJson), schema);
}
1229
+
1230
// Inlined snapshot of this package's `package.json` (bundled at build time by
// preconstruct). Only informational fields are read from it at runtime.
// NOTE(review): the `version` here is the snapshot taken at bundling time and
// may differ from the published package version.
var pkgJson = {
  name: "@commercetools-frontend/application-cli",
  version: "5.0.2",
  description: "Internal CLI to manage Merchant Center application deployments across various environments.",
  keywords: [
    "commercetools",
    "cli",
    "custom-application"
  ],
  license: "MIT",
  main: "dist/commercetools-frontend-application-cli.cjs.js",
  module: "dist/commercetools-frontend-application-cli.esm.js",
  bin: "bin/cli.js",
  files: [
    "bin",
    "cli",
    "dist",
    "package.json",
    "LICENSE",
    "README.md"
  ],
  scripts: {
    typecheck: "tsc --noEmit"
  },
  dependencies: {
    "@babel/core": "^7.22.11",
    "@babel/runtime-corejs3": "^7.21.0",
    "@commercetools-frontend/application-config": "22.37.0",
    "@commercetools-frontend/constants": "22.37.0",
    "@commercetools-frontend/l10n": "22.37.0",
    "@manypkg/find-root": "2.2.3",
    cac: "^6.7.14",
    cosmiconfig: "9.0.0",
    dotenv: "16.4.5",
    execa: "5.1.1",
    jsonschema: "^1.4.1",
    listr2: "8.2.5",
    lodash: "4.17.21",
    micromatch: "4.0.8",
    "node-fetch": "2.7.0",
    "ts-deepmerge": "7.0.1"
  },
  devDependencies: {
    "@tsconfig/node20": "20.1.4",
    "@types/lodash": "^4.14.198",
    "@types/micromatch": "4.0.9",
    "@types/node": "20.17.13",
    typescript: "5.2.2"
  },
  engines: {
    node: ">=21",
    npm: ">=6"
  },
  publishConfig: {
    access: "public"
  },
  preconstruct: {
    entrypoints: [
      "./cli.ts",
      "./index.ts"
    ]
  }
};
1293
+
1294
// The shared `cac` instance all commands below are registered on.
const cli = cac('application-cli');
// Resolved once so every command operates relative to the invocation directory.
const cwd = process.cwd();
1296
+ const run = async () => {
1297
+ cli.option('--build-revision [git-sha]', '(optional) The git commit SHA which is being built.', {
1298
+ default: process.env.CIRCLE_SHA1
1299
+ }).option('--build-number [string]', '(optional) A number of the build on the Continuous Integration system.', {
1300
+ default: process.env.CIRCLE_BUILD_NUM
1301
+ }).option('--package-manager-name [string]', '(optional) Name of the binary of the used package manager (e.g. pnpm).', {
1302
+ default: 'yarn'
1303
+ });
1304
+
1305
+ // Default command
1306
+ cli.command('').usage('\n\n Compile deployments and menus and create versions for MC applications').action(cli.outputHelp);
1307
+ const usageCompileDeployment = 'Compile the deployments for an application for all environments.';
1308
+ cli.command('compile-deployments', usageCompileDeployment).usage(`compile-deployments \n\n ${usageCompileDeployment}`).option('--application-name <string>', '(required) The name of the application being compiled for example application-products.').option('--dotenv-folder [string]', '(optional) The path to a folder containing a dotenv file ".env.production" and a cloud-environment specific dotenv file (for example ".env.gcp-production-eu"). Those values are parsed and merged together to be used by the `mc-scripts compile-html` command.').option('--pr-number [string]', '(optional) A pull request number determining a scoped storage bucket for the deployment. Please use it carefully.').option('--mc-url [string]', '(optional) The MC URL of the deployment. This is usually inferred from the env file and overwrites the value. Please use it carefully.').option('--mc-api-url [string]', '(optional) The MC API URL of the deployment. This is usually inferred from the env file and overwrites the value. Please use it carefully.').option('--application-index-out-file [path]', '(optional) The name of the application index file.', {
1309
+ default: 'application.html'
1310
+ }).option('--application-index-upload-script-out-file [path]', '(optional) The name of the the application index upload script file.', {
1311
+ default: 'upload-index.sh'
1312
+ }).option('--application-assets-upload-script-out-file [path]', '(optional) The name of the the assets upload script file.', {
1313
+ default: 'upload-assets.sh'
1314
+ }).option('--ci-assets-root-path [path]', '(optional) A replacement value for the scripts root path only used on CI (e.g. "--ci-assets-root-path=/root/") used in generated scripts.').option('--skip-menu', '(optional) If provided, it will skip uploading the `menu.json`.', {
1315
+ default: false
1316
+ }).action(async options => {
1317
+ await command$4(options, cwd);
1318
+ });
1319
+ const usageCompileMenu = 'Compile the menu links of an application into a `menu.json`. This is only required for internal applications';
1320
+ cli.command('compile-menu', usageCompileMenu).usage(`compile-menu \n\n ${usageCompileMenu}`).option('--dotenv-folder [string]', '(optional) The path to a folder containing a dotenv file `.env.production` and a cloud-environment specific dotenv file (for example `.env.gcp-production-eu`). Those values are parsed and merged together to be used by the application config.').action(async options => {
1321
+ await command$3(options, cwd);
1322
+ });
1323
+ const usageValidateMenu = 'Validate compiled `menu.json` file';
1324
+ cli.command('validate-menu', usageValidateMenu).usage(`validate-menu \n\n ${usageValidateMenu}`).option('--input-file <path>', '(required) The path to the `menu.json` file to be validated.').option('--navigation [string]', '(optional) Location of the menu navigation. Possible values are `top`.').action(async options => {
1325
+ await command(options);
1326
+ });
1327
+ const usageCreateVersion = 'Output a JSON string about the information in the `version.json` for a deployment, including the updated list of rollbacks.';
1328
+ cli.command('create-version', usageCreateVersion).usage(`create-version \n\n ${usageCreateVersion}`).option('--version-url <url>', "(required) The path of an application's current `version.json` within the storage bucket.").option('--rollbacks [int]', '(optional) The number of max rollbacks to keep', {
1329
+ default: 15
1330
+ }).option('--out-file [path]', '(optional) The path to the file where to write the JSON. If not specified, the JSON is printed to stdout.').action(async options => {
1331
+ await command$2(options);
1332
+ });
1333
+
1334
+ // Command: Evaluate change triggers
1335
+ const usageEvaluateChangeTriggers = 'Evaluates changed files against a base and evaluates them against defined triggers.';
1336
+ cli.command('evaluate-change-triggers', usageEvaluateChangeTriggers).usage(`evaluate-change-triggers \n\n ${usageEvaluateChangeTriggers}`).option('--branch <string>', 'The branch of the pull request', {
1337
+ default: process.env.CIRCLE_BRANCH
1338
+ }).option('--base-branch <string>', 'The base revision of the git commit to compare against (e.g. "main")').option('--head-revision <string>', 'The revision of the git head to compare with', {
1339
+ default: process.env.CIRCLE_SHA1
1340
+ }).option('--trigger-name <string>', 'The trigger to evaluate for.').option('--silent', '(optional) Disable logging', {
1341
+ default: false
1342
+ }).option('--out-env-file [string]', '(optional) A file path where the matched triggers are written as a dotenv file.').action(async options => {
1343
+ const config = await loadConfig('circleci-change-triggers', []);
1344
+ await command$1(options, config, cwd);
1345
+ });
1346
+ cli.help();
1347
+ cli.version(pkgJson.version);
1348
+ cli.parse(process.argv, {
1349
+ run: false
1350
+ });
1351
+ await cli.runMatchedCommand();
1352
+ };
1353
+
1354
+ export { run };