@commercetools-frontend/application-cli 3.0.1 → 3.0.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/cli/dist/commercetools-frontend-application-cli-cli.cjs.dev.js +269 -130
- package/cli/dist/commercetools-frontend-application-cli-cli.cjs.prod.js +269 -130
- package/cli/dist/commercetools-frontend-application-cli-cli.esm.js +269 -129
- package/dist/commercetools-frontend-application-cli.cjs.dev.js +1 -3
- package/dist/commercetools-frontend-application-cli.cjs.prod.js +1 -3
- package/dist/commercetools-frontend-application-cli.esm.js +1 -3
- package/dist/{storage-buckets-config-6e04b3d3.esm.js → storage-buckets-config-2e7dcecf.esm.js} +7 -13
- package/dist/{storage-buckets-config-2f9a9168.cjs.prod.js → storage-buckets-config-73009f39.cjs.prod.js} +7 -15
- package/dist/{storage-buckets-config-89962880.cjs.dev.js → storage-buckets-config-ab143609.cjs.dev.js} +7 -15
- package/package.json +9 -9
@@ -9,12 +9,11 @@ import _Object$defineProperties from '@babel/runtime-corejs3/core-js-stable/obje
 import _Object$defineProperty from '@babel/runtime-corejs3/core-js-stable/object/define-property';
 import _slicedToArray from '@babel/runtime-corejs3/helpers/esm/slicedToArray';
 import _defineProperty from '@babel/runtime-corejs3/helpers/esm/defineProperty';
-import _concatInstanceProperty from '@babel/runtime-corejs3/core-js-stable/instance/concat';
 import _mapInstanceProperty from '@babel/runtime-corejs3/core-js-stable/instance/map';
 import _Object$entries from '@babel/runtime-corejs3/core-js-stable/object/entries';
 import _Set from '@babel/runtime-corejs3/core-js-stable/set';
 import _flatMapInstanceProperty from '@babel/runtime-corejs3/core-js-stable/instance/flat-map';
-import { s as storageProviders, l as loadStorageBucketsConfig, c as clusterContexts, a as loadConfig } from '../../dist/storage-buckets-config-6e04b3d3.esm.js';
+import { s as storageProviders, l as loadStorageBucketsConfig, c as clusterContexts, a as loadConfig } from '../../dist/storage-buckets-config-2e7dcecf.esm.js';
 import fs from 'node:fs';
 import path$1 from 'node:path';
 import { Listr } from 'listr2';
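
The pattern behind most hunks in this release is visible right here: string concatenation that used to be compiled through the core-js `instance/concat` helper is now emitted as native template literals, so the helper import disappears. A minimal sketch of the two output styles (function and variable names hypothetical, not from the package):

```js
import _concatInstanceProperty from '@babel/runtime-corejs3/core-js-stable/instance/concat';

// Old output style: concatenation is routed through the core-js helper so a
// polyfilled String.prototype.concat is used.
function oldStyle(tag, env) {
  var _context;
  return _concatInstanceProperty(_context = "provider '".concat(tag, "' for ")).call(_context, env, ' or as default.');
}

// New output style: the template literal is kept as-is, so no helper import
// is needed.
function newStyle(tag, env) {
  return `provider '${tag}' for ${env} or as default.`;
}
```
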
@@ -63,7 +62,7 @@ function isCI() {
 }
 
 function ownKeys$4(e, r) { var t = _Object$keys(e); if (_Object$getOwnPropertySymbols) { var o = _Object$getOwnPropertySymbols(e); r && (o = _filterInstanceProperty(o).call(o, function (r) { return _Object$getOwnPropertyDescriptor(e, r).enumerable; })), t.push.apply(t, o); } return t; }
-function _objectSpread$4(e) { for (var r = 1; r < arguments.length; r++) { var…
+function _objectSpread$4(e) { for (var r = 1; r < arguments.length; r++) { var _context3, _context4; var t = null != arguments[r] ? arguments[r] : {}; r % 2 ? _forEachInstanceProperty(_context3 = ownKeys$4(Object(t), !0)).call(_context3, function (r) { _defineProperty(e, r, t[r]); }) : _Object$getOwnPropertyDescriptors ? _Object$defineProperties(e, _Object$getOwnPropertyDescriptors(t)) : _forEachInstanceProperty(_context4 = ownKeys$4(Object(t))).call(_context4, function (r) { _Object$defineProperty(e, r, _Object$getOwnPropertyDescriptor(t, r)); }); } return e; }
 function _callSuper(t, o, e) { return o = _getPrototypeOf(o), _possibleConstructorReturn(t, _isNativeReflectConstruct() ? _Reflect$construct(o, e || [], _getPrototypeOf(t).constructor) : o.apply(t, e)); }
 function _isNativeReflectConstruct() { try { var t = !Boolean.prototype.valueOf.call(_Reflect$construct(Boolean, [], function () {})); } catch (t) {} return (_isNativeReflectConstruct = function () { return !!t; })(); }
 var _bucketRegion$1 = /*#__PURE__*/_classPrivateFieldLooseKey("bucketRegion");
@@ -96,7 +95,7 @@ let StorageProvider = /*#__PURE__*/function () {
 value: function getBucketNamespace(prNumber) {
 if (!prNumber) return;
 if (prNumber === 'merchant-center-preview') return prNumber;
-return…
+return `mc-${prNumber}`;
 }
 
 /**
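
Read in isolation, the rewritten method derives the per-PR bucket namespace with a template literal. A standalone sketch of the same rule:

```js
// Standalone sketch of getBucketNamespace as rewritten above.
function getBucketNamespace(prNumber) {
  if (!prNumber) return; // no PR number: no namespace segment
  if (prNumber === 'merchant-center-preview') return prNumber;
  return `mc-${prNumber}`;
}

console.log(getBucketNamespace('1234')); // -> 'mc-1234'
console.log(getBucketNamespace(undefined)); // -> undefined
```
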
@@ -105,7 +104,7 @@ let StorageProvider = /*#__PURE__*/function () {
 }, {
 key: "getAssetsBucketUrl",
 value: function getAssetsBucketUrl(_ref) {
-var…
+var _context;
 let applicationName = _ref.applicationName,
 bucketProtocol = _ref.bucketProtocol,
 bucketNamespace = _ref.bucketNamespace,
@@ -114,14 +113,13 @@ let StorageProvider = /*#__PURE__*/function () {
 throw new Error("'bucketRegion' is not defined. Required to determine 'assetsBucketUrl'.");
 }
 const storageProvider = storageProviders[tag];
-const assetBuketUrl =…
+const assetBuketUrl = storageProvider.urls.bucket?.[_classPrivateFieldLooseBase(this, _bucketRegion$1)[_bucketRegion$1]] ?? _classPrivateFieldLooseBase(this, _bucketRegion$1)[_bucketRegion$1];
 const assetsBucketUrl = _filterInstanceProperty(_context = [assetBuketUrl, bucketNamespace, applicationName]).call(_context, Boolean).join('/');
-return…
+return `${bucketProtocol}${assetsBucketUrl}`;
 }
 }, {
 key: "getApplicationIndexBucketUrl",
 value: function getApplicationIndexBucketUrl(_ref2) {
-var _context3;
 let tag = _ref2.tag,
 prNumber = _ref2.prNumber,
 applicationName = _ref2.applicationName,
@@ -134,31 +132,29 @@ let StorageProvider = /*#__PURE__*/function () {
 bucketProtocol,
 bucketNamespace
 });
-const applicationIndexBucketUrl =…
+const applicationIndexBucketUrl = `${applicationAssetsBucketUrl}/${_classPrivateFieldLooseBase(this, _bucketEnvironment$1)[_bucketEnvironment$1]}`;
 return applicationIndexBucketUrl;
 }
 }, {
 key: "getCdnUrl",
 value: function getCdnUrl(_ref3) {
-var…
+var _context2;
 let applicationName = _ref3.applicationName,
 prNumber = _ref3.prNumber,
 publicBaseUrl = _ref3.publicBaseUrl,
 excludeBucketRegion = _ref3.excludeBucketRegion;
-return _filterInstanceProperty(…
+return _filterInstanceProperty(_context2 = [publicBaseUrl, excludeBucketRegion ? null : _classPrivateFieldLooseBase(this, _bucketRegion$1)[_bucketRegion$1], this.getBucketNamespace(prNumber), applicationName]).call(_context2, Boolean).join('/');
 }
 }, {
 key: "getPublicBaseUrl",
 value: function getPublicBaseUrl(tag) {
-var _storageProvider$urls3;
 if (!_classPrivateFieldLooseBase(this, _bucketEnvironment$1)[_bucketEnvironment$1]) {
 throw new Error("'bucketEnvironment' is not defined. Required to determine 'publicBaseUrl'.");
 }
 const storageProvider = storageProviders[tag];
-const publicBaseUrl =…
+const publicBaseUrl = storageProvider.urls.public[_classPrivateFieldLooseBase(this, _bucketEnvironment$1)[_bucketEnvironment$1]] ?? storageProvider.urls.public.default;
 if (!publicBaseUrl) {
-…
-throw new Error(_concatInstanceProperty(_context5 = "'publicBaseUrl' is not defined for '".concat(tag, "' storage provider for ")).call(_context5, _classPrivateFieldLooseBase(this, _bucketEnvironment$1)[_bucketEnvironment$1], " or as default."));
+throw new Error(`'publicBaseUrl' is not defined for '${tag}' storage provider for ${_classPrivateFieldLooseBase(this, _bucketEnvironment$1)[_bucketEnvironment$1]} or as default.`);
 }
 return publicBaseUrl;
 }
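
All of the URL getters above share one assembly idiom: collect the candidate segments, drop the falsy ones, and join with `/`. A de-minified sketch of the `getCdnUrl` logic (the example values are hypothetical):

```js
// De-minified sketch of getCdnUrl above: optional segments (bucket region,
// PR namespace) that are null/undefined simply drop out of the URL.
function getCdnUrl({ publicBaseUrl, bucketRegion, bucketNamespace, applicationName }) {
  return [publicBaseUrl, bucketRegion, bucketNamespace, applicationName]
    .filter(Boolean)
    .join('/');
}

console.log(
  getCdnUrl({
    publicBaseUrl: 'https://cdn.example.com', // hypothetical
    bucketRegion: 'eu',
    bucketNamespace: 'mc-1234',
    applicationName: 'application-products',
  })
);
// -> 'https://cdn.example.com/eu/mc-1234/application-products'
```
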
@@ -316,7 +312,7 @@ function getStorageProvider(storageProvider, config) {
 case 's3':
 return new AwsStorageProvider(config);
 default:
-throw new Error(…
+throw new Error(`Storage provider ${storageProvider} not supported`);
 }
 }
 
@@ -339,7 +335,6 @@ let GoogleStorageUploadScriptsGenerator = /*#__PURE__*/function () {
 return _createClass(GoogleStorageUploadScriptsGenerator, [{
 key: "getApplicationIndexUploadScript",
 value: function getApplicationIndexUploadScript(_ref) {
-var _context, _context2, _context3, _context4, _context5, _context6, _context7, _context8, _context9, _context10;
 let packageManagerName = _ref.packageManagerName,
 bucketUrl = _ref.bucketUrl,
 cdnUrl = _ref.cdnUrl,
@@ -349,16 +344,106 @@ let GoogleStorageUploadScriptsGenerator = /*#__PURE__*/function () {
 if (!_classPrivateFieldLooseBase(this, _bucketEnvironment)[_bucketEnvironment]) {
 throw new Error("Missing 'bucketEnvironment' when generating application index.");
 }
-return…
+return `
+#!/usr/bin/env bash
+
+set -e
+
+echo "Uploading compiled ${applicationIndexOutFile} to Google Storage bucket ${bucketUrl}"
+
+gcloud storage cp \\
+"$(dirname "$0")/${applicationIndexOutFile}" \\
+"${bucketUrl}/" \\
+-z html \\
+--content-type="text/html" \\
+--cache-control="public,max-age=0,no-transform"
+
+echo "Creating version.json and uploading it to bucket ${bucketUrl}"
+
+NODE_ENV=production ${packageManagerName} application-cli create-version \\
+--version-url=${cdnUrl}/${_classPrivateFieldLooseBase(this, _bucketEnvironment)[_bucketEnvironment]}/version.json \\
+--build-revision=${buildRevision} \\
+--build-number=${buildNumber} \\
+--out-file=$(dirname "$0")/version.json
+
+gcloud storage cp \\
+"$(dirname "$0")/version.json" \\
+"${bucketUrl}/" \\
+-z json \\
+--content-type="application/json" \\
+--cache-control="public,max-age=0,no-transform"
+`;
 }
 }, {
 key: "getProductionBundlesUploadScript",
 value: function getProductionBundlesUploadScript(_ref2) {
-var _context11, _context12, _context13, _context14, _context15, _context16, _context17, _context18;
 let bucketUrl = _ref2.bucketUrl,
 assetsPath = _ref2.assetsPath,
 skipMenu = _ref2.skipMenu;
-return…
+return `
+#!/usr/bin/env bash
+
+set -e
+
+# NOTES:
+# https://cloud.google.com/sdk/gcloud/reference/storage/cp
+# 1. The '-z' option triggers compressing the assets before
+# uploading them and sets the 'Content-Encoding' to 'gzip'.
+# 2. The 'Accept-encoding: gzip' is set automatically by the 'gcloud storage'.
+# 3. The 'max-age' is set to 1 year which is considered the maximum
+# "valid" lifetime of an asset to be cached.
+# 4. The '-n' will skip uploading existing files and prevents them to
+# be overwritten
+echo "Uploading static assets to Google Storage bucket ${bucketUrl}"
+
+gcloud storage cp \\
+${assetsPath}/public/{*.css,*.js,*.js.map,*.html} \\
+"${bucketUrl}" \\
+-n \\
+-z js,css \\
+--cache-control="public,max-age=31536000,no-transform"
+
+# We need to upload the PNG and HTML files separately because we want them
+# to be able to overwrite the existing files (if any). For instance, the
+# file or the favicons.
+# This is controlled with the '-n' option (which is used for the JS and CSS
+# as we don't want to overwrite them)
+gcloud storage cp \\
+${assetsPath}/public/{*.png,robots.txt} \\
+"${bucketUrl}" \\
+-z txt \\
+--cache-control="public,max-age=31536000,no-transform"
+
+if ${skipMenu}; then
+echo "Skipping menu.json upload"
+else
+echo "Uploading menu.json to bucket ${bucketUrl}"
+# NOTE: somehow the 'cache-control:private' doesn't work.
+# I mean, the file is uploaded with the correct metadata but when I fetch
+# the file the response contains the header
+# 'cache-control: public,max-age=31536000,no-transform', even though the
+# documentation clearly states that by marking the header as 'private' will
+# disable the cache (for publicly readable objects).
+# https://cloud.google.com/storage/docs/gsutil/addlhelp/WorkingWithObjectMetadata#cache-control
+# However, I found out that, by requesting the file with any RANDOM
+# query parameter, will instruct the storage to return a 'fresh' object
+# (without any cache control).
+# Unofficial source: https://stackoverflow.com/a/49052895
+# This seems to be the 'easiest' option to 'disable' the cache for public
+# objects. Other alternative approaces are:
+# * make the object private with some simple ACL (private objects are not cached)
+# * suffix the file name with e.g. the git SHA, so we have different files
+# for each upload ('index.html.template-\${CIRCLE_SHA1}'). The server knows
+# the git SHA on runtime and can get the correct file when it starts.
+# * find out why the 'private' cache control does not work
+gcloud storage cp \\
+${assetsPath}/menu.json \\
+${bucketUrl} \\
+-z json \\
+--content-type="application/json" \\
+--cache-control="public,max-age=0,no-transform"
+fi
+`;
 }
 }]);
 }();
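
The big additions in this hunk replace helper-heavy string building with one multi-line template literal per script: the generator interpolates its JavaScript arguments once at generation time, while everything else — including `$(dirname "$0")` — stays literal for bash to evaluate when the script runs. A reduced sketch of the pattern (the signature and names here are hypothetical, not the package's):

```js
// Reduced sketch: JS interpolation happens when the script is generated;
// $(dirname "$0") contains no `${` and is left for bash to resolve at run time.
function getUploadScript({ bucketUrl, outFile }) {
  return `
#!/usr/bin/env bash

set -e

echo "Uploading compiled ${outFile} to Google Storage bucket ${bucketUrl}"

gcloud storage cp \\
  "$(dirname "$0")/${outFile}" \\
  "${bucketUrl}/" \\
  --cache-control="public,max-age=0,no-transform"
`;
}

console.log(getUploadScript({ bucketUrl: 'gs://example-bucket', outFile: 'application.html' }));
```
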
@@ -381,23 +466,97 @@ let AwsStorageUploadScriptsGenerator = /*#__PURE__*/function () {
 return _createClass(AwsStorageUploadScriptsGenerator, [{
 key: "getApplicationIndexUploadScript",
 value: function getApplicationIndexUploadScript(_ref3) {
-var _context19, _context20, _context21, _context22, _context23, _context24, _context25, _context26, _context27, _context28, _context29;
 let packageManagerName = _ref3.packageManagerName,
 bucketUrl = _ref3.bucketUrl,
 cdnUrl = _ref3.cdnUrl,
 buildRevision = _ref3.buildRevision,
 buildNumber = _ref3.buildNumber,
 applicationIndexOutFile = _ref3.applicationIndexOutFile;
-return…
+return `
+#!/usr/bin/env bash
+
+echo "Uploading static assets to Amazon S3 bucket ${bucketUrl}"
+
+set -e
+
+aws s3 cp "$(dirname "$0")/${applicationIndexOutFile}" \\
+"${bucketUrl}/" \\
+--content-type="text/html" \\
+--cache-control="public,max-age=0,no-transform" \\
+--profile ${_classPrivateFieldLooseBase(this, _bucketRegion2)[_bucketRegion2]}
+
+echo "Creating version.json and uploading it to bucket ${bucketUrl}"
+
+NODE_ENV=production ${packageManagerName} application-cli create-version \\
+--version-url=${cdnUrl}/${_classPrivateFieldLooseBase(this, _bucketEnvironment2)[_bucketEnvironment2]}/version.json \\
+--build-revision=${buildRevision} \\
+--build-number=${buildNumber} \\
+--out-file=$(dirname "$0")/version.json
+
+aws s3 cp "$(dirname "$0")/version.json" \\
+"${bucketUrl}/" \\
+--content-type="application/json" \\
+--cache-control="public,max-age=0,no-transform" \\
+--profile ${_classPrivateFieldLooseBase(this, _bucketRegion2)[_bucketRegion2]}
+`;
 }
 }, {
 key: "getProductionBundlesUploadScript",
 value: function getProductionBundlesUploadScript(_ref4) {
-var _context30, _context31, _context32, _context33, _context34, _context35, _context36, _context37, _context38, _context39, _context40;
 let bucketUrl = _ref4.bucketUrl,
 assetsPath = _ref4.assetsPath,
 skipMenu = _ref4.skipMenu;
-return…
+return `
+#!/usr/bin/env bash
+
+echo "Uploading static assets to Amazon S3 bucket ${bucketUrl}"
+
+set -e
+
+# NOTE:
+# The sync command on the AWS CLI is different to the -n option on the gcloud CLI.
+# Sync will only upload files that are not already in the bucket, but it will skip existing ones
+# that have been changed locally.
+# The -n option on the gcloud CLI will skip uploading existing files and prevents them to be overwritten.
+# https://docs.aws.amazon.com/cli/latest/reference/s3/sync.html
+# https://cloud.google.com/sdk/gcloud/reference/storage/cp
+#
+# Compression (gzip) is enabled on CloudFront by default. Hence compression does happing while uploading.
+# https://docs.aws.amazon.com/AmazonCloudFront/latest/DeveloperGuide/ServingCompressedFiles.html#compressed-content-cloudfront-configuring
+aws s3 sync ${assetsPath}/public \\
+"${bucketUrl}" \\
+--exclude "*" \\
+--include "*.css" \\
+--include "*.js" \\
+--include "*.js.map" \\
+--include "*.html" \\
+--cache-control="public,max-age=31536000,no-transform" \\
+--profile ${_classPrivateFieldLooseBase(this, _bucketRegion2)[_bucketRegion2]}
+
+# We need to upload the PNG and HTML files separately because we want them
+# to be able to overwrite the existing files (if any). For instance, the
+# file or the favicons.
+aws s3 cp ${assetsPath}/public \\
+"${bucketUrl}" \\
+--recursive \\
+--exclude "*" \\
+--include "*.png" \\
+--include "robots.txt" \\
+--cache-control="public,max-age=31536000,no-transform" \\
+--profile ${_classPrivateFieldLooseBase(this, _bucketRegion2)[_bucketRegion2]}
+
+if ${skipMenu}; then
+echo "Skipping menu.json upload"
+else
+echo "Uploading menu.json to bucket ${bucketUrl}"
+
+aws s3 cp ${assetsPath}/menu.json \\
+"${bucketUrl}/" \\
+--content-type="application/json" \\
+--cache-control="public,max-age=0,no-transform" \\
+--profile ${_classPrivateFieldLooseBase(this, _bucketRegion2)[_bucketRegion2]}
+fi
+`;
 }
 }]);
 }();
@@ -408,7 +567,7 @@ function getUploadScriptsGenerator(storageProvider, config) {
 case 's3':
 return new AwsStorageUploadScriptsGenerator(config);
 default:
-throw new Error(…
+throw new Error(`Storage provider ${storageProvider} not supported`);
 }
 }
 
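
Both `getStorageProvider` and `getUploadScriptsGenerator` are small factory switches keyed on the provider tag; only their error messages changed in this release. A sketch of the shape, assuming the `'gs'` branch mirrors the visible `'s3'` one (the stub classes here only carry the config):

```js
// Stand-ins for the real generator classes shown in the diff.
class GoogleStorageUploadScriptsGenerator { constructor(config) { this.config = config; } }
class AwsStorageUploadScriptsGenerator { constructor(config) { this.config = config; } }

function getUploadScriptsGenerator(storageProvider, config) {
  switch (storageProvider) {
    case 'gs': // assumed branch, by analogy with the visible 's3' case
      return new GoogleStorageUploadScriptsGenerator(config);
    case 's3':
      return new AwsStorageUploadScriptsGenerator(config);
    default:
      throw new Error(`Storage provider ${storageProvider} not supported`);
  }
}
```
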
@@ -422,7 +581,7 @@ function doesFileExist(filePath) {
 }
 
 function ownKeys$3(e, r) { var t = _Object$keys(e); if (_Object$getOwnPropertySymbols) { var o = _Object$getOwnPropertySymbols(e); r && (o = _filterInstanceProperty(o).call(o, function (r) { return _Object$getOwnPropertyDescriptor(e, r).enumerable; })), t.push.apply(t, o); } return t; }
-function _objectSpread$3(e) { for (var r = 1; r < arguments.length; r++) { var…
+function _objectSpread$3(e) { for (var r = 1; r < arguments.length; r++) { var _context, _context2; var t = null != arguments[r] ? arguments[r] : {}; r % 2 ? _forEachInstanceProperty(_context = ownKeys$3(Object(t), !0)).call(_context, function (r) { _defineProperty(e, r, t[r]); }) : _Object$getOwnPropertyDescriptors ? _Object$defineProperties(e, _Object$getOwnPropertyDescriptors(t)) : _forEachInstanceProperty(_context2 = ownKeys$3(Object(t))).call(_context2, function (r) { _Object$defineProperty(e, r, _Object$getOwnPropertyDescriptor(t, r)); }); } return e; }
 function loadDotenvFiles(_ref) {
 let dotenvPath = _ref.dotenvPath,
 cloudEnvironment = _ref.cloudEnvironment;
@@ -433,12 +592,12 @@ function loadDotenvFiles(_ref) {
 
 // Check if the given path exists.
 if (!doesFileExist(dotenvPath)) {
-throw new Error(…
+throw new Error(`The dotenv folder path does not exist: "${dotenvPath}".`);
 }
 
 // Load the environment values
 const sharedDotenvFile = '.env.production';
-const cloudDotenvFile =…
+const cloudDotenvFile = `.env.${cloudEnvironment}`;
 
 // The shared dotenv file across environments is optional
 const sharedProductionEnvironment = dotenv.config({
@@ -450,18 +609,16 @@ function loadDotenvFiles(_ref) {
 path: path.join(dotenvPath, cloudDotenvFile)
 });
 if (cloudSpecificProductionEnvironment.error) {
-…
-throw new Error(_concatInstanceProperty(_context = "Failed loading '".concat(cloudDotenvFile, "' in '")).call(_context, dotenvPath, "'. Make sure it exists."));
+throw new Error(`Failed loading '${cloudDotenvFile}' in '${dotenvPath}'. Make sure it exists.`);
 }
 if (sharedProductionEnvironment.error) {
-…
-throw new Error(_concatInstanceProperty(_context2 = "Failed loading '".concat(sharedDotenvFile, "' in '")).call(_context2, dotenvPath, "'. Make sure it exists."));
+throw new Error(`Failed loading '${sharedDotenvFile}' in '${dotenvPath}'. Make sure it exists.`);
 }
 return _objectSpread$3(_objectSpread$3({}, sharedProductionEnvironment.parsed), cloudSpecificProductionEnvironment.parsed);
 }
 
 function ownKeys$2(e, r) { var t = _Object$keys(e); if (_Object$getOwnPropertySymbols) { var o = _Object$getOwnPropertySymbols(e); r && (o = _filterInstanceProperty(o).call(o, function (r) { return _Object$getOwnPropertyDescriptor(e, r).enumerable; })), t.push.apply(t, o); } return t; }
-function _objectSpread$2(e) { for (var r = 1; r < arguments.length; r++) { var…
+function _objectSpread$2(e) { for (var r = 1; r < arguments.length; r++) { var _context3, _context4; var t = null != arguments[r] ? arguments[r] : {}; r % 2 ? _forEachInstanceProperty(_context3 = ownKeys$2(Object(t), !0)).call(_context3, function (r) { _defineProperty(e, r, t[r]); }) : _Object$getOwnPropertyDescriptors ? _Object$defineProperties(e, _Object$getOwnPropertyDescriptors(t)) : _forEachInstanceProperty(_context4 = ownKeys$2(Object(t))).call(_context4, function (r) { _Object$defineProperty(e, r, _Object$getOwnPropertyDescriptor(t, r)); }); } return e; }
 function writeUploadScriptFile(_ref) {
 let fileName = _ref.fileName,
 fileContent = _ref.fileContent,
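
`loadDotenvFiles` layers two dotenv files — the shared `.env.production` and a cloud-specific `.env.<cloudEnvironment>` — with the cloud-specific values winning on collisions because they are spread last. A simplified sketch using the `dotenv` API the package depends on (error handling for the shared file omitted here, even though the real code throws for it too):

```js
import path from 'node:path';
import dotenv from 'dotenv';

// Simplified sketch of the layering above: cloud-specific values override
// shared production values because they are spread last.
function loadDotenvFiles({ dotenvPath, cloudEnvironment }) {
  const shared = dotenv.config({ path: path.join(dotenvPath, '.env.production') });
  const cloud = dotenv.config({ path: path.join(dotenvPath, `.env.${cloudEnvironment}`) });
  if (cloud.error) {
    throw new Error(`Failed loading '.env.${cloudEnvironment}' in '${dotenvPath}'. Make sure it exists.`);
  }
  return { ...shared.parsed, ...cloud.parsed };
}
```
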
@@ -473,7 +630,6 @@ function writeUploadScriptFile(_ref) {
 });
 }
 async function compileApplicationAssets(_ref2) {
-var _context, _context2;
 let cliFlags = _ref2.cliFlags,
 storageProvider = _ref2.storageProvider,
 uploadScriptsGenerator = _ref2.uploadScriptsGenerator,
@@ -488,7 +644,7 @@ async function compileApplicationAssets(_ref2) {
 skipMenu: cliFlags.skipMenu
 });
 const parsedApplicationAssetsUploadScriptFile = path$1.parse(cliFlags.applicationAssetsUploadScriptOutFile);
-const applicationAssetsUploadScriptFileName =…
+const applicationAssetsUploadScriptFileName = `${parsedApplicationAssetsUploadScriptFile.name}-${storageProvider.getBucketRegion()}${parsedApplicationAssetsUploadScriptFile.ext}`;
 writeUploadScriptFile({
 fileName: applicationAssetsUploadScriptFileName,
 fileContent: applicationAssetsUploadScriptContent,
@@ -521,7 +677,7 @@ async function compileEnvironmentApplicationIndexes(_ref3) {
 })), {}, {
 // The trailing slash is important to indicate to the CSP directive that all the resources
 // under that path should be allowed.
-MC_CDN_URL:…
+MC_CDN_URL: `${cdnUrl}/`
 }, cliFlags.mcUrl ? {
 MC_URL: cliFlags.mcUrl
 } : {}), cliFlags.mcApiUrl ? {
@@ -578,7 +734,7 @@ async function compileEnvironmentApplicationIndexes(_ref3) {
 }
 }
 async function command$4(cliFlags, cwd) {
-var…
+var _context;
 const storageBucketConfig = await loadStorageBucketsConfig();
 const applicationDirectory = getApplicationDirectory(cwd);
 let assetsPath;
@@ -595,12 +751,12 @@ async function command$4(cliFlags, cwd) {
 assetsPath
 };
 const defaultStorageProviders = [storageProviders.gs.tag];
-const taskList = new Listr(_mapInstanceProperty(…
+const taskList = new Listr(_mapInstanceProperty(_context = _Object$entries(storageBucketConfig)).call(_context, _ref4 => {
 let _ref5 = _slicedToArray(_ref4, 2),
 bucketRegion = _ref5[0],
 bucketEnvironmentConfigs = _ref5[1];
 return {
-title:…
+title: `Compiling for bucket region ${bucketRegion}`,
 task: (_bucketRegionCtx, bucketRegionTask) => {
 // NOTE: Application assets need to be compiled
 // for all storage providers once per region.
@@ -615,7 +771,7 @@ async function command$4(cliFlags, cwd) {
 const storageProvider = getStorageProvider(storageProviderTag, storageProviderConfig);
 const uploadScriptsGenerator = getUploadScriptsGenerator(storageProviderTag, uploadScriptsGeneratorConfig);
 return {
-title:…
+title: `Compiling application assets for '${storageProviderTag}'`,
 task: () => compileApplicationAssets({
 cliFlags,
 storageProvider,
@@ -633,14 +789,14 @@ async function command$4(cliFlags, cwd) {
 bucketEnvironment
 };
 return {
-title:…
+title: `Compiling for cloud environment '${cloudEnvironment}'`,
 task: (_storageProviderCtx, storageProviderTask) => {
-var…
-const applicationIndexTasksForStorageProviders = _mapInstanceProperty(…
+var _context2;
+const applicationIndexTasksForStorageProviders = _mapInstanceProperty(_context2 = storageProviders || defaultStorageProviders).call(_context2, storageProviderTag => {
 const storageProvider = getStorageProvider(storageProviderTag, storageProviderConfig);
 const uploadScriptsGenerator = getUploadScriptsGenerator(storageProviderTag, storageProviderConfig);
 return {
-title:…
+title: `Compiling application index for storage provider '${storageProviderTag}'`,
 task: () => {
 return compileEnvironmentApplicationIndexes({
 cliFlags,
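
The title changes above all sit inside one nested Listr task tree: a task per bucket region, with subtasks per storage provider and cloud environment. A reduced sketch of that layout using the listr2 API from the package's dependencies (the config shapes are hypothetical):

```js
import { Listr } from 'listr2';

// Reduced sketch of the nested task tree built above.
const storageBucketConfig = { eu: {}, us: {} }; // hypothetical region map
const providerTags = ['gs'];

const taskList = new Listr(
  Object.entries(storageBucketConfig).map(([bucketRegion]) => ({
    title: `Compiling for bucket region ${bucketRegion}`,
    task: (_ctx, regionTask) =>
      regionTask.newListr(
        providerTags.map((tag) => ({
          title: `Compiling application assets for '${tag}'`,
          task: async () => {
            /* compileApplicationAssets(...) in the real code */
          },
        }))
      ),
  }))
);

await taskList.run();
```
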
@@ -696,58 +852,52 @@ const mapLabelAllLocalesWithDefaults = (labelAllLocales, defaultLabel) => {
 */
 
 const mapApplicationMenuConfigToGraqhQLMenuJson = config => {
-var…
+var _context2;
 const entryPointUriPath = config.env.entryPointUriPath;
 
 // @ts-expect-error: the `accountLinks` is not explicitly typed as it's only used by the account app.
-const accountLinks =…
+const accountLinks = config.env.__DEVELOPMENT__?.accountLinks ?? [];
 if (accountLinks.length > 0) {
-return _mapInstanceProperty(accountLinks).call(accountLinks, menuLink => {
-…
-…
-…
-…
-…
-…
-…
-featureToggle: (_menuLink$featureTogg = menuLink.featureToggle) !== null && _menuLink$featureTogg !== void 0 ? _menuLink$featureTogg : null
-};
-});
+return _mapInstanceProperty(accountLinks).call(accountLinks, menuLink => ({
+key: menuLink.uriPath,
+uriPath: menuLink.uriPath,
+labelAllLocales: mapLabelAllLocalesWithDefaults(menuLink.labelAllLocales, menuLink.defaultLabel),
+permissions: menuLink.permissions ?? [],
+// @ts-ignore: not defined in schema, as it's only used internally.
+featureToggle: menuLink.featureToggle ?? null
+}));
 }
-const menuLinks =…
+const menuLinks = config.env.__DEVELOPMENT__?.menuLinks;
 return {
 key: entryPointUriPath,
 uriPath: entryPointUriPath,
 icon: menuLinks.icon,
-labelAllLocales: mapLabelAllLocalesWithDefaults(menuLinks…
+labelAllLocales: mapLabelAllLocalesWithDefaults(menuLinks?.labelAllLocales, menuLinks?.defaultLabel),
 permissions: menuLinks.permissions,
 // @ts-ignore: not defined in schema, as it's only used internally.
-featureToggle:…
+featureToggle: menuLinks.featureToggle ?? null,
 // @ts-ignore: not defined in schema, as it's only used internally.
-menuVisibility:…
+menuVisibility: menuLinks.menuVisibility ?? null,
 // @ts-ignore: not defined in schema, as it's only used internally.
-actionRights:…
+actionRights: menuLinks.actionRights ?? null,
 // @ts-ignore: not defined in schema, as it's only used internally.
-dataFences:…
-submenu: _mapInstanceProperty(_context2 = menuLinks.submenuLinks).call(_context2, submenuLink => {
-…
-…
-…
-…
-…
-…
-…
-…
-…
-…
-…
-…
-…
-dataFences: (_submenuLink$dataFenc = submenuLink.dataFences) !== null && _submenuLink$dataFenc !== void 0 ? _submenuLink$dataFenc : null
-};
-}),
+dataFences: menuLinks.dataFences ?? null,
+submenu: _mapInstanceProperty(_context2 = menuLinks.submenuLinks).call(_context2, submenuLink => ({
+key: submenuLink.uriPath.replace('/', '-'),
+uriPath: submenuLink.uriPath,
+labelAllLocales: mapLabelAllLocalesWithDefaults(submenuLink.labelAllLocales, submenuLink.defaultLabel),
+permissions: submenuLink.permissions,
+// @ts-ignore: not defined in schema, as it's only used internally.
+featureToggle: submenuLink.featureToggle ?? null,
+// @ts-ignore: not defined in schema, as it's only used internally.
+menuVisibility: submenuLink.menuVisibility ?? null,
+// @ts-ignore: not defined in schema, as it's only used internally.
+actionRights: submenuLink.actionRights ?? null,
+// @ts-ignore: not defined in schema, as it's only used internally.
+dataFences: submenuLink.dataFences ?? null
+})),
 // @ts-ignore: not defined in schema, as it's only used internally.
-shouldRenderDivider:…
+shouldRenderDivider: menuLinks.shouldRenderDivider ?? false
 };
 };
 async function command$3(cliFlags, cwd) {
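
The rewritten menu mapping replaces Babel's guard-variable expansions with native `?.` and `??`; the semantics are identical. A side-by-side sketch (the object here is hypothetical):

```js
const menuLink = { uriPath: 'products' }; // hypothetical, featureToggle unset

// Old compiled form (roughly what the removed lines did):
var _menuLink$featureToggle;
const oldValue =
  (_menuLink$featureToggle = menuLink.featureToggle) !== null && _menuLink$featureToggle !== void 0
    ? _menuLink$featureToggle
    : null;

// New form, emitted as-is now that the compilation target supports it:
const newValue = menuLink.featureToggle ?? null;

console.log(oldValue === newValue); // -> true (both null)
```
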
@@ -827,51 +977,43 @@ async function command$2(cliFlags) {
 */
 const git = {
 // https://git-scm.com/docs/git-merge-base
-base: (baseBranch, headRevision) => {
-var _context;
-return _concatInstanceProperty(_context = "git merge-base ".concat(baseBranch, " ")).call(_context, headRevision);
-},
+base: (baseBranch, headRevision) => `git merge-base ${baseBranch} ${headRevision}`,
 // https://git-scm.com/docs/git-diff
-changedFiles: (mergeRevision, headRevision) => {
-…
-return _concatInstanceProperty(_context2 = "git diff --name-only ".concat(mergeRevision, " ")).call(_context2, headRevision);
-},
-commitMessage: headRevision => "git log --format=oneline -n 1 ".concat(headRevision)
+changedFiles: (mergeRevision, headRevision) => `git diff --name-only ${mergeRevision} ${headRevision}`,
+commitMessage: headRevision => `git log --format=oneline -n 1 ${headRevision}`
 };
 const helpers = {
 async writeOutDotEnvFile(cliFlags, cwd, matchingTriggers) {
-var…
+var _context;
 // If desired read the env file and write out the matching triggers.
 if (!cliFlags.outEnvFile) {
 return;
 }
 const filePath = path$1.join(fs.realpathSync(cwd), cliFlags.outEnvFile);
-const fileContents = _mapInstanceProperty(…
-var _context5;
+const fileContents = _mapInstanceProperty(_context = _Object$entries(matchingTriggers)).call(_context, _ref => {
 let _ref2 = _slicedToArray(_ref, 2),
 triggerName = _ref2[0],
 triggerValue = _ref2[1];
-const triggerNameForEnvFile =…
+const triggerNameForEnvFile = `${snakeCase(triggerName).toUpperCase()}`;
 
 // General pipeline optimization hints are not transformed
 if (_startsWithInstanceProperty(triggerName).call(triggerName, 'allowPipelineOptimizations')) {
-…
-return _concatInstanceProperty(_context4 = "".concat(triggerNameForEnvFile, "=")).call(_context4, triggerValue);
+return `${triggerNameForEnvFile}=${triggerValue}`;
 }
-return…
+return `DID_${triggerNameForEnvFile}_CHANGE=${triggerValue}`;
 }).join('\n');
 await fs.promises.writeFile(filePath, fileContents);
 if (!cliFlags.silent) {
-console.log(…
+console.log(`📝 Wrote out file to '${filePath}' with contents:`);
 console.log(fileContents);
 }
 },
 async getChangedFiles(cliFlags) {
-var…
+var _context2, _context3;
 const baseCmdResult = await command$5(git.base(cliFlags.baseBranch, cliFlags.headRevision));
 const mergeRevision = baseCmdResult.stdout;
 const changedFilesCmdResult = await command$5(git.changedFiles(mergeRevision, cliFlags.headRevision));
-const changedFiles = _filterInstanceProperty(…
+const changedFiles = _filterInstanceProperty(_context2 = _mapInstanceProperty(_context3 = changedFilesCmdResult.stdout.split('\n')).call(_context3, filePath => _trimInstanceProperty(filePath).call(filePath))).call(_context2, filePath => filePath.length > 0);
 return changedFiles;
 },
 async matchTriggersAgainstChangedFiles(cliFlags, config, changedFiles) {
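
The git helpers now return plain template-literal command strings; the surrounding flow runs them and splits the output. A sketch of that pipeline using execa 5 (the command runner listed in the package's dependencies) standing in for the internal `command$5`:

```js
import execa from 'execa'; // execa 5.x, as listed in package.json

// Sketch of the change-detection flow above: find the merge base, list the
// files changed since it, and clean up the output.
async function getChangedFiles(baseBranch, headRevision) {
  const base = await execa.command(`git merge-base ${baseBranch} ${headRevision}`);
  const diff = await execa.command(`git diff --name-only ${base.stdout} ${headRevision}`);
  return diff.stdout
    .split('\n')
    .map((filePath) => filePath.trim())
    .filter((filePath) => filePath.length > 0);
}

console.log(await getChangedFiles('main', 'HEAD'));
```
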
@@ -879,7 +1021,6 @@ const helpers = {
 
 // Evaluate each trigger against each file.
 _forEachInstanceProperty(config).call(config, async trigger => {
-var _trigger$exclude;
 const hasTriggerBeenInitialized = typeof matchedTriggers[trigger.name] === 'number';
 
 // Given the trigger with this name was never evaluated it has to be defaulted to 0.
@@ -897,10 +1038,10 @@ const helpers = {
 ignore: trigger.ignore
 });
 if (!cliFlags.silent && anyFileChangedForTrigger) {
-console.log(…
+console.log(`ℹ️ Files for trigger ${trigger.name} changed.`);
 }
 let onlyExcludedFilesChangedForTrigger = false;
-if (…
+if (trigger.exclude?.length > 0) {
 // NOTE: `micromatch.every` evaluates if every file matches
 // every pattern.
 // We need to evaluate if every file matches some pattern.
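
Trigger matching is delegated to micromatch: a trigger fires when some changed file matches its patterns, and the new `trigger.exclude?.length > 0` guard then checks whether only excluded files changed. A small sketch with the micromatch API (the trigger shape here is hypothetical; only the `ignore` and `exclude` fields appear in the diff):

```js
import micromatch from 'micromatch';

// Hypothetical trigger and change set to illustrate the matching above.
const trigger = {
  name: 'app',
  patterns: ['packages/app/**'],
  ignore: ['**/*.md'],
  exclude: ['**/*.test.ts'],
};
const changedFiles = ['packages/app/src/index.ts', 'packages/app/src/index.test.ts'];

// Does any changed file match any trigger pattern (minus ignores)?
const anyFileChangedForTrigger = micromatch.some(changedFiles, trigger.patterns, {
  ignore: trigger.ignore,
});

// Did only excluded files change? Every file must match some exclude pattern
// (micromatch.every would instead require every file to match every pattern).
const onlyExcludedFilesChangedForTrigger =
  trigger.exclude?.length > 0 &&
  changedFiles.every((file) => micromatch.isMatch(file, trigger.exclude));

console.log(anyFileChangedForTrigger); // -> true
console.log(onlyExcludedFilesChangedForTrigger); // -> false
```
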
@@ -911,7 +1052,7 @@ const helpers = {
 });
 }
 if (!cliFlags.silent && onlyExcludedFilesChangedForTrigger) {
-console.log(…
+console.log(`ℹ️ Only excluded files for trigger ${trigger.name} changed.`);
 }
 if (onlyExcludedFilesChangedForTrigger) {
 matchedTriggers[trigger.name] = 0;
@@ -928,19 +1069,18 @@ async function command$1(cliFlags, config, cwd) {
 const isDevelopmentBranch = cliFlags.branch !== cliFlags.baseBranch;
 const triggersContainingSharedFiles = _filterInstanceProperty(config).call(config, trigger => trigger.containsSharedFiles);
 if (!cliFlags.silent) {
-…
-console.log(…
-console.log(…
-console.log(_concatInstanceProperty(_context8 = "\uD83D\uDEA7 Comparing '".concat(cliFlags.baseBranch, "' against '")).call(_context8, cliFlags.headRevision, "' to determine changed files."));
+console.log(`ℹ️ Pipeline optimizations are ${enablePipelineOptimizations ? 'enabled' : 'disabled'}.`);
+console.log(`ℹ️ Changes have been commited to the ${isDevelopmentBranch ? 'a development' : 'the main'} branch.`);
+console.log(`🚧 Comparing '${cliFlags.baseBranch}' against '${cliFlags.headRevision}' to determine changed files.`);
 }
 
 // Collect and parse changed files from git comparing base and head revision.
 const changedFiles = await helpers.getChangedFiles(cliFlags);
 if (!cliFlags.silent) {
 if (changedFiles.length === 0) {
-console.log(…
+console.log(`ℹ️ No changes found.`);
 } else {
-console.log(…
+console.log(`ℹ️ ${changedFiles.length} changes found.`);
 }
 }
 
@@ -951,17 +1091,17 @@ async function command$1(cliFlags, config, cwd) {
 const hasCommitMessageTrigger = commitMessage && _includesInstanceProperty(commitMessage).call(commitMessage, '[ci all]');
 const doesSharedTriggerMatch = _someInstanceProperty(triggersContainingSharedFiles).call(triggersContainingSharedFiles, triggerContainingSharedFiles => matchedTriggers[triggerContainingSharedFiles.name] === 1);
 if (!cliFlags.silent) {
-console.log(…
+console.log(`ℹ️ The git commit message ${hasCommitMessageTrigger ? 'does' : 'does not'} contain a [ci all] trigger.`);
 }
 const doesPackageFolderTriggerMatch = matchedTriggers[cliFlags.triggerName] === 1;
 if (enablePipelineOptimizations && isDevelopmentBranch && !hasCommitMessageTrigger && !doesSharedTriggerMatch && !doesPackageFolderTriggerMatch) {
 if (!cliFlags.silent) {
-console.log(…
+console.log(`ℹ️ No relevant changes found for ${cliFlags.triggerName}.`);
 }
 matchedTriggers['allowPipelineOptimizationsForTrigger'] = 1;
 } else {
 if (!cliFlags.silent) {
-console.log(…
+console.log(`ℹ️ Relevant changes found for ${cliFlags.triggerName}.`);
 }
 matchedTriggers['allowPipelineOptimizationsForTrigger'] = 0;
 }
@@ -1081,15 +1221,15 @@ function validateMenu(menuJson) {
 async function command(cliFlags) {
 const menuJsonPath = cliFlags.inputFile;
 const isAppbarMenu = cliFlags.navigation === 'top';
-if (!menuJsonPath) throw new Error(…
-if (!doesFileExist(menuJsonPath)) throw new Error(…
+if (!menuJsonPath) throw new Error(`--input-file cannot be empty. please provide the path of compiled menu.json`);
+if (!doesFileExist(menuJsonPath)) throw new Error(`The menu.json file doesn't exist: ${menuJsonPath}`);
 const menuJson = fs$1.readFileSync(menuJsonPath, 'utf-8');
 return validateMenu(JSON.parse(menuJson), isAppbarMenu ? appbarMenuSchema : navbarMenuSchema);
 }
 
 var pkgJson = {
 name: "@commercetools-frontend/application-cli",
-version: "3.0.1",
+version: "3.0.3",
 description: "Internal CLI to manage Merchant Center application deployments across various environments.",
 keywords: [
 "commercetools",
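
`validate-menu` reads the compiled file and runs it through the `jsonschema` validator (a listed dependency) against either the navbar or appbar schema. A reduced sketch of that step (the schema here is a minimal stand-in, not the package's real schema):

```js
import fs from 'node:fs';
import { validate } from 'jsonschema';

// Reduced sketch of the validation step above.
const navbarMenuSchema = { type: 'object', required: ['key', 'uriPath'] }; // stand-in schema
const menuJson = JSON.parse(fs.readFileSync('menu.json', 'utf-8'));

const result = validate(menuJson, navbarMenuSchema);
if (!result.valid) {
  throw new Error(result.errors.map((e) => e.stack).join('\n'));
}
```
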
@@ -1115,26 +1255,26 @@ var pkgJson = {
 "@babel/core": "^7.22.11",
 "@babel/runtime": "^7.21.0",
 "@babel/runtime-corejs3": "^7.21.0",
-"@commercetools-frontend/application-config": "22.…
-"@commercetools-frontend/constants": "22.…
-"@commercetools-frontend/l10n": "22.…
-"@manypkg/find-root": "2.2.…
+"@commercetools-frontend/application-config": "22.30.3",
+"@commercetools-frontend/constants": "22.30.3",
+"@commercetools-frontend/l10n": "22.30.3",
+"@manypkg/find-root": "2.2.2",
 cac: "^6.7.14",
 cosmiconfig: "9.0.0",
 dotenv: "16.4.5",
 execa: "5.1.1",
 jsonschema: "^1.4.1",
-listr2: "8.2.…
+listr2: "8.2.4",
 lodash: "4.17.21",
 micromatch: "4.0.7",
 "node-fetch": "2.7.0",
-"ts-deepmerge": "7.0.…
+"ts-deepmerge": "7.0.1"
 },
 devDependencies: {
 "@tsconfig/node20": "20.1.4",
 "@types/lodash": "^4.14.198",
-"@types/micromatch": "4.0.…
-"@types/node": "20.14.…
+"@types/micromatch": "4.0.9",
+"@types/node": "20.14.14",
 typescript: "5.2.2"
 },
 engines: {
@@ -1166,7 +1306,7 @@ const run = async () => {
 // Default command
 cli.command('').usage('\n\n Compile deployments and menus and create versions for MC applications').action(cli.outputHelp);
 const usageCompileDeployment = 'Compile the deployments for an application for all environments.';
-cli.command('compile-deployments', usageCompileDeployment).usage(…
+cli.command('compile-deployments', usageCompileDeployment).usage(`compile-deployments \n\n ${usageCompileDeployment}`).option('--application-name <string>', '(required) The name of the application being compiled for example application-products.').option('--dotenv-folder [string]', '(optional) The path to a folder containing a dotenv file ".env.production" and a cloud-environment specific dotenv file (for example ".env.gcp-production-eu"). Those values are parsed and merged together to be used by the `mc-scripts compile-html` command.').option('--pr-number [string]', '(optional) A pull request number determining a scoped storage bucket for the deployment. Please use it carefully.').option('--mc-url [string]', '(optional) The MC URL of the deployment. This is usually inferred from the env file and overwrites the value. Please use it carefully.').option('--mc-api-url [string]', '(optional) The MC API URL of the deployment. This is usually inferred from the env file and overwrites the value. Please use it carefully.').option('--application-index-out-file [path]', '(optional) The name of the application index file.', {
 default: 'application.html'
 }).option('--application-index-upload-script-out-file [path]', '(optional) The name of the the application index upload script file.', {
 default: 'upload-index.sh'
@@ -1178,15 +1318,15 @@ const run = async () => {
 await command$4(options, cwd);
 });
 const usageCompileMenu = 'Compile the menu links of an application into a `menu.json`. This is only required for internal applications';
-cli.command('compile-menu', usageCompileMenu).usage(…
+cli.command('compile-menu', usageCompileMenu).usage(`compile-menu \n\n ${usageCompileMenu}`).option('--dotenv-folder [string]', '(optional) The path to a folder containing a dotenv file `.env.production` and a cloud-environment specific dotenv file (for example `.env.gcp-production-eu`). Those values are parsed and merged together to be used by the application config.').action(async options => {
 await command$3(options, cwd);
 });
 const usageValidateMenu = 'Validate compiled `menu.json` file';
-cli.command('validate-menu', usageValidateMenu).usage(…
+cli.command('validate-menu', usageValidateMenu).usage(`validate-menu \n\n ${usageValidateMenu}`).option('--input-file <path>', '(required) The path to the `menu.json` file to be validated.').option('--navigation [string]', '(optional) Location of the menu navigation. Possible values are `top`.').action(async options => {
 await command(options);
 });
 const usageCreateVersion = 'Output a JSON string about the information in the `version.json` for a deployment, including the updated list of rollbacks.';
-cli.command('create-version', usageCreateVersion).usage(…
+cli.command('create-version', usageCreateVersion).usage(`create-version \n\n ${usageCreateVersion}`).option('--version-url <url>', "(required) The path of an application's current `version.json` within the storage bucket.").option('--rollbacks [int]', '(optional) The number of max rollbacks to keep', {
 default: 15
 }).option('--out-file [path]', '(optional) The path to the file where to write the JSON. If not specified, the JSON is printed to stdout.').action(async options => {
 await command$2(options);
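
All of the CLI wiring above follows the same cac pattern (cac is a listed dependency), now with template-literal usage strings. A trimmed sketch of one command registration:

```js
import cac from 'cac';

// Trimmed sketch of the command wiring above.
const cli = cac('application-cli');

const usageValidateMenu = 'Validate compiled `menu.json` file';
cli
  .command('validate-menu', usageValidateMenu)
  .usage(`validate-menu \n\n ${usageValidateMenu}`)
  .option('--input-file <path>', '(required) The path to the `menu.json` file to be validated.')
  .option('--navigation [string]', '(optional) Location of the menu navigation. Possible values are `top`.')
  .action(async (options) => {
    // command(options) in the real code
    console.log(options);
  });

cli.help();
cli.parse();
```
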
@@ -1194,7 +1334,7 @@ const run = async () => {
 
 // Command: Evaluate change triggers
 const usageEvaluateChangeTriggers = 'Evaluates changed files against a base and evaluates them against defined triggers.';
-cli.command('evaluate-change-triggers', usageEvaluateChangeTriggers).usage(…
+cli.command('evaluate-change-triggers', usageEvaluateChangeTriggers).usage(`evaluate-change-triggers \n\n ${usageEvaluateChangeTriggers}`).option('--branch <string>', 'The branch of the pull request', {
 default: process.env.CIRCLE_BRANCH
 }).option('--base-branch <string>', 'The base revision of the git commit compare against (e.g. "main")').option('--head-revision <string>', 'The revision of the git head to compare with', {
 default: process.env.CIRCLE_SHA1