@commercetools-frontend/application-cli 0.0.0-FEC-212-react19-20250122084835
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +120 -0
- package/bin/cli.js +9 -0
- package/cli/dist/commercetools-frontend-application-cli-cli.cjs.d.ts +2 -0
- package/cli/dist/commercetools-frontend-application-cli-cli.cjs.dev.js +1390 -0
- package/cli/dist/commercetools-frontend-application-cli-cli.cjs.js +7 -0
- package/cli/dist/commercetools-frontend-application-cli-cli.cjs.prod.js +1390 -0
- package/cli/dist/commercetools-frontend-application-cli-cli.esm.js +1354 -0
- package/cli/package.json +4 -0
- package/dist/commercetools-frontend-application-cli.cjs.d.ts +2 -0
- package/dist/commercetools-frontend-application-cli.cjs.dev.js +18 -0
- package/dist/commercetools-frontend-application-cli.cjs.js +7 -0
- package/dist/commercetools-frontend-application-cli.cjs.prod.js +18 -0
- package/dist/commercetools-frontend-application-cli.esm.js +6 -0
- package/dist/declarations/src/cli.d.ts +2 -0
- package/dist/declarations/src/constants.d.ts +32 -0
- package/dist/declarations/src/index.d.ts +3 -0
- package/dist/declarations/src/storage-buckets-config.d.ts +13 -0
- package/dist/declarations/src/types.d.ts +145 -0
- package/dist/storage-buckets-config-30069ab7.cjs.dev.js +163 -0
- package/dist/storage-buckets-config-6c4043df.cjs.prod.js +163 -0
- package/dist/storage-buckets-config-fa564d15.esm.js +149 -0
- package/package.json +63 -0
|
@@ -0,0 +1,1390 @@
|
|
|
1
|
+
'use strict';
|
|
2
|
+
|
|
3
|
+
Object.defineProperty(exports, '__esModule', { value: true });
|
|
4
|
+
|
|
5
|
+
var cac = require('cac');
|
|
6
|
+
var _Object$keys = require('@babel/runtime-corejs3/core-js-stable/object/keys');
|
|
7
|
+
var _Object$getOwnPropertySymbols = require('@babel/runtime-corejs3/core-js-stable/object/get-own-property-symbols');
|
|
8
|
+
var _filterInstanceProperty = require('@babel/runtime-corejs3/core-js-stable/instance/filter');
|
|
9
|
+
var _Object$getOwnPropertyDescriptor = require('@babel/runtime-corejs3/core-js-stable/object/get-own-property-descriptor');
|
|
10
|
+
var _forEachInstanceProperty = require('@babel/runtime-corejs3/core-js-stable/instance/for-each');
|
|
11
|
+
var _Object$getOwnPropertyDescriptors = require('@babel/runtime-corejs3/core-js-stable/object/get-own-property-descriptors');
|
|
12
|
+
var _Object$defineProperties = require('@babel/runtime-corejs3/core-js-stable/object/define-properties');
|
|
13
|
+
var _Object$defineProperty = require('@babel/runtime-corejs3/core-js-stable/object/define-property');
|
|
14
|
+
var _slicedToArray = require('@babel/runtime-corejs3/helpers/slicedToArray');
|
|
15
|
+
var _defineProperty = require('@babel/runtime-corejs3/helpers/defineProperty');
|
|
16
|
+
var _mapInstanceProperty = require('@babel/runtime-corejs3/core-js-stable/instance/map');
|
|
17
|
+
var _Object$entries = require('@babel/runtime-corejs3/core-js-stable/object/entries');
|
|
18
|
+
var _Set = require('@babel/runtime-corejs3/core-js-stable/set');
|
|
19
|
+
var _flatMapInstanceProperty = require('@babel/runtime-corejs3/core-js-stable/instance/flat-map');
|
|
20
|
+
var storageBucketsConfig = require('../../dist/storage-buckets-config-30069ab7.cjs.dev.js');
|
|
21
|
+
var fs = require('node:fs');
|
|
22
|
+
var path$1 = require('node:path');
|
|
23
|
+
var listr2 = require('listr2');
|
|
24
|
+
var execa = require('execa');
|
|
25
|
+
var findRoot = require('@manypkg/find-root');
|
|
26
|
+
var path = require('path');
|
|
27
|
+
var _possibleConstructorReturn = require('@babel/runtime-corejs3/helpers/possibleConstructorReturn');
|
|
28
|
+
var _get = require('@babel/runtime-corejs3/helpers/get');
|
|
29
|
+
var _getPrototypeOf = require('@babel/runtime-corejs3/helpers/getPrototypeOf');
|
|
30
|
+
var _inherits = require('@babel/runtime-corejs3/helpers/inherits');
|
|
31
|
+
var _classCallCheck = require('@babel/runtime-corejs3/helpers/classCallCheck');
|
|
32
|
+
var _createClass = require('@babel/runtime-corejs3/helpers/createClass');
|
|
33
|
+
var _classPrivateFieldLooseBase = require('@babel/runtime-corejs3/helpers/classPrivateFieldLooseBase');
|
|
34
|
+
var _classPrivateFieldLooseKey = require('@babel/runtime-corejs3/helpers/classPrivateFieldLooseKey');
|
|
35
|
+
var _Reflect$construct = require('@babel/runtime-corejs3/core-js-stable/reflect/construct');
|
|
36
|
+
var dotenv = require('dotenv');
|
|
37
|
+
var fs$1 = require('fs');
|
|
38
|
+
var _findInstanceProperty = require('@babel/runtime-corejs3/core-js-stable/instance/find');
|
|
39
|
+
var _JSON$stringify = require('@babel/runtime-corejs3/core-js-stable/json/stringify');
|
|
40
|
+
var applicationConfig = require('@commercetools-frontend/application-config');
|
|
41
|
+
var l10n = require('@commercetools-frontend/l10n');
|
|
42
|
+
var _sliceInstanceProperty = require('@babel/runtime-corejs3/core-js-stable/instance/slice');
|
|
43
|
+
var _startsWithInstanceProperty = require('@babel/runtime-corejs3/core-js-stable/instance/starts-with');
|
|
44
|
+
var _trimInstanceProperty = require('@babel/runtime-corejs3/core-js-stable/instance/trim');
|
|
45
|
+
var _someInstanceProperty = require('@babel/runtime-corejs3/core-js-stable/instance/some');
|
|
46
|
+
var _everyInstanceProperty = require('@babel/runtime-corejs3/core-js-stable/instance/every');
|
|
47
|
+
var _includesInstanceProperty = require('@babel/runtime-corejs3/core-js-stable/instance/includes');
|
|
48
|
+
var micromatch = require('micromatch');
|
|
49
|
+
var snakeCase = require('lodash/snakeCase');
|
|
50
|
+
var jsonschema = require('jsonschema');
|
|
51
|
+
require('cosmiconfig');
|
|
52
|
+
require('ts-deepmerge');
|
|
53
|
+
require('lodash');
|
|
54
|
+
|
|
55
|
+
function _interopDefault (e) { return e && e.__esModule ? e : { 'default': e }; }
|
|
56
|
+
|
|
57
|
+
var _Object$keys__default = /*#__PURE__*/_interopDefault(_Object$keys);
|
|
58
|
+
var _Object$getOwnPropertySymbols__default = /*#__PURE__*/_interopDefault(_Object$getOwnPropertySymbols);
|
|
59
|
+
var _filterInstanceProperty__default = /*#__PURE__*/_interopDefault(_filterInstanceProperty);
|
|
60
|
+
var _Object$getOwnPropertyDescriptor__default = /*#__PURE__*/_interopDefault(_Object$getOwnPropertyDescriptor);
|
|
61
|
+
var _forEachInstanceProperty__default = /*#__PURE__*/_interopDefault(_forEachInstanceProperty);
|
|
62
|
+
var _Object$getOwnPropertyDescriptors__default = /*#__PURE__*/_interopDefault(_Object$getOwnPropertyDescriptors);
|
|
63
|
+
var _Object$defineProperties__default = /*#__PURE__*/_interopDefault(_Object$defineProperties);
|
|
64
|
+
var _Object$defineProperty__default = /*#__PURE__*/_interopDefault(_Object$defineProperty);
|
|
65
|
+
var _mapInstanceProperty__default = /*#__PURE__*/_interopDefault(_mapInstanceProperty);
|
|
66
|
+
var _Object$entries__default = /*#__PURE__*/_interopDefault(_Object$entries);
|
|
67
|
+
var _Set__default = /*#__PURE__*/_interopDefault(_Set);
|
|
68
|
+
var _flatMapInstanceProperty__default = /*#__PURE__*/_interopDefault(_flatMapInstanceProperty);
|
|
69
|
+
var fs__default = /*#__PURE__*/_interopDefault(fs);
|
|
70
|
+
var path__default$1 = /*#__PURE__*/_interopDefault(path$1);
|
|
71
|
+
var execa__default = /*#__PURE__*/_interopDefault(execa);
|
|
72
|
+
var path__default = /*#__PURE__*/_interopDefault(path);
|
|
73
|
+
var _Reflect$construct__default = /*#__PURE__*/_interopDefault(_Reflect$construct);
|
|
74
|
+
var dotenv__default = /*#__PURE__*/_interopDefault(dotenv);
|
|
75
|
+
var fs__default$1 = /*#__PURE__*/_interopDefault(fs$1);
|
|
76
|
+
var _findInstanceProperty__default = /*#__PURE__*/_interopDefault(_findInstanceProperty);
|
|
77
|
+
var _JSON$stringify__default = /*#__PURE__*/_interopDefault(_JSON$stringify);
|
|
78
|
+
var _sliceInstanceProperty__default = /*#__PURE__*/_interopDefault(_sliceInstanceProperty);
|
|
79
|
+
var _startsWithInstanceProperty__default = /*#__PURE__*/_interopDefault(_startsWithInstanceProperty);
|
|
80
|
+
var _trimInstanceProperty__default = /*#__PURE__*/_interopDefault(_trimInstanceProperty);
|
|
81
|
+
var _someInstanceProperty__default = /*#__PURE__*/_interopDefault(_someInstanceProperty);
|
|
82
|
+
var _everyInstanceProperty__default = /*#__PURE__*/_interopDefault(_everyInstanceProperty);
|
|
83
|
+
var _includesInstanceProperty__default = /*#__PURE__*/_interopDefault(_includesInstanceProperty);
|
|
84
|
+
var micromatch__default = /*#__PURE__*/_interopDefault(micromatch);
|
|
85
|
+
var snakeCase__default = /*#__PURE__*/_interopDefault(snakeCase);
|
|
86
|
+
|
|
87
|
+
function getApplicationDirectory(cwd) {
|
|
88
|
+
return fs__default["default"].realpathSync(cwd);
|
|
89
|
+
}
|
|
90
|
+
|
|
91
|
+
function resolveInApplication(relativePath, cwd) {
|
|
92
|
+
return path__default["default"].resolve(getApplicationDirectory(cwd), relativePath);
|
|
93
|
+
}
|
|
94
|
+
|
|
95
|
+
function isCI() {
|
|
96
|
+
// @ts-expect-error The env is sometimes overwritten by code to a boolean
|
|
97
|
+
return process.env.CI === true || process.env.CI === 'true';
|
|
98
|
+
}
|
|
99
|
+
|
|
100
|
+
function ownKeys$4(e, r) { var t = _Object$keys__default["default"](e); if (_Object$getOwnPropertySymbols__default["default"]) { var o = _Object$getOwnPropertySymbols__default["default"](e); r && (o = _filterInstanceProperty__default["default"](o).call(o, function (r) { return _Object$getOwnPropertyDescriptor__default["default"](e, r).enumerable; })), t.push.apply(t, o); } return t; }
|
|
101
|
+
function _objectSpread$4(e) { for (var r = 1; r < arguments.length; r++) { var _context3, _context4; var t = null != arguments[r] ? arguments[r] : {}; r % 2 ? _forEachInstanceProperty__default["default"](_context3 = ownKeys$4(Object(t), !0)).call(_context3, function (r) { _defineProperty(e, r, t[r]); }) : _Object$getOwnPropertyDescriptors__default["default"] ? _Object$defineProperties__default["default"](e, _Object$getOwnPropertyDescriptors__default["default"](t)) : _forEachInstanceProperty__default["default"](_context4 = ownKeys$4(Object(t))).call(_context4, function (r) { _Object$defineProperty__default["default"](e, r, _Object$getOwnPropertyDescriptor__default["default"](t, r)); }); } return e; }
|
|
102
|
+
function _callSuper(t, o, e) { return o = _getPrototypeOf(o), _possibleConstructorReturn(t, _isNativeReflectConstruct() ? _Reflect$construct__default["default"](o, e || [], _getPrototypeOf(t).constructor) : o.apply(t, e)); }
|
|
103
|
+
function _isNativeReflectConstruct() { try { var t = !Boolean.prototype.valueOf.call(_Reflect$construct__default["default"](Boolean, [], function () {})); } catch (t) {} return (_isNativeReflectConstruct = function () { return !!t; })(); }
|
|
104
|
+
var _bucketRegion$1 = /*#__PURE__*/_classPrivateFieldLooseKey("bucketRegion");
|
|
105
|
+
var _bucketEnvironment$1 = /*#__PURE__*/_classPrivateFieldLooseKey("bucketEnvironment");
|
|
106
|
+
let StorageProvider = /*#__PURE__*/function () {
|
|
107
|
+
function StorageProvider(config) {
|
|
108
|
+
_classCallCheck(this, StorageProvider);
|
|
109
|
+
_Object$defineProperty__default["default"](this, _bucketRegion$1, {
|
|
110
|
+
writable: true,
|
|
111
|
+
value: void 0
|
|
112
|
+
});
|
|
113
|
+
_Object$defineProperty__default["default"](this, _bucketEnvironment$1, {
|
|
114
|
+
writable: true,
|
|
115
|
+
value: void 0
|
|
116
|
+
});
|
|
117
|
+
_classPrivateFieldLooseBase(this, _bucketRegion$1)[_bucketRegion$1] = config.bucketRegion;
|
|
118
|
+
_classPrivateFieldLooseBase(this, _bucketEnvironment$1)[_bucketEnvironment$1] = config.bucketEnvironment;
|
|
119
|
+
}
|
|
120
|
+
/**
|
|
121
|
+
* Construct the storage bucket URL for the specific application and cloud environment.
|
|
122
|
+
*
|
|
123
|
+
* 1. Static assets are uploaded to `:bucketRegion/:prNumber?/:applicationName`
|
|
124
|
+
* 2. The application index is uploaded to `:bucketRegion/:prNumber?/:applicationName/:cloudEnvironment`
|
|
125
|
+
*
|
|
126
|
+
* This allows all cloud environments sharing the same static assets while each application's index
|
|
127
|
+
* is uploaded with different headers (e.g. CSP rules).
|
|
128
|
+
*/
|
|
129
|
+
return _createClass(StorageProvider, [{
|
|
130
|
+
key: "getBucketNamespace",
|
|
131
|
+
value: function getBucketNamespace(prNumber) {
|
|
132
|
+
if (!prNumber) return;
|
|
133
|
+
if (prNumber === 'merchant-center-preview') return prNumber;
|
|
134
|
+
return `mc-${prNumber}`;
|
|
135
|
+
}
|
|
136
|
+
|
|
137
|
+
/**
|
|
138
|
+
* Construct the storage bucket URL for the specific application and cloud environment.
|
|
139
|
+
*/
|
|
140
|
+
}, {
|
|
141
|
+
key: "getAssetsBucketUrl",
|
|
142
|
+
value: function getAssetsBucketUrl(_ref) {
|
|
143
|
+
var _context;
|
|
144
|
+
let applicationName = _ref.applicationName,
|
|
145
|
+
bucketProtocol = _ref.bucketProtocol,
|
|
146
|
+
bucketNamespace = _ref.bucketNamespace,
|
|
147
|
+
tag = _ref.tag;
|
|
148
|
+
if (!_classPrivateFieldLooseBase(this, _bucketRegion$1)[_bucketRegion$1]) {
|
|
149
|
+
throw new Error("'bucketRegion' is not defined. Required to determine 'assetsBucketUrl'.");
|
|
150
|
+
}
|
|
151
|
+
const storageProvider = storageBucketsConfig.storageProviders[tag];
|
|
152
|
+
const assetBuketUrl = storageProvider.urls.bucket?.[_classPrivateFieldLooseBase(this, _bucketRegion$1)[_bucketRegion$1]] ?? _classPrivateFieldLooseBase(this, _bucketRegion$1)[_bucketRegion$1];
|
|
153
|
+
const assetsBucketUrl = _filterInstanceProperty__default["default"](_context = [assetBuketUrl, bucketNamespace, applicationName]).call(_context, Boolean).join('/');
|
|
154
|
+
return `${bucketProtocol}${assetsBucketUrl}`;
|
|
155
|
+
}
|
|
156
|
+
}, {
|
|
157
|
+
key: "getApplicationIndexBucketUrl",
|
|
158
|
+
value: function getApplicationIndexBucketUrl(_ref2) {
|
|
159
|
+
let tag = _ref2.tag,
|
|
160
|
+
prNumber = _ref2.prNumber,
|
|
161
|
+
applicationName = _ref2.applicationName,
|
|
162
|
+
bucketProtocol = _ref2.bucketProtocol,
|
|
163
|
+
bucketNamespace = _ref2.bucketNamespace;
|
|
164
|
+
const applicationAssetsBucketUrl = this.getAssetsBucketUrl({
|
|
165
|
+
tag,
|
|
166
|
+
applicationName,
|
|
167
|
+
prNumber,
|
|
168
|
+
bucketProtocol,
|
|
169
|
+
bucketNamespace
|
|
170
|
+
});
|
|
171
|
+
const applicationIndexBucketUrl = `${applicationAssetsBucketUrl}/${_classPrivateFieldLooseBase(this, _bucketEnvironment$1)[_bucketEnvironment$1]}`;
|
|
172
|
+
return applicationIndexBucketUrl;
|
|
173
|
+
}
|
|
174
|
+
}, {
|
|
175
|
+
key: "getCdnUrl",
|
|
176
|
+
value: function getCdnUrl(_ref3) {
|
|
177
|
+
var _context2;
|
|
178
|
+
let applicationName = _ref3.applicationName,
|
|
179
|
+
prNumber = _ref3.prNumber,
|
|
180
|
+
publicBaseUrl = _ref3.publicBaseUrl,
|
|
181
|
+
excludeBucketRegion = _ref3.excludeBucketRegion;
|
|
182
|
+
return _filterInstanceProperty__default["default"](_context2 = [publicBaseUrl, excludeBucketRegion ? null : _classPrivateFieldLooseBase(this, _bucketRegion$1)[_bucketRegion$1], this.getBucketNamespace(prNumber), applicationName]).call(_context2, Boolean).join('/');
|
|
183
|
+
}
|
|
184
|
+
}, {
|
|
185
|
+
key: "getPublicBaseUrl",
|
|
186
|
+
value: function getPublicBaseUrl(tag) {
|
|
187
|
+
if (!_classPrivateFieldLooseBase(this, _bucketEnvironment$1)[_bucketEnvironment$1]) {
|
|
188
|
+
throw new Error("'bucketEnvironment' is not defined. Required to determine 'publicBaseUrl'.");
|
|
189
|
+
}
|
|
190
|
+
const storageProvider = storageBucketsConfig.storageProviders[tag];
|
|
191
|
+
const publicBaseUrl = storageProvider.urls.public[_classPrivateFieldLooseBase(this, _bucketEnvironment$1)[_bucketEnvironment$1]] ?? storageProvider.urls.public.default;
|
|
192
|
+
if (!publicBaseUrl) {
|
|
193
|
+
throw new Error(`'publicBaseUrl' is not defined for '${tag}' storage provider for ${_classPrivateFieldLooseBase(this, _bucketEnvironment$1)[_bucketEnvironment$1]} or as default.`);
|
|
194
|
+
}
|
|
195
|
+
return publicBaseUrl;
|
|
196
|
+
}
|
|
197
|
+
}]);
|
|
198
|
+
}();
|
|
199
|
+
var _bucketRegion2$1 = /*#__PURE__*/_classPrivateFieldLooseKey("bucketRegion");
|
|
200
|
+
var _bucketEnvironment2$1 = /*#__PURE__*/_classPrivateFieldLooseKey("bucketEnvironment");
|
|
201
|
+
let GoogleStorageProvider = /*#__PURE__*/function (_StorageProvider2) {
|
|
202
|
+
function GoogleStorageProvider(config) {
|
|
203
|
+
var _this;
|
|
204
|
+
_classCallCheck(this, GoogleStorageProvider);
|
|
205
|
+
_this = _callSuper(this, GoogleStorageProvider, [config]);
|
|
206
|
+
_Object$defineProperty__default["default"](_this, _bucketRegion2$1, {
|
|
207
|
+
writable: true,
|
|
208
|
+
value: void 0
|
|
209
|
+
});
|
|
210
|
+
_Object$defineProperty__default["default"](_this, _bucketEnvironment2$1, {
|
|
211
|
+
writable: true,
|
|
212
|
+
value: void 0
|
|
213
|
+
});
|
|
214
|
+
_classPrivateFieldLooseBase(_this, _bucketRegion2$1)[_bucketRegion2$1] = config.bucketRegion;
|
|
215
|
+
_classPrivateFieldLooseBase(_this, _bucketEnvironment2$1)[_bucketEnvironment2$1] = config.bucketEnvironment;
|
|
216
|
+
return _this;
|
|
217
|
+
}
|
|
218
|
+
_inherits(GoogleStorageProvider, _StorageProvider2);
|
|
219
|
+
return _createClass(GoogleStorageProvider, [{
|
|
220
|
+
key: "getTag",
|
|
221
|
+
value: function getTag() {
|
|
222
|
+
return 'gs';
|
|
223
|
+
}
|
|
224
|
+
}, {
|
|
225
|
+
key: "getBucketRegion",
|
|
226
|
+
value: function getBucketRegion() {
|
|
227
|
+
return _classPrivateFieldLooseBase(this, _bucketRegion2$1)[_bucketRegion2$1];
|
|
228
|
+
}
|
|
229
|
+
}, {
|
|
230
|
+
key: "getBucketEnvironment",
|
|
231
|
+
value: function getBucketEnvironment() {
|
|
232
|
+
return _classPrivateFieldLooseBase(this, _bucketEnvironment2$1)[_bucketEnvironment2$1];
|
|
233
|
+
}
|
|
234
|
+
}, {
|
|
235
|
+
key: "getProtocol",
|
|
236
|
+
value: function getProtocol() {
|
|
237
|
+
return 'gs://';
|
|
238
|
+
}
|
|
239
|
+
}, {
|
|
240
|
+
key: "getPublicBaseUrl",
|
|
241
|
+
value: function getPublicBaseUrl() {
|
|
242
|
+
return _get(_getPrototypeOf(GoogleStorageProvider.prototype), "getPublicBaseUrl", this).call(this, this.getTag());
|
|
243
|
+
}
|
|
244
|
+
}, {
|
|
245
|
+
key: "getCdnUrl",
|
|
246
|
+
value: function getCdnUrl(config) {
|
|
247
|
+
return _get(_getPrototypeOf(GoogleStorageProvider.prototype), "getCdnUrl", this).call(this, _objectSpread$4({
|
|
248
|
+
publicBaseUrl: this.getPublicBaseUrl()
|
|
249
|
+
}, config));
|
|
250
|
+
}
|
|
251
|
+
}, {
|
|
252
|
+
key: "getAssetsBucketUrl",
|
|
253
|
+
value: function getAssetsBucketUrl(config) {
|
|
254
|
+
return _get(_getPrototypeOf(GoogleStorageProvider.prototype), "getAssetsBucketUrl", this).call(this, _objectSpread$4({
|
|
255
|
+
tag: this.getTag(),
|
|
256
|
+
bucketProtocol: this.getProtocol(),
|
|
257
|
+
bucketNamespace: _get(_getPrototypeOf(GoogleStorageProvider.prototype), "getBucketNamespace", this).call(this, config.prNumber)
|
|
258
|
+
}, config));
|
|
259
|
+
}
|
|
260
|
+
}, {
|
|
261
|
+
key: "getApplicationIndexBucketUrl",
|
|
262
|
+
value: function getApplicationIndexBucketUrl(config) {
|
|
263
|
+
return _get(_getPrototypeOf(GoogleStorageProvider.prototype), "getApplicationIndexBucketUrl", this).call(this, _objectSpread$4({
|
|
264
|
+
tag: this.getTag(),
|
|
265
|
+
bucketProtocol: this.getProtocol(),
|
|
266
|
+
bucketNamespace: _get(_getPrototypeOf(GoogleStorageProvider.prototype), "getBucketNamespace", this).call(this, config.prNumber)
|
|
267
|
+
}, config));
|
|
268
|
+
}
|
|
269
|
+
}]);
|
|
270
|
+
}(StorageProvider);
|
|
271
|
+
var _bucketRegion3 = /*#__PURE__*/_classPrivateFieldLooseKey("bucketRegion");
|
|
272
|
+
var _bucketEnvironment3 = /*#__PURE__*/_classPrivateFieldLooseKey("bucketEnvironment");
|
|
273
|
+
let AwsStorageProvider = /*#__PURE__*/function (_StorageProvider3) {
|
|
274
|
+
function AwsStorageProvider(config) {
|
|
275
|
+
var _this2;
|
|
276
|
+
_classCallCheck(this, AwsStorageProvider);
|
|
277
|
+
_this2 = _callSuper(this, AwsStorageProvider, [config]);
|
|
278
|
+
_Object$defineProperty__default["default"](_this2, _bucketRegion3, {
|
|
279
|
+
writable: true,
|
|
280
|
+
value: void 0
|
|
281
|
+
});
|
|
282
|
+
_Object$defineProperty__default["default"](_this2, _bucketEnvironment3, {
|
|
283
|
+
writable: true,
|
|
284
|
+
value: void 0
|
|
285
|
+
});
|
|
286
|
+
_classPrivateFieldLooseBase(_this2, _bucketRegion3)[_bucketRegion3] = config.bucketRegion;
|
|
287
|
+
_classPrivateFieldLooseBase(_this2, _bucketEnvironment3)[_bucketEnvironment3] = config.bucketEnvironment;
|
|
288
|
+
return _this2;
|
|
289
|
+
}
|
|
290
|
+
_inherits(AwsStorageProvider, _StorageProvider3);
|
|
291
|
+
return _createClass(AwsStorageProvider, [{
|
|
292
|
+
key: "getTag",
|
|
293
|
+
value: function getTag() {
|
|
294
|
+
return 's3';
|
|
295
|
+
}
|
|
296
|
+
}, {
|
|
297
|
+
key: "getBucketRegion",
|
|
298
|
+
value: function getBucketRegion() {
|
|
299
|
+
return _classPrivateFieldLooseBase(this, _bucketRegion3)[_bucketRegion3];
|
|
300
|
+
}
|
|
301
|
+
}, {
|
|
302
|
+
key: "getBucketEnvironment",
|
|
303
|
+
value: function getBucketEnvironment() {
|
|
304
|
+
return _classPrivateFieldLooseBase(this, _bucketEnvironment3)[_bucketEnvironment3];
|
|
305
|
+
}
|
|
306
|
+
}, {
|
|
307
|
+
key: "getProtocol",
|
|
308
|
+
value: function getProtocol() {
|
|
309
|
+
return 's3://';
|
|
310
|
+
}
|
|
311
|
+
}, {
|
|
312
|
+
key: "getPublicBaseUrl",
|
|
313
|
+
value: function getPublicBaseUrl() {
|
|
314
|
+
return _get(_getPrototypeOf(AwsStorageProvider.prototype), "getPublicBaseUrl", this).call(this, this.getTag());
|
|
315
|
+
}
|
|
316
|
+
}, {
|
|
317
|
+
key: "getCdnUrl",
|
|
318
|
+
value: function getCdnUrl(config) {
|
|
319
|
+
return _get(_getPrototypeOf(AwsStorageProvider.prototype), "getCdnUrl", this).call(this, _objectSpread$4({
|
|
320
|
+
publicBaseUrl: this.getPublicBaseUrl(),
|
|
321
|
+
excludeBucketRegion: true
|
|
322
|
+
}, config));
|
|
323
|
+
}
|
|
324
|
+
}, {
|
|
325
|
+
key: "getAssetsBucketUrl",
|
|
326
|
+
value: function getAssetsBucketUrl(config) {
|
|
327
|
+
return _get(_getPrototypeOf(AwsStorageProvider.prototype), "getAssetsBucketUrl", this).call(this, _objectSpread$4({
|
|
328
|
+
tag: this.getTag(),
|
|
329
|
+
bucketProtocol: this.getProtocol(),
|
|
330
|
+
bucketNamespace: _get(_getPrototypeOf(AwsStorageProvider.prototype), "getBucketNamespace", this).call(this, config.prNumber)
|
|
331
|
+
}, config));
|
|
332
|
+
}
|
|
333
|
+
}, {
|
|
334
|
+
key: "getApplicationIndexBucketUrl",
|
|
335
|
+
value: function getApplicationIndexBucketUrl(config) {
|
|
336
|
+
return _get(_getPrototypeOf(AwsStorageProvider.prototype), "getApplicationIndexBucketUrl", this).call(this, _objectSpread$4({
|
|
337
|
+
tag: this.getTag(),
|
|
338
|
+
bucketProtocol: this.getProtocol(),
|
|
339
|
+
bucketNamespace: _get(_getPrototypeOf(AwsStorageProvider.prototype), "getBucketNamespace", this).call(this, config.prNumber)
|
|
340
|
+
}, config));
|
|
341
|
+
}
|
|
342
|
+
}]);
|
|
343
|
+
}(StorageProvider);
|
|
344
|
+
function getStorageProvider(storageProvider, config) {
|
|
345
|
+
switch (storageProvider) {
|
|
346
|
+
case 'gs':
|
|
347
|
+
return new GoogleStorageProvider(config);
|
|
348
|
+
case 's3':
|
|
349
|
+
return new AwsStorageProvider(config);
|
|
350
|
+
default:
|
|
351
|
+
throw new Error(`Storage provider ${storageProvider} not supported`);
|
|
352
|
+
}
|
|
353
|
+
}
|
|
354
|
+
|
|
355
|
+
var _bucketEnvironment = /*#__PURE__*/_classPrivateFieldLooseKey("bucketEnvironment");
|
|
356
|
+
var _bucketRegion = /*#__PURE__*/_classPrivateFieldLooseKey("bucketRegion");
|
|
357
|
+
let GoogleStorageUploadScriptsGenerator = /*#__PURE__*/function () {
|
|
358
|
+
function GoogleStorageUploadScriptsGenerator(config) {
|
|
359
|
+
_classCallCheck(this, GoogleStorageUploadScriptsGenerator);
|
|
360
|
+
_Object$defineProperty__default["default"](this, _bucketEnvironment, {
|
|
361
|
+
writable: true,
|
|
362
|
+
value: void 0
|
|
363
|
+
});
|
|
364
|
+
_Object$defineProperty__default["default"](this, _bucketRegion, {
|
|
365
|
+
writable: true,
|
|
366
|
+
value: void 0
|
|
367
|
+
});
|
|
368
|
+
_classPrivateFieldLooseBase(this, _bucketRegion)[_bucketRegion] = config.bucketRegion;
|
|
369
|
+
_classPrivateFieldLooseBase(this, _bucketEnvironment)[_bucketEnvironment] = config.bucketEnvironment;
|
|
370
|
+
}
|
|
371
|
+
return _createClass(GoogleStorageUploadScriptsGenerator, [{
|
|
372
|
+
key: "getApplicationIndexUploadScript",
|
|
373
|
+
value: function getApplicationIndexUploadScript(_ref) {
|
|
374
|
+
let packageManagerName = _ref.packageManagerName,
|
|
375
|
+
bucketUrl = _ref.bucketUrl,
|
|
376
|
+
cdnUrl = _ref.cdnUrl,
|
|
377
|
+
buildRevision = _ref.buildRevision,
|
|
378
|
+
buildNumber = _ref.buildNumber,
|
|
379
|
+
applicationIndexOutFile = _ref.applicationIndexOutFile;
|
|
380
|
+
if (!_classPrivateFieldLooseBase(this, _bucketEnvironment)[_bucketEnvironment]) {
|
|
381
|
+
throw new Error("Missing 'bucketEnvironment' when generating application index.");
|
|
382
|
+
}
|
|
383
|
+
return `
|
|
384
|
+
#!/usr/bin/env bash
|
|
385
|
+
|
|
386
|
+
set -e
|
|
387
|
+
|
|
388
|
+
echo "Uploading compiled ${applicationIndexOutFile} to Google Storage bucket ${bucketUrl}"
|
|
389
|
+
|
|
390
|
+
gcloud storage cp \\
|
|
391
|
+
"$(dirname "$0")/${applicationIndexOutFile}" \\
|
|
392
|
+
"${bucketUrl}/" \\
|
|
393
|
+
-z html \\
|
|
394
|
+
--content-type="text/html" \\
|
|
395
|
+
--cache-control="public,max-age=0,no-transform"
|
|
396
|
+
|
|
397
|
+
echo "Creating version.json and uploading it to bucket ${bucketUrl}"
|
|
398
|
+
|
|
399
|
+
NODE_ENV=production ${packageManagerName} application-cli create-version \\
|
|
400
|
+
--version-url=${cdnUrl}/${_classPrivateFieldLooseBase(this, _bucketEnvironment)[_bucketEnvironment]}/version.json \\
|
|
401
|
+
--build-revision=${buildRevision} \\
|
|
402
|
+
--build-number=${buildNumber} \\
|
|
403
|
+
--out-file=$(dirname "$0")/version.json
|
|
404
|
+
|
|
405
|
+
gcloud storage cp \\
|
|
406
|
+
"$(dirname "$0")/version.json" \\
|
|
407
|
+
"${bucketUrl}/" \\
|
|
408
|
+
-z json \\
|
|
409
|
+
--content-type="application/json" \\
|
|
410
|
+
--cache-control="public,max-age=0,no-transform"
|
|
411
|
+
`;
|
|
412
|
+
}
|
|
413
|
+
}, {
|
|
414
|
+
key: "getProductionBundlesUploadScript",
|
|
415
|
+
value: function getProductionBundlesUploadScript(_ref2) {
|
|
416
|
+
let bucketUrl = _ref2.bucketUrl,
|
|
417
|
+
assetsPath = _ref2.assetsPath,
|
|
418
|
+
skipMenu = _ref2.skipMenu;
|
|
419
|
+
return `
|
|
420
|
+
#!/usr/bin/env bash
|
|
421
|
+
|
|
422
|
+
set -e
|
|
423
|
+
|
|
424
|
+
# NOTES:
|
|
425
|
+
# https://cloud.google.com/sdk/gcloud/reference/storage/cp
|
|
426
|
+
# 1. The '-z' option triggers compressing the assets before
|
|
427
|
+
# uploading them and sets the 'Content-Encoding' to 'gzip'.
|
|
428
|
+
# 2. The 'Accept-encoding: gzip' is set automatically by the 'gcloud storage'.
|
|
429
|
+
# 3. The 'max-age' is set to 1 year which is considered the maximum
|
|
430
|
+
# "valid" lifetime of an asset to be cached.
|
|
431
|
+
# 4. The '-n' will skip uploading existing files and prevents them to
|
|
432
|
+
# be overwritten
|
|
433
|
+
echo "Uploading static assets to Google Storage bucket ${bucketUrl}"
|
|
434
|
+
|
|
435
|
+
gcloud storage cp \\
|
|
436
|
+
${assetsPath}/public/{*.css,*.js,*.js.map,*.html} \\
|
|
437
|
+
"${bucketUrl}" \\
|
|
438
|
+
-n \\
|
|
439
|
+
-z js,css \\
|
|
440
|
+
--cache-control="public,max-age=31536000,no-transform"
|
|
441
|
+
|
|
442
|
+
# We need to upload the PNG and HTML files separately because we want them
|
|
443
|
+
# to be able to overwrite the existing files (if any). For instance, the
|
|
444
|
+
# file or the favicons.
|
|
445
|
+
# This is controlled with the '-n' option (which is used for the JS and CSS
|
|
446
|
+
# as we don't want to overwrite them)
|
|
447
|
+
gcloud storage cp \\
|
|
448
|
+
${assetsPath}/public/{*.png,robots.txt} \\
|
|
449
|
+
"${bucketUrl}" \\
|
|
450
|
+
-z txt \\
|
|
451
|
+
--cache-control="public,max-age=31536000,no-transform"
|
|
452
|
+
|
|
453
|
+
if ${skipMenu}; then
|
|
454
|
+
echo "Skipping menu.json upload"
|
|
455
|
+
else
|
|
456
|
+
echo "Uploading menu.json to bucket ${bucketUrl}"
|
|
457
|
+
# NOTE: somehow the 'cache-control:private' doesn't work.
|
|
458
|
+
# I mean, the file is uploaded with the correct metadata but when I fetch
|
|
459
|
+
# the file the response contains the header
|
|
460
|
+
# 'cache-control: public,max-age=31536000,no-transform', even though the
|
|
461
|
+
# documentation clearly states that by marking the header as 'private' will
|
|
462
|
+
# disable the cache (for publicly readable objects).
|
|
463
|
+
# https://cloud.google.com/storage/docs/gsutil/addlhelp/WorkingWithObjectMetadata#cache-control
|
|
464
|
+
# However, I found out that, by requesting the file with any RANDOM
|
|
465
|
+
# query parameter, will instruct the storage to return a 'fresh' object
|
|
466
|
+
# (without any cache control).
|
|
467
|
+
# Unofficial source: https://stackoverflow.com/a/49052895
|
|
468
|
+
# This seems to be the 'easiest' option to 'disable' the cache for public
|
|
469
|
+
# objects. Other alternative approaces are:
|
|
470
|
+
# * make the object private with some simple ACL (private objects are not cached)
|
|
471
|
+
# * suffix the file name with e.g. the git SHA, so we have different files
|
|
472
|
+
# for each upload ('index.html.template-\${CIRCLE_SHA1}'). The server knows
|
|
473
|
+
# the git SHA on runtime and can get the correct file when it starts.
|
|
474
|
+
# * find out why the 'private' cache control does not work
|
|
475
|
+
gcloud storage cp \\
|
|
476
|
+
${assetsPath}/menu.json \\
|
|
477
|
+
${bucketUrl} \\
|
|
478
|
+
-z json \\
|
|
479
|
+
--content-type="application/json" \\
|
|
480
|
+
--cache-control="public,max-age=0,no-transform"
|
|
481
|
+
fi
|
|
482
|
+
`;
|
|
483
|
+
}
|
|
484
|
+
}]);
|
|
485
|
+
}();
|
|
486
|
+
// Loose-mode private-field keys (Babel `_classPrivateFieldLooseKey` output) for
// AwsStorageUploadScriptsGenerator's #bucketEnvironment / #bucketRegion fields.
var _bucketEnvironment2 = /*#__PURE__*/_classPrivateFieldLooseKey("bucketEnvironment");
var _bucketRegion2 = /*#__PURE__*/_classPrivateFieldLooseKey("bucketRegion");
// Generates bash upload scripts targeting Amazon S3; the 's3' counterpart to
// GoogleStorageUploadScriptsGenerator (selected via getUploadScriptsGenerator).
// State comes from `config.bucketRegion` and `config.bucketEnvironment`.
let AwsStorageUploadScriptsGenerator = /*#__PURE__*/function () {
  function AwsStorageUploadScriptsGenerator(config) {
    _classCallCheck(this, AwsStorageUploadScriptsGenerator);
    _Object$defineProperty__default["default"](this, _bucketEnvironment2, {
      writable: true,
      value: void 0
    });
    _Object$defineProperty__default["default"](this, _bucketRegion2, {
      writable: true,
      value: void 0
    });
    _classPrivateFieldLooseBase(this, _bucketRegion2)[_bucketRegion2] = config.bucketRegion;
    _classPrivateFieldLooseBase(this, _bucketEnvironment2)[_bucketEnvironment2] = config.bucketEnvironment;
  }
  return _createClass(AwsStorageUploadScriptsGenerator, [{
    // Returns a bash script that (1) uploads the compiled application index to
    // the S3 bucket and (2) generates + uploads version.json via the
    // `application-cli create-version` command.
    // NOTE(review): `--profile` is given the *bucket region* value — this
    // assumes an AWS CLI profile named after each region exists; confirm in CI.
    key: "getApplicationIndexUploadScript",
    value: function getApplicationIndexUploadScript(_ref3) {
      let packageManagerName = _ref3.packageManagerName,
        bucketUrl = _ref3.bucketUrl,
        cdnUrl = _ref3.cdnUrl,
        buildRevision = _ref3.buildRevision,
        buildNumber = _ref3.buildNumber,
        applicationIndexOutFile = _ref3.applicationIndexOutFile;
      return `
#!/usr/bin/env bash

echo "Uploading static assets to Amazon S3 bucket ${bucketUrl}"

set -e

aws s3 cp "$(dirname "$0")/${applicationIndexOutFile}" \\
  "${bucketUrl}/" \\
  --content-type="text/html" \\
  --cache-control="public,max-age=0,no-transform" \\
  --profile ${_classPrivateFieldLooseBase(this, _bucketRegion2)[_bucketRegion2]}

echo "Creating version.json and uploading it to bucket ${bucketUrl}"

NODE_ENV=production ${packageManagerName} application-cli create-version \\
  --version-url=${cdnUrl}/${_classPrivateFieldLooseBase(this, _bucketEnvironment2)[_bucketEnvironment2]}/version.json \\
  --build-revision=${buildRevision} \\
  --build-number=${buildNumber} \\
  --out-file=$(dirname "$0")/version.json

aws s3 cp "$(dirname "$0")/version.json" \\
  "${bucketUrl}/" \\
  --content-type="application/json" \\
  --cache-control="public,max-age=0,no-transform" \\
  --profile ${_classPrivateFieldLooseBase(this, _bucketRegion2)[_bucketRegion2]}
`;
    }
  }, {
    // Returns a bash script that syncs the compiled production bundles
    // (css/js/maps/html) to S3 with long-lived cache headers, force-copies
    // overwritable files (png/robots.txt), and optionally uploads menu.json.
    key: "getProductionBundlesUploadScript",
    value: function getProductionBundlesUploadScript(_ref4) {
      let bucketUrl = _ref4.bucketUrl,
        assetsPath = _ref4.assetsPath,
        skipMenu = _ref4.skipMenu;
      return `
#!/usr/bin/env bash

echo "Uploading static assets to Amazon S3 bucket ${bucketUrl}"

set -e

# NOTE:
# The sync command on the AWS CLI is different to the -n option on the gcloud CLI.
# Sync will only upload files that are not already in the bucket, but it will skip existing ones
# that have been changed locally.
# The -n option on the gcloud CLI will skip uploading existing files and prevents them to be overwritten.
# https://docs.aws.amazon.com/cli/latest/reference/s3/sync.html
# https://cloud.google.com/sdk/gcloud/reference/storage/cp
#
# Compression (gzip) is enabled on CloudFront by default. Hence compression does happing while uploading.
# https://docs.aws.amazon.com/AmazonCloudFront/latest/DeveloperGuide/ServingCompressedFiles.html#compressed-content-cloudfront-configuring
aws s3 sync ${assetsPath}/public \\
  "${bucketUrl}" \\
  --exclude "*" \\
  --include "*.css" \\
  --include "*.js" \\
  --include "*.js.map" \\
  --include "*.html" \\
  --cache-control="public,max-age=31536000,no-transform" \\
  --profile ${_classPrivateFieldLooseBase(this, _bucketRegion2)[_bucketRegion2]}

# We need to upload the PNG and HTML files separately because we want them
# to be able to overwrite the existing files (if any). For instance, the
# file or the favicons.
aws s3 cp ${assetsPath}/public \\
  "${bucketUrl}" \\
  --recursive \\
  --exclude "*" \\
  --include "*.png" \\
  --include "robots.txt" \\
  --cache-control="public,max-age=31536000,no-transform" \\
  --profile ${_classPrivateFieldLooseBase(this, _bucketRegion2)[_bucketRegion2]}

if ${skipMenu}; then
  echo "Skipping menu.json upload"
else
  echo "Uploading menu.json to bucket ${bucketUrl}"

  aws s3 cp ${assetsPath}/menu.json \\
    "${bucketUrl}/" \\
    --content-type="application/json" \\
    --cache-control="public,max-age=0,no-transform" \\
    --profile ${_classPrivateFieldLooseBase(this, _bucketRegion2)[_bucketRegion2]}
fi
`;
    }
  }]);
}();
|
|
599
|
+
/**
 * Factory returning the upload-scripts generator for a storage provider tag.
 *
 * @param {string} storageProvider - Provider tag: 'gs' (Google Cloud Storage)
 *   or 's3' (Amazon S3).
 * @param {object} config - Passed straight to the generator's constructor.
 * @returns A generator instance matching the tag.
 * @throws {Error} When the tag is not one of the supported providers.
 */
function getUploadScriptsGenerator(storageProvider, config) {
  if (storageProvider === 'gs') {
    return new GoogleStorageUploadScriptsGenerator(config);
  }
  if (storageProvider === 's3') {
    return new AwsStorageUploadScriptsGenerator(config);
  }
  throw new Error(`Storage provider ${storageProvider} not supported`);
}
|
|
609
|
+
|
|
610
|
+
// Returns true when `filePath` is accessible on disk. Any error raised by
// fs.accessSync (missing file, permission denied, ...) yields false.
function doesFileExist(filePath) {
  let isAccessible = true;
  try {
    fs__default$1["default"].accessSync(filePath);
  } catch (error) {
    isAccessible = false;
  }
  return isAccessible;
}
|
|
618
|
+
|
|
619
|
+
// Babel helper (compiled output): collects `e`'s own enumerable string keys
// and, when `r` is truthy, its own enumerable symbol keys as well.
function ownKeys$3(e, r) { var t = _Object$keys__default["default"](e); if (_Object$getOwnPropertySymbols__default["default"]) { var o = _Object$getOwnPropertySymbols__default["default"](e); r && (o = _filterInstanceProperty__default["default"](o).call(o, function (r) { return _Object$getOwnPropertyDescriptor__default["default"](e, r).enumerable; })), t.push.apply(t, o); } return t; }
// Babel helper implementing object-spread (`{...a, ...b}`) semantics on top of
// ownKeys$3; later sources win on key collisions.
function _objectSpread$3(e) { for (var r = 1; r < arguments.length; r++) { var _context, _context2; var t = null != arguments[r] ? arguments[r] : {}; r % 2 ? _forEachInstanceProperty__default["default"](_context = ownKeys$3(Object(t), !0)).call(_context, function (r) { _defineProperty(e, r, t[r]); }) : _Object$getOwnPropertyDescriptors__default["default"] ? _Object$defineProperties__default["default"](e, _Object$getOwnPropertyDescriptors__default["default"](t)) : _forEachInstanceProperty__default["default"](_context2 = ownKeys$3(Object(t))).call(_context2, function (r) { _Object$defineProperty__default["default"](e, r, _Object$getOwnPropertyDescriptor__default["default"](t, r)); }); } return e; }
|
|
621
|
+
/**
 * Loads environment variables for a cloud environment from dotenv files.
 * Reads `.env.production` (shared) and `.env.<cloudEnvironment>` from
 * `dotenvPath` and returns their merged key/value pairs, the cloud-specific
 * file taking precedence over the shared one.
 *
 * Returns `{}` when no `dotenvPath` is given.
 * Throws when the folder does not exist or when either file fails to load.
 *
 * NOTE(review): the inline comment below calls the shared file "optional",
 * yet a load error for it also throws — confirm which behavior is intended.
 */
function loadDotenvFiles(_ref) {
  let dotenvPath = _ref.dotenvPath,
    cloudEnvironment = _ref.cloudEnvironment;
  // No path requested, skip.
  if (!dotenvPath) {
    return {};
  }

  // Check if the given path exists.
  if (!doesFileExist(dotenvPath)) {
    throw new Error(`The dotenv folder path does not exist: "${dotenvPath}".`);
  }

  // Load the environment values
  const sharedDotenvFile = '.env.production';
  const cloudDotenvFile = `.env.${cloudEnvironment}`;

  // The shared dotenv file across environments is optional
  const sharedProductionEnvironment = dotenv__default["default"].config({
    encoding: 'utf8',
    path: path__default["default"].join(dotenvPath, sharedDotenvFile)
  });
  const cloudSpecificProductionEnvironment = dotenv__default["default"].config({
    encoding: 'utf8',
    path: path__default["default"].join(dotenvPath, cloudDotenvFile)
  });
  if (cloudSpecificProductionEnvironment.error) {
    throw new Error(`Failed loading '${cloudDotenvFile}' in '${dotenvPath}'. Make sure it exists.`);
  }
  if (sharedProductionEnvironment.error) {
    throw new Error(`Failed loading '${sharedDotenvFile}' in '${dotenvPath}'. Make sure it exists.`);
  }
  // Cloud-specific values override shared ones (spread order).
  return _objectSpread$3(_objectSpread$3({}, sharedProductionEnvironment.parsed), cloudSpecificProductionEnvironment.parsed);
}
|
|
655
|
+
|
|
656
|
+
// Babel helper (compiled output): own enumerable string keys of `e`, plus own
// enumerable symbol keys when `r` is truthy.
function ownKeys$2(e, r) { var t = _Object$keys__default["default"](e); if (_Object$getOwnPropertySymbols__default["default"]) { var o = _Object$getOwnPropertySymbols__default["default"](e); r && (o = _filterInstanceProperty__default["default"](o).call(o, function (r) { return _Object$getOwnPropertyDescriptor__default["default"](e, r).enumerable; })), t.push.apply(t, o); } return t; }
// Babel helper implementing object-spread semantics via ownKeys$2; later
// sources win on key collisions.
function _objectSpread$2(e) { for (var r = 1; r < arguments.length; r++) { var _context3, _context4; var t = null != arguments[r] ? arguments[r] : {}; r % 2 ? _forEachInstanceProperty__default["default"](_context3 = ownKeys$2(Object(t), !0)).call(_context3, function (r) { _defineProperty(e, r, t[r]); }) : _Object$getOwnPropertyDescriptors__default["default"] ? _Object$defineProperties__default["default"](e, _Object$getOwnPropertyDescriptors__default["default"](t)) : _forEachInstanceProperty__default["default"](_context4 = ownKeys$2(Object(t))).call(_context4, function (r) { _Object$defineProperty__default["default"](e, r, _Object$getOwnPropertyDescriptor__default["default"](t, r)); }); } return e; }
|
|
658
|
+
// Writes an upload script to `<filePath>/<fileName>` and marks it executable
// (mode 0o755) so CI can invoke it directly.
function writeUploadScriptFile({ fileName, fileContent, filePath }) {
  const scriptPath = path__default$1["default"].join(filePath, fileName);
  fs__default["default"].writeFileSync(scriptPath, fileContent, {
    // Make the script executable
    mode: 0o755,
    encoding: 'utf8'
  });
}
|
|
668
|
+
/**
 * Generates the "upload production bundles" bash script for one storage
 * provider and writes it (executable) into
 * `<deploymentsPath>/<providerTag>/`. The bucket region is embedded into the
 * output file name so one script exists per region.
 */
async function compileApplicationAssets(_ref2) {
  let cliFlags = _ref2.cliFlags,
    storageProvider = _ref2.storageProvider,
    uploadScriptsGenerator = _ref2.uploadScriptsGenerator,
    paths = _ref2.paths;
  const applicationAssetsUploadScriptContent = uploadScriptsGenerator.getProductionBundlesUploadScript({
    storageProvider,
    bucketUrl: storageProvider.getAssetsBucketUrl({
      prNumber: cliFlags.prNumber,
      applicationName: cliFlags.applicationName
    }),
    assetsPath: paths.assetsPath,
    skipMenu: cliFlags.skipMenu
  });
  // Split "<name><ext>" so the region can be injected: "<name>-<region><ext>".
  const parsedApplicationAssetsUploadScriptFile = path__default$1["default"].parse(cliFlags.applicationAssetsUploadScriptOutFile);
  const applicationAssetsUploadScriptFileName = `${parsedApplicationAssetsUploadScriptFile.name}-${storageProvider.getBucketRegion()}${parsedApplicationAssetsUploadScriptFile.ext}`;
  writeUploadScriptFile({
    fileName: applicationAssetsUploadScriptFileName,
    fileContent: applicationAssetsUploadScriptContent,
    filePath: path__default$1["default"].join(paths.deploymentsPath, storageProvider.getTag())
  });
}
|
|
690
|
+
/**
 * Compiles the application's index.html for one cloud environment and
 * prepares its deployment artifacts:
 *  1. creates `<deploymentsPath>/<providerTag>/<cloudEnvironment>/`,
 *  2. runs `mc-scripts compile-html` with env vars merged from the dotenv
 *     files plus MC_CDN_URL / optional MC_URL / MC_API_URL / REVISION,
 *  3. writes the "upload application index" script into that folder,
 *  4. moves the compiled `index.html` there under `applicationIndexOutFile`.
 * Throws when any spawned command reports failure.
 */
async function compileEnvironmentApplicationIndexes(_ref3) {
  let cliFlags = _ref3.cliFlags,
    storageProvider = _ref3.storageProvider,
    uploadScriptsGenerator = _ref3.uploadScriptsGenerator,
    paths = _ref3.paths,
    cloudEnvironment = _ref3.cloudEnvironment;
  const cloudEnvironmentDeploymentPath = path__default$1["default"].join(paths.deploymentsPath, storageProvider.getTag(), cloudEnvironment);
  // Ensure the folder exists
  const createDeploymentsFolderResult = await execa__default["default"]('mkdir', ['-p', cloudEnvironmentDeploymentPath], {
    encoding: 'utf8'
  });
  if (createDeploymentsFolderResult.failed) {
    throw new Error(createDeploymentsFolderResult.stderr);
  }

  // Construct the proper CDN URL for the specific application
  const cdnUrl = storageProvider.getCdnUrl({
    prNumber: cliFlags.prNumber,
    applicationName: cliFlags.applicationName
  });
  const environmentVariablesForCompilation = _objectSpread$2(_objectSpread$2(_objectSpread$2(_objectSpread$2({}, loadDotenvFiles({
    dotenvPath: paths.dotenvPath,
    cloudEnvironment
  })), {}, {
    // The trailing slash is important to indicate to the CSP directive that all the resources
    // under that path should be allowed.
    MC_CDN_URL: `${cdnUrl}/`
  }, cliFlags.mcUrl ? {
    MC_URL: cliFlags.mcUrl
  } : {}), cliFlags.mcApiUrl ? {
    MC_API_URL: cliFlags.mcApiUrl
  } : {}), {}, {
    // Will be used by the Application Kit for Sentry and exposed on `window.app.revision`.
    REVISION: cliFlags.buildRevision
  });

  /// Sentry and GTM is disabled on branch deployments
  // NOTE(review): assigning `null` to `process.env` stores the string "null"
  // in Node — presumably downstream treats that as "disabled"; confirm.
  if (cliFlags.prNumber) {
    // @ts-expect-error The env is sometimes overwritten by code to a boolean
    process.env.TRACKING_SENTRY = null;
    // @ts-expect-error The env is sometimes overwritten by code to a boolean
    process.env.TRACKING_GTM = null;
    // @ts-expect-error
    environmentVariablesForCompilation.TRACKING_SENTRY = null;
    // @ts-expect-error
    environmentVariablesForCompilation.TRACKING_GTM = null;
  }

  // Compile the application using the loaded environment values
  const compileResult = await execa__default["default"]('mc-scripts', ['compile-html'], {
    encoding: 'utf8',
    preferLocal: true,
    extendEnv: true,
    env: environmentVariablesForCompilation
  });
  if (compileResult.failed) {
    throw new Error(compileResult.stderr);
  }
  const applicationIndexUploadScriptContent = uploadScriptsGenerator.getApplicationIndexUploadScript({
    storageProvider,
    packageManagerName: cliFlags.packageManagerName,
    bucketUrl: storageProvider.getApplicationIndexBucketUrl({
      prNumber: cliFlags.prNumber,
      applicationName: cliFlags.applicationName
    }),
    cdnUrl,
    buildRevision: cliFlags.buildRevision,
    buildNumber: cliFlags.buildNumber,
    applicationIndexOutFile: cliFlags.applicationIndexOutFile
  });
  writeUploadScriptFile({
    fileName: cliFlags.applicationIndexUploadScriptOutFile,
    fileContent: applicationIndexUploadScriptContent,
    filePath: cloudEnvironmentDeploymentPath
  });

  // Move the compiled `index.html` to the deployments folder of the related cloud environment.
  const moveResult = await execa__default["default"]('mv', [path__default$1["default"].join(paths.publicAssetsPath, 'index.html'), path__default$1["default"].join(cloudEnvironmentDeploymentPath, cliFlags.applicationIndexOutFile)]);
  if (moveResult.failed) {
    throw new Error(moveResult.stderr);
  }
}
|
|
772
|
+
/**
 * CLI command: for every bucket region in the storage-buckets config, builds
 * a listr2 task tree that
 *  - compiles application assets once per storage provider per region, and
 *  - compiles the application index once per cloud environment per provider,
 * then runs the tree (verbose renderer on CI, interactive otherwise).
 *
 * Environments without an explicit `storageProviders` list fall back to the
 * Google Cloud Storage provider tag only.
 */
async function command$4(cliFlags, cwd) {
  var _context;
  const storageBucketConfig = await storageBucketsConfig.loadStorageBucketsConfig();
  const applicationDirectory = getApplicationDirectory(cwd);
  let assetsPath;
  // On CI the assets may live under a remapped root (e.g. a mounted volume).
  if (cliFlags.ciAssetsRootPath && isCI()) {
    assetsPath = applicationDirectory.replace('/home/circleci/', cliFlags.ciAssetsRootPath);
  } else {
    assetsPath = applicationDirectory;
  }
  const monorepoRoot = findRoot.findRootSync(cwd);
  const paths = {
    publicAssetsPath: resolveInApplication('public', cwd),
    deploymentsPath: resolveInApplication('deployments', cwd),
    dotenvPath: cliFlags.dotenvFolder && path__default$1["default"].join(monorepoRoot.rootDir, cliFlags.dotenvFolder),
    assetsPath
  };
  const defaultStorageProviders = [storageBucketsConfig.storageProviders.gs.tag];
  // One top-level task per bucket region; each spawns a sub-list of
  // per-provider asset tasks and per-environment index tasks.
  const taskList = new listr2.Listr(_mapInstanceProperty__default["default"](_context = _Object$entries__default["default"](storageBucketConfig)).call(_context, _ref4 => {
    let _ref5 = _slicedToArray(_ref4, 2),
      bucketRegion = _ref5[0],
      bucketEnvironmentConfigs = _ref5[1];
    return {
      title: `Compiling for bucket region ${bucketRegion}`,
      task: (_bucketRegionCtx, bucketRegionTask) => {
        // NOTE: Application assets need to be compiled
        // for all storage providers once per region.
        const allStorageProvidersForBucketRegion = [...new _Set__default["default"](_flatMapInstanceProperty__default["default"](bucketEnvironmentConfigs).call(bucketEnvironmentConfigs, bucketEnvironmentConfig => bucketEnvironmentConfig.storageProviders || defaultStorageProviders))];
        const allApplicationAssetTasks = _mapInstanceProperty__default["default"](allStorageProvidersForBucketRegion).call(allStorageProvidersForBucketRegion, storageProviderTag => {
          const uploadScriptsGeneratorConfig = {
            bucketRegion: bucketRegion
          };
          const storageProviderConfig = {
            bucketRegion: bucketRegion
          };
          const storageProvider = getStorageProvider(storageProviderTag, storageProviderConfig);
          const uploadScriptsGenerator = getUploadScriptsGenerator(storageProviderTag, uploadScriptsGeneratorConfig);
          return {
            title: `Compiling application assets for '${storageProviderTag}'`,
            task: () => compileApplicationAssets({
              cliFlags,
              storageProvider,
              uploadScriptsGenerator,
              paths
            })
          };
        });
        // One task per cloud environment; each nests a task per provider.
        const allApplicationIndexTasks = _mapInstanceProperty__default["default"](bucketEnvironmentConfigs).call(bucketEnvironmentConfigs, bucketEnvironmentConfig => {
          const cloudEnvironment = bucketEnvironmentConfig.cloudEnvironment,
            bucketEnvironment = bucketEnvironmentConfig.bucketEnvironment,
            storageProviders = bucketEnvironmentConfig.storageProviders;
          const storageProviderConfig = {
            bucketRegion: bucketRegion,
            bucketEnvironment
          };
          return {
            title: `Compiling for cloud environment '${cloudEnvironment}'`,
            task: (_storageProviderCtx, storageProviderTask) => {
              var _context2;
              const applicationIndexTasksForStorageProviders = _mapInstanceProperty__default["default"](_context2 = storageProviders || defaultStorageProviders).call(_context2, storageProviderTag => {
                const storageProvider = getStorageProvider(storageProviderTag, storageProviderConfig);
                const uploadScriptsGenerator = getUploadScriptsGenerator(storageProviderTag, storageProviderConfig);
                return {
                  title: `Compiling application index for storage provider '${storageProviderTag}'`,
                  task: () => {
                    return compileEnvironmentApplicationIndexes({
                      cliFlags,
                      storageProvider,
                      uploadScriptsGenerator,
                      paths,
                      cloudEnvironment
                    });
                  }
                };
              });
              return storageProviderTask.newListr(applicationIndexTasksForStorageProviders);
            }
          };
        });
        return bucketRegionTask.newListr([...allApplicationIndexTasks, ...allApplicationAssetTasks]);
      }
    };
  }), {
    // @ts-ignore
    renderer: isCI() ? 'verbose' : 'default'
  });
  await taskList.run();
}
|
|
860
|
+
|
|
861
|
+
// Babel helper (compiled output): own enumerable string keys of `e`, plus own
// enumerable symbol keys when `r` is truthy.
function ownKeys$1(e, r) { var t = _Object$keys__default["default"](e); if (_Object$getOwnPropertySymbols__default["default"]) { var o = _Object$getOwnPropertySymbols__default["default"](e); r && (o = _filterInstanceProperty__default["default"](o).call(o, function (r) { return _Object$getOwnPropertyDescriptor__default["default"](e, r).enumerable; })), t.push.apply(t, o); } return t; }
// Babel helper implementing object-spread semantics via ownKeys$1.
function _objectSpread$1(e) { for (var r = 1; r < arguments.length; r++) { var _context3, _context4; var t = null != arguments[r] ? arguments[r] : {}; r % 2 ? _forEachInstanceProperty__default["default"](_context3 = ownKeys$1(Object(t), !0)).call(_context3, function (r) { _defineProperty(e, r, t[r]); }) : _Object$getOwnPropertyDescriptors__default["default"] ? _Object$defineProperties__default["default"](e, _Object$getOwnPropertyDescriptors__default["default"](t)) : _forEachInstanceProperty__default["default"](_context4 = ownKeys$1(Object(t))).call(_context4, function (r) { _Object$defineProperty__default["default"](e, r, _Object$getOwnPropertyDescriptor__default["default"](t, r)); }); } return e; }

// The menu links are only parsed from the config in development mode.
// NOTE: module-level side effect — set before any config processing runs.
process.env.NODE_ENV = 'development';
|
|
866
|
+
// Ensures every supported locale has a label entry: locales missing from
// `labelAllLocales` are filled with `defaultLabel`. When no `defaultLabel`
// is given, the input is returned unchanged.
const mapLabelAllLocalesWithDefaults = (labelAllLocales, defaultLabel) => {
  let mappedLabelAllLocales = labelAllLocales;
  if (defaultLabel) {
    var _context;
    // Map all supported locales with the given localized labels.
    // If a locale is not defined in the config, we use the `default` label as the value.
    // This is only needed for development as we're trying to map two different schemas.
    mappedLabelAllLocales = _mapInstanceProperty__default["default"](_context = l10n.getSupportedLocales()).call(_context, supportedLocale => {
      const existingField = _findInstanceProperty__default["default"](labelAllLocales).call(labelAllLocales, field => field.locale === supportedLocale);
      if (existingField) return existingField;
      return {
        locale: supportedLocale,
        value: defaultLabel
      };
    });
  }
  return mappedLabelAllLocales;
};
|
|
884
|
+
|
|
885
|
+
/**
 * Transform menu links defined in the `custom-application-config.json` to the format
 * used by the HTTP Proxy GraphQL API.
 *
 * When `__DEVELOPMENT__.accountLinks` is present (account app only) a flat
 * list of links is returned; otherwise a single menu entry (with submenu) is
 * built from `__DEVELOPMENT__.menuLinks`.
 *
 * NOTE(review): access to `menuLinks` mixes `menuLinks.` and `menuLinks?.` —
 * if `__DEVELOPMENT__.menuLinks` were absent, `menuLinks.icon` would throw
 * before the optional-chained reads matter. Presumably `menuLinks` is
 * guaranteed when no account links exist; confirm against the config schema.
 */

const mapApplicationMenuConfigToGraqhQLMenuJson = config => {
  var _context2;
  const entryPointUriPath = config.env.entryPointUriPath;

  // @ts-expect-error: the `accountLinks` is not explicitly typed as it's only used by the account app.
  const accountLinks = config.env.__DEVELOPMENT__?.accountLinks ?? [];
  if (accountLinks.length > 0) {
    return _mapInstanceProperty__default["default"](accountLinks).call(accountLinks, menuLink => ({
      key: menuLink.uriPath,
      uriPath: menuLink.uriPath,
      labelAllLocales: mapLabelAllLocalesWithDefaults(menuLink.labelAllLocales, menuLink.defaultLabel),
      permissions: menuLink.permissions ?? [],
      // @ts-ignore: not defined in schema, as it's only used internally.
      featureToggle: menuLink.featureToggle ?? null
    }));
  }
  const menuLinks = config.env.__DEVELOPMENT__?.menuLinks;
  return {
    key: entryPointUriPath,
    uriPath: entryPointUriPath,
    icon: menuLinks.icon,
    labelAllLocales: mapLabelAllLocalesWithDefaults(menuLinks?.labelAllLocales, menuLinks?.defaultLabel),
    permissions: menuLinks.permissions,
    // @ts-ignore: not defined in schema, as it's only used internally.
    featureToggle: menuLinks.featureToggle ?? null,
    // @ts-ignore: not defined in schema, as it's only used internally.
    menuVisibility: menuLinks.menuVisibility ?? null,
    // @ts-ignore: not defined in schema, as it's only used internally.
    actionRights: menuLinks.actionRights ?? null,
    // @ts-ignore: not defined in schema, as it's only used internally.
    dataFences: menuLinks.dataFences ?? null,
    submenu: _mapInstanceProperty__default["default"](_context2 = menuLinks.submenuLinks).call(_context2, submenuLink => ({
      // Submenu keys are derived from the uriPath with '/' replaced by '-'.
      key: submenuLink.uriPath.replace('/', '-'),
      uriPath: submenuLink.uriPath,
      labelAllLocales: mapLabelAllLocalesWithDefaults(submenuLink.labelAllLocales, submenuLink.defaultLabel),
      permissions: submenuLink.permissions,
      // @ts-ignore: not defined in schema, as it's only used internally.
      featureToggle: submenuLink.featureToggle ?? null,
      // @ts-ignore: not defined in schema, as it's only used internally.
      menuVisibility: submenuLink.menuVisibility ?? null,
      // @ts-ignore: not defined in schema, as it's only used internally.
      actionRights: submenuLink.actionRights ?? null,
      // @ts-ignore: not defined in schema, as it's only used internally.
      dataFences: submenuLink.dataFences ?? null
    })),
    // @ts-ignore: not defined in schema, as it's only used internally.
    shouldRenderDivider: menuLinks.shouldRenderDivider ?? false
  };
};
|
|
939
|
+
/**
 * CLI command: processes the application config in development mode (so menu
 * links are parsed), maps it to the GraphQL menu shape, and writes the result
 * as pretty-printed `menu.json` into the application directory.
 */
async function command$3(cliFlags, cwd) {
  const applicationDirectory = getApplicationDirectory(cwd);
  const monorepoRoot = findRoot.findRootSync(cwd);
  const dotenvPath = cliFlags.dotenvFolder && path__default["default"].join(monorepoRoot.rootDir, cliFlags.dotenvFolder);

  // The env itself is not important for the menu. However, the application config
  // uses environment placeholders and therefore we need to provide the variables for it.
  const cloudEnvironment = storageBucketsConfig.clusterContexts['ctp_staging_gcp_europe-west1_v1'];
  const processEnv = _objectSpread$1(_objectSpread$1({}, loadDotenvFiles({
    dotenvPath,
    cloudEnvironment
  })), {}, {
    // Again, make sure that the environment is "development", otherwise
    // the menu config won't be available.
    NODE_ENV: 'development',
    MC_APP_ENV: 'development',
    // Something random, just to have environment variable defined.
    REVISION: '123'
  });
  const applicationRuntimeConfig = await applicationConfig.processConfig({
    disableCache: true,
    applicationPath: applicationDirectory,
    processEnv
  });
  const applicationMenu = mapApplicationMenuConfigToGraqhQLMenuJson(applicationRuntimeConfig);
  const formattedJson = _JSON$stringify__default["default"](applicationMenu, null, 2);
  fs__default$1["default"].writeFileSync(path__default["default"].join(applicationDirectory, 'menu.json'), formattedJson, {
    encoding: 'utf8'
  });
}
|
|
969
|
+
|
|
970
|
+
/**
 * CLI command (`create-version`): builds the next `version.json` payload.
 * The currently deployed version (fetched from `cliFlags.versionUrl`) becomes
 * the first rollback entry, followed by its previous rollbacks, capped at
 * `cliFlags.rollbacks - 1` entries. Writes to `cliFlags.outFile` when given,
 * otherwise prints to stdout for the calling bash script to capture.
 *
 * Any failure to fetch or parse the previous version intentionally falls back
 * to an empty rollback list (best-effort: first deploy / bucket not yet
 * populated).
 */
async function command$2(cliFlags) {
  const numberOfRollbacks = cliFlags.rollbacks - 1;
  let nextRollbacks;
  try {
    var _context, _context2;
    // The last build's JSON becomes the first rollback
    // while all previous rollbacks remain but are sliced.
    const lastVersionResponse = await fetch(cliFlags.versionUrl);
    const lastVersionJson = await lastVersionResponse.json();
    const previousBuild = lastVersionJson && {
      buildNumber: lastVersionJson.buildNumber,
      revision: lastVersionJson.revision,
      deployedAt: lastVersionJson.deployedAt
    };
    nextRollbacks = _sliceInstanceProperty__default["default"](_context = _filterInstanceProperty__default["default"](_context2 = [previousBuild, ...lastVersionJson.rollbacks]).call(_context2, Boolean)).call(_context, 0, numberOfRollbacks);
  } catch (error) {
    // Deliberate swallow: no previous version available — start fresh.
    nextRollbacks = [];
  }
  const nextBuild = {
    buildNumber: cliFlags.buildNumber,
    revision: cliFlags.buildRevision,
    deployedAt: new Date().toISOString(),
    rollbacks: nextRollbacks
  };
  const formattedJson = _JSON$stringify__default["default"](nextBuild, null, 2);
  // Logging to stdout which is from where it will be picked
  // up by the caller (a bash script).
  if (cliFlags.outFile) {
    fs__default$1["default"].writeFileSync(cliFlags.outFile, formattedJson, {
      encoding: 'utf8'
    });
  } else {
    console.log(formattedJson);
  }
}
|
|
1005
|
+
|
|
1006
|
+
/**
|
|
1007
|
+
* This is heavily inspired by https://circleci.com/developer/orbs/orb/circleci/path-filtering.
|
|
1008
|
+
*
|
|
1009
|
+
* It detects changed files between `HEAD` and a base revision.
|
|
1010
|
+
* To match them against configured RegEx tr
|
|
1011
|
+
* All matched triggers will be written as a dotenv file.
|
|
1012
|
+
* The dotenv file is read in a CircleCI step and be evaluated.
|
|
1013
|
+
*/
|
|
1014
|
+
// Shell command builders for detecting changed files between HEAD and a base
// revision. NOTE(review): arguments are interpolated into shell command
// strings unescaped — assumed to be trusted CI-provided refs; verify before
// ever passing user-controlled input.
const git = {
  // https://git-scm.com/docs/git-merge-base
  base(baseBranch, headRevision) {
    return `git merge-base ${baseBranch} ${headRevision}`;
  },
  // https://git-scm.com/docs/git-diff
  changedFiles(mergeRevision, headRevision) {
    return `git diff --name-only ${mergeRevision} ${headRevision}`;
  },
  // https://git-scm.com/docs/git-log
  commitMessage(headRevision) {
    return `git log --format=oneline -n 1 ${headRevision}`;
  }
};
|
|
1021
|
+
const helpers = {
|
|
1022
|
+
async writeOutDotEnvFile(cliFlags, cwd, matchingTriggers) {
|
|
1023
|
+
var _context;
|
|
1024
|
+
// If desired read the env file and write out the matching triggers.
|
|
1025
|
+
if (!cliFlags.outEnvFile) {
|
|
1026
|
+
return;
|
|
1027
|
+
}
|
|
1028
|
+
const filePath = path__default$1["default"].join(fs__default["default"].realpathSync(cwd), cliFlags.outEnvFile);
|
|
1029
|
+
const fileContents = _mapInstanceProperty__default["default"](_context = _Object$entries__default["default"](matchingTriggers)).call(_context, _ref => {
|
|
1030
|
+
let _ref2 = _slicedToArray(_ref, 2),
|
|
1031
|
+
triggerName = _ref2[0],
|
|
1032
|
+
triggerValue = _ref2[1];
|
|
1033
|
+
const triggerNameForEnvFile = `${snakeCase__default["default"](triggerName).toUpperCase()}`;
|
|
1034
|
+
|
|
1035
|
+
// General pipeline optimization hints are not transformed
|
|
1036
|
+
if (_startsWithInstanceProperty__default["default"](triggerName).call(triggerName, 'allowPipelineOptimizations')) {
|
|
1037
|
+
return `${triggerNameForEnvFile}=${triggerValue}`;
|
|
1038
|
+
}
|
|
1039
|
+
return `DID_${triggerNameForEnvFile}_CHANGE=${triggerValue}`;
|
|
1040
|
+
}).join('\n');
|
|
1041
|
+
await fs__default["default"].promises.writeFile(filePath, fileContents);
|
|
1042
|
+
if (!cliFlags.silent) {
|
|
1043
|
+
console.log(`📝 Wrote out file to '${filePath}' with contents:`);
|
|
1044
|
+
console.log(fileContents);
|
|
1045
|
+
}
|
|
1046
|
+
},
|
|
1047
|
+
async getChangedFiles(cliFlags) {
|
|
1048
|
+
var _context2, _context3;
|
|
1049
|
+
const baseCmdResult = await execa.command(git.base(cliFlags.baseBranch, cliFlags.headRevision));
|
|
1050
|
+
const mergeRevision = baseCmdResult.stdout;
|
|
1051
|
+
const changedFilesCmdResult = await execa.command(git.changedFiles(mergeRevision, cliFlags.headRevision));
|
|
1052
|
+
const changedFiles = _filterInstanceProperty__default["default"](_context2 = _mapInstanceProperty__default["default"](_context3 = changedFilesCmdResult.stdout.split('\n')).call(_context3, filePath => _trimInstanceProperty__default["default"](filePath).call(filePath))).call(_context2, filePath => filePath.length > 0);
|
|
1053
|
+
return changedFiles;
|
|
1054
|
+
},
|
|
1055
|
+
async matchTriggersAgainstChangedFiles(cliFlags, config, changedFiles) {
|
|
1056
|
+
const matchedTriggers = {};
|
|
1057
|
+
|
|
1058
|
+
// Evaluate each trigger against each file.
|
|
1059
|
+
_forEachInstanceProperty__default["default"](config).call(config, async trigger => {
|
|
1060
|
+
const hasTriggerBeenInitialized = typeof matchedTriggers[trigger.name] === 'number';
|
|
1061
|
+
|
|
1062
|
+
// Given the trigger with this name was never evaluated it has to be defaulted to 0.
|
|
1063
|
+
// As without any matches we should indicate nothing changed.
|
|
1064
|
+
if (!hasTriggerBeenInitialized) {
|
|
1065
|
+
matchedTriggers[trigger.name] = 0;
|
|
1066
|
+
}
|
|
1067
|
+
// Given the trigger was already evaluated to be positive we can skip this evaluation.
|
|
1068
|
+
if (matchedTriggers[trigger.name] === 1) {
|
|
1069
|
+
return matchedTriggers;
|
|
1070
|
+
}
|
|
1071
|
+
|
|
1072
|
+
// In any other case we evaluate this trigger.
|
|
1073
|
+
const anyFileChangedForTrigger = _someInstanceProperty__default["default"](micromatch__default["default"]).call(micromatch__default["default"], changedFiles, trigger.include, {
|
|
1074
|
+
ignore: trigger.ignore
|
|
1075
|
+
});
|
|
1076
|
+
if (!cliFlags.silent && anyFileChangedForTrigger) {
|
|
1077
|
+
console.log(`ℹ️ Files for trigger ${trigger.name} changed.`);
|
|
1078
|
+
}
|
|
1079
|
+
let onlyExcludedFilesChangedForTrigger = false;
|
|
1080
|
+
if (trigger.exclude?.length > 0) {
|
|
1081
|
+
// NOTE: `micromatch.every` evaluates if every file matches
|
|
1082
|
+
// every pattern.
|
|
1083
|
+
// We need to evaluate if every file matches some pattern.
|
|
1084
|
+
onlyExcludedFilesChangedForTrigger = _everyInstanceProperty__default["default"](changedFiles).call(changedFiles, changedFile => {
|
|
1085
|
+
return micromatch__default["default"].isMatch(changedFile, trigger.exclude, {
|
|
1086
|
+
ignore: trigger.ignore
|
|
1087
|
+
});
|
|
1088
|
+
});
|
|
1089
|
+
}
|
|
1090
|
+
if (!cliFlags.silent && onlyExcludedFilesChangedForTrigger) {
|
|
1091
|
+
console.log(`ℹ️ Only excluded files for trigger ${trigger.name} changed.`);
|
|
1092
|
+
}
|
|
1093
|
+
if (onlyExcludedFilesChangedForTrigger) {
|
|
1094
|
+
matchedTriggers[trigger.name] = 0;
|
|
1095
|
+
} else {
|
|
1096
|
+
matchedTriggers[trigger.name] = Number(anyFileChangedForTrigger);
|
|
1097
|
+
}
|
|
1098
|
+
return matchedTriggers;
|
|
1099
|
+
});
|
|
1100
|
+
return matchedTriggers;
|
|
1101
|
+
}
|
|
1102
|
+
};
|
|
1103
|
+
/**
 * `evaluate-change-triggers` command: determines whether the files changed
 * between the base branch and the head revision are relevant for
 * `cliFlags.triggerName`, records the result of every trigger (plus an
 * overall `allowPipelineOptimizationsForTrigger` flag) and writes them out
 * as a dotenv file.
 *
 * @param {object} cliFlags - Parsed CLI options (`branch`, `baseBranch`,
 *   `headRevision`, `triggerName`, `silent`, `outEnvFile`).
 * @param {Array<object>} config - Trigger definitions (see
 *   `matchTriggersAgainstChangedFiles`).
 * @param {string} cwd - Working directory used when writing the env file.
 * @returns {Promise<Record<string, number>>} The matched triggers map.
 */
async function command$1(cliFlags, config, cwd) {
  const enablePipelineOptimizations = process.env.ENABLE_PIPELINE_OPTIMIZATIONS === '1';
  const isDevelopmentBranch = cliFlags.branch !== cliFlags.baseBranch;
  const triggersContainingSharedFiles = config.filter(trigger => trigger.containsSharedFiles);
  if (!cliFlags.silent) {
    console.log(`ℹ️ Pipeline optimizations are ${enablePipelineOptimizations ? 'enabled' : 'disabled'}.`);
    // FIX: message previously read "commited to the a development branch"
    // (typo + doubled article).
    console.log(`ℹ️ Changes have been committed to ${isDevelopmentBranch ? 'a development' : 'the main'} branch.`);
    console.log(`🚧 Comparing '${cliFlags.baseBranch}' against '${cliFlags.headRevision}' to determine changed files.`);
  }

  // Collect and parse changed files from git comparing base and head revision.
  const changedFiles = await helpers.getChangedFiles(cliFlags);
  if (!cliFlags.silent) {
    if (changedFiles.length === 0) {
      console.log(`ℹ️ No changes found.`);
    } else {
      console.log(`ℹ️ ${changedFiles.length} changes found.`);
    }
  }

  // Read the trigger file to match the changed files against.
  const matchedTriggers = await helpers.matchTriggersAgainstChangedFiles(cliFlags, config, changedFiles);

  // A "[ci all]" marker in the head commit message forces everything to run.
  const commitMessageCmdResult = await execa.command(git.commitMessage(cliFlags.headRevision));
  const commitMessage = commitMessageCmdResult.stdout;
  const hasCommitMessageTrigger = commitMessage && commitMessage.includes('[ci all]');
  const doesSharedTriggerMatch = triggersContainingSharedFiles.some(
    triggerContainingSharedFiles => matchedTriggers[triggerContainingSharedFiles.name] === 1
  );
  if (!cliFlags.silent) {
    console.log(`ℹ️ The git commit message ${hasCommitMessageTrigger ? 'does' : 'does not'} contain a [ci all] trigger.`);
  }

  const doesPackageFolderTriggerMatch = matchedTriggers[cliFlags.triggerName] === 1;
  // Pipeline optimizations (skipping work) are only allowed on development
  // branches when no override or relevant change was detected.
  if (enablePipelineOptimizations && isDevelopmentBranch && !hasCommitMessageTrigger && !doesSharedTriggerMatch && !doesPackageFolderTriggerMatch) {
    if (!cliFlags.silent) {
      console.log(`ℹ️ No relevant changes found for ${cliFlags.triggerName}.`);
    }
    matchedTriggers['allowPipelineOptimizationsForTrigger'] = 1;
  } else {
    if (!cliFlags.silent) {
      console.log(`ℹ️ Relevant changes found for ${cliFlags.triggerName}.`);
    }
    matchedTriggers['allowPipelineOptimizationsForTrigger'] = 0;
  }
  await helpers.writeOutDotEnvFile(cliFlags, cwd, matchedTriggers);
  return matchedTriggers;
}
|
|
1147
|
+
|
|
1148
|
+
// Babel `objectSpread2` helper: collects the own enumerable string keys of
// `e` and, when the runtime supports symbols, its own symbol keys (filtered
// down to enumerable ones when `r` is truthy), mirroring the key-collection
// semantics of object spread / Object.assign.
function ownKeys(e, r) { var t = _Object$keys__default["default"](e); if (_Object$getOwnPropertySymbols__default["default"]) { var o = _Object$getOwnPropertySymbols__default["default"](e); r && (o = _filterInstanceProperty__default["default"](o).call(o, function (r) { return _Object$getOwnPropertyDescriptor__default["default"](e, r).enumerable; })), t.push.apply(t, o); } return t; }
|
|
1149
|
+
// Babel `objectSpread2` helper: merges each source argument into target `e`
// (equivalent to `{ ...e, ...src }`). Odd-indexed arguments are copied via
// simple property definition; otherwise property descriptors are preserved
// through `Object.defineProperties` when available.
function _objectSpread(e) { for (var r = 1; r < arguments.length; r++) { var _context, _context2; var t = null != arguments[r] ? arguments[r] : {}; r % 2 ? _forEachInstanceProperty__default["default"](_context = ownKeys(Object(t), !0)).call(_context, function (r) { _defineProperty(e, r, t[r]); }) : _Object$getOwnPropertyDescriptors__default["default"] ? _Object$defineProperties__default["default"](e, _Object$getOwnPropertyDescriptors__default["default"](t)) : _forEachInstanceProperty__default["default"](_context2 = ownKeys(Object(t))).call(_context2, function (r) { _Object$defineProperty__default["default"](e, r, _Object$getOwnPropertyDescriptor__default["default"](t, r)); }); } return e; }
|
|
1150
|
+
// JSON Schema property definitions shared by every Merchant Center menu
// entry; reused by both the navbar and appbar menu schemas below.
const baseMenuProperties = {
  // Unique identifier of the menu entry.
  key: {
    type: 'string'
  },
  // Route path the menu entry links to.
  uriPath: {
    type: 'string'
  },
  icon: {
    type: 'string'
  },
  featureToggle: {
    type: ['string', 'null']
  },
  // Localized labels: list of { locale, value } pairs, both required.
  labelAllLocales: {
    type: 'array',
    items: [{
      type: 'object',
      properties: {
        locale: {
          type: 'string'
        },
        value: {
          type: 'string'
        }
      },
      required: ['locale', 'value']
    }]
  },
  menuVisibility: {
    type: ['string', 'null']
  },
  permissions: {
    type: 'array',
    items: {
      type: 'string'
    }
  },
  // Optional data-fence entries ({ group, name, type }); no field required.
  dataFences: {
    type: ['array', 'null'],
    items: [{
      type: ['object'],
      properties: {
        group: {
          type: 'string'
        },
        name: {
          type: 'string'
        },
        type: {
          type: 'string'
        }
      }
    }]
  },
  // Optional action-right entries ({ group, name }); no field required.
  actionRights: {
    type: ['array', 'null'],
    items: [{
      type: ['object'],
      properties: {
        group: {
          type: 'string'
        },
        name: {
          type: 'string'
        }
      }
    }]
  }
};
|
|
1219
|
+
// JSON Schema for a navbar menu entry: all base menu properties plus one
// level of `submenu` items that reuse the same base properties.
const navbarMenuSchema = {
  $schema: 'https://json-schema.org/draft/2020-12/schema',
  // "$id":""
  title: 'NavbarMenu',
  type: 'object',
  properties: _objectSpread(_objectSpread({}, baseMenuProperties), {}, {
    submenu: {
      type: 'array',
      items: [{
        type: 'object',
        properties: baseMenuProperties
      }]
    }
  }),
  required: ['icon', 'key', 'labelAllLocales', 'permissions', 'submenu', 'uriPath']
};
|
|
1235
|
+
// JSON Schema for the appbar (top navigation) menu: a flat array of entries
// built from the shared base properties — no submenu, and no `icon`/`submenu`
// in the required list (unlike the navbar schema).
const appbarMenuSchema = {
  $schema: 'https://json-schema.org/draft/2020-12/schema',
  // "$id":""
  title: 'AppbarMenu',
  type: 'array',
  items: [{
    type: 'object',
    properties: baseMenuProperties,
    required: ['key', 'labelAllLocales', 'permissions', 'uriPath']
  }]
};
|
|
1246
|
+
|
|
1247
|
+
/**
 * Validates a parsed menu JSON payload against a JSON schema.
 *
 * @param {unknown} menuJson - The parsed `menu.json` content.
 * @param {object} [schema=navbarMenuSchema] - Schema to validate against.
 * @returns {unknown} The unchanged `menuJson` when validation succeeds.
 * @throws {Error} When validation fails; the message embeds the validator's
 *   error list.
 */
function validateMenu(menuJson, schema = navbarMenuSchema) {
  const validationResult = new jsonschema.Validator().validate(menuJson, schema);
  if (!validationResult.valid) {
    throw new Error('menu.json validation failed\n' + validationResult.errors);
  }
  return menuJson;
}
|
|
1257
|
+
/**
 * `validate-menu` command: reads a compiled `menu.json` from disk and
 * validates it against the appbar schema (when `--navigation top`) or the
 * navbar schema otherwise.
 *
 * @param {object} cliFlags - Parsed CLI options (`inputFile`, `navigation`).
 * @returns {Promise<unknown>} The validated menu JSON payload.
 * @throws {Error} When the input path is missing, the file does not exist,
 *   or schema validation fails.
 */
async function command(cliFlags) {
  const { inputFile, navigation } = cliFlags;
  if (!inputFile) {
    throw new Error(`--input-file cannot be empty. please provide the path of compiled menu.json`);
  }
  if (!doesFileExist(inputFile)) {
    throw new Error(`The menu.json file doesn't exist: ${inputFile}`);
  }
  const rawMenuJson = fs__default$1["default"].readFileSync(inputFile, 'utf-8');
  const schema = navigation === 'top' ? appbarMenuSchema : navbarMenuSchema;
  return validateMenu(JSON.parse(rawMenuJson), schema);
}
|
|
1265
|
+
|
|
1266
|
+
// Inlined copy of this package's package.json (bundled at build time);
// used below to report the CLI version via `cli.version(pkgJson.version)`.
var pkgJson = {
  name: "@commercetools-frontend/application-cli",
  version: "5.0.2",
  description: "Internal CLI to manage Merchant Center application deployments across various environments.",
  keywords: [
    "commercetools",
    "cli",
    "custom-application"
  ],
  license: "MIT",
  main: "dist/commercetools-frontend-application-cli.cjs.js",
  module: "dist/commercetools-frontend-application-cli.esm.js",
  bin: "bin/cli.js",
  files: [
    "bin",
    "cli",
    "dist",
    "package.json",
    "LICENSE",
    "README.md"
  ],
  scripts: {
    typecheck: "tsc --noEmit"
  },
  dependencies: {
    "@babel/core": "^7.22.11",
    "@babel/runtime-corejs3": "^7.21.0",
    "@commercetools-frontend/application-config": "22.37.0",
    "@commercetools-frontend/constants": "22.37.0",
    "@commercetools-frontend/l10n": "22.37.0",
    "@manypkg/find-root": "2.2.3",
    cac: "^6.7.14",
    cosmiconfig: "9.0.0",
    dotenv: "16.4.5",
    execa: "5.1.1",
    jsonschema: "^1.4.1",
    listr2: "8.2.5",
    lodash: "4.17.21",
    micromatch: "4.0.8",
    "node-fetch": "2.7.0",
    "ts-deepmerge": "7.0.1"
  },
  devDependencies: {
    "@tsconfig/node20": "20.1.4",
    "@types/lodash": "^4.14.198",
    "@types/micromatch": "4.0.9",
    "@types/node": "20.17.13",
    typescript: "5.2.2"
  },
  engines: {
    node: ">=21",
    npm: ">=6"
  },
  publishConfig: {
    access: "public"
  },
  preconstruct: {
    entrypoints: [
      "./cli.ts",
      "./index.ts"
    ]
  }
};
|
|
1329
|
+
|
|
1330
|
+
// CLI instance; 'application-cli' is the binary name shown in help output.
const cli = cac.cac('application-cli');
// All commands resolve paths relative to the invocation directory.
const cwd = process.cwd();
|
|
1332
|
+
/**
 * Wires up all CLI commands and options, parses `process.argv`, and awaits
 * the matched command so async failures propagate to the caller.
 *
 * Exported as the package entrypoint; invoked by `bin/cli.js`.
 */
const run = async () => {
  // Global options shared by all commands; defaults come from
  // CircleCI-provided environment variables where available.
  cli
    .option('--build-revision [git-sha]', '(optional) The git commit SHA which is being built.', {
      default: process.env.CIRCLE_SHA1
    })
    .option('--build-number [string]', '(optional) A number of the build on the Continuous Integration system.', {
      default: process.env.CIRCLE_BUILD_NUM
    })
    .option('--package-manager-name [string]', '(optional) Name of the binary of the used package manager (e.g. pnpm).', {
      default: 'yarn'
    });

  // Default command
  cli.command('').usage('\n\n Compile deployments and menus and create versions for MC applications').action(cli.outputHelp);

  const usageCompileDeployment = 'Compile the deployments for an application for all environments.';
  cli
    .command('compile-deployments', usageCompileDeployment)
    .usage(`compile-deployments \n\n ${usageCompileDeployment}`)
    .option('--application-name <string>', '(required) The name of the application being compiled for example application-products.')
    .option('--dotenv-folder [string]', '(optional) The path to a folder containing a dotenv file ".env.production" and a cloud-environment specific dotenv file (for example ".env.gcp-production-eu"). Those values are parsed and merged together to be used by the `mc-scripts compile-html` command.')
    .option('--pr-number [string]', '(optional) A pull request number determining a scoped storage bucket for the deployment. Please use it carefully.')
    .option('--mc-url [string]', '(optional) The MC URL of the deployment. This is usually inferred from the env file and overwrites the value. Please use it carefully.')
    .option('--mc-api-url [string]', '(optional) The MC API URL of the deployment. This is usually inferred from the env file and overwrites the value. Please use it carefully.')
    .option('--application-index-out-file [path]', '(optional) The name of the application index file.', {
      default: 'application.html'
    })
    // FIX: removed duplicated "the the" from the next two descriptions.
    .option('--application-index-upload-script-out-file [path]', '(optional) The name of the application index upload script file.', {
      default: 'upload-index.sh'
    })
    .option('--application-assets-upload-script-out-file [path]', '(optional) The name of the assets upload script file.', {
      default: 'upload-assets.sh'
    })
    .option('--ci-assets-root-path [path]', '(optional) A replacement value for the scripts root path only used on CI (e.g. "--ci-assets-root-path=/root/") used in generated scripts.')
    .option('--skip-menu', '(optional) If provided, it will skip uploading the `menu.json`.', {
      default: false
    })
    .action(async options => {
      await command$4(options, cwd);
    });

  const usageCompileMenu = 'Compile the menu links of an application into a `menu.json`. This is only required for internal applications';
  cli
    .command('compile-menu', usageCompileMenu)
    .usage(`compile-menu \n\n ${usageCompileMenu}`)
    .option('--dotenv-folder [string]', '(optional) The path to a folder containing a dotenv file `.env.production` and a cloud-environment specific dotenv file (for example `.env.gcp-production-eu`). Those values are parsed and merged together to be used by the application config.')
    .action(async options => {
      await command$3(options, cwd);
    });

  const usageValidateMenu = 'Validate compiled `menu.json` file';
  cli
    .command('validate-menu', usageValidateMenu)
    .usage(`validate-menu \n\n ${usageValidateMenu}`)
    .option('--input-file <path>', '(required) The path to the `menu.json` file to be validated.')
    .option('--navigation [string]', '(optional) Location of the menu navigation. Possible values are `top`.')
    .action(async options => {
      await command(options);
    });

  const usageCreateVersion = 'Output a JSON string about the information in the `version.json` for a deployment, including the updated list of rollbacks.';
  cli
    .command('create-version', usageCreateVersion)
    .usage(`create-version \n\n ${usageCreateVersion}`)
    .option('--version-url <url>', "(required) The path of an application's current `version.json` within the storage bucket.")
    .option('--rollbacks [int]', '(optional) The number of max rollbacks to keep', {
      default: 15
    })
    .option('--out-file [path]', '(optional) The path to the file where to write the JSON. If not specified, the JSON is printed to stdout.')
    .action(async options => {
      await command$2(options);
    });

  // Command: Evaluate change triggers
  const usageEvaluateChangeTriggers = 'Evaluates changed files against a base and evaluates them against defined triggers.';
  cli
    .command('evaluate-change-triggers', usageEvaluateChangeTriggers)
    .usage(`evaluate-change-triggers \n\n ${usageEvaluateChangeTriggers}`)
    .option('--branch <string>', 'The branch of the pull request', {
      default: process.env.CIRCLE_BRANCH
    })
    .option('--base-branch <string>', 'The base revision of the git commit compare against (e.g. "main")')
    .option('--head-revision <string>', 'The revision of the git head to compare with', {
      default: process.env.CIRCLE_SHA1
    })
    .option('--trigger-name <string>', 'The trigger to evaluate for.')
    .option('--silent', '(optional) Disable logging', {
      default: false
    })
    .option('--out-env-file [string]', '(optional) A file path where the matched triggers are written as a dotenv file.')
    .action(async options => {
      const config = await storageBucketsConfig.loadConfig('circleci-change-triggers', []);
      await command$1(options, config, cwd);
    });

  cli.help();
  cli.version(pkgJson.version);
  // Parse without auto-running so the matched command can be awaited below,
  // letting async command failures reject this promise.
  cli.parse(process.argv, {
    run: false
  });
  await cli.runMatchedCommand();
};
|
|
1389
|
+
|
|
1390
|
+
// Public entrypoint consumed by bin/cli.js.
exports.run = run;
|