@firestartr/cli 1.48.0-snapshot-0 → 1.48.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (26) hide show
  1. package/build/index.js +1060 -756
  2. package/build/packages/catalog_common/index.d.ts +8 -0
  3. package/build/packages/catalog_common/src/io/write.d.ts +2 -2
  4. package/build/packages/catalog_common/src/logger/index.d.ts +2 -0
  5. package/build/packages/catalog_common/src/logger/logger.d.ts +9 -0
  6. package/build/packages/catalog_common/src/logger/utils.d.ts +1 -0
  7. package/build/packages/features_preparer/src/logger.d.ts +9 -0
  8. package/build/packages/features_renderer/index.d.ts +4 -2
  9. package/build/packages/features_renderer/src/auxiliar.d.ts +1 -2
  10. package/build/packages/features_renderer/src/render.d.ts +2 -0
  11. package/build/packages/github/index.d.ts +5 -0
  12. package/build/packages/github/src/check_run.d.ts +83 -0
  13. package/build/packages/github/src/logger.d.ts +9 -0
  14. package/build/packages/operator/src/logger.d.ts +2 -2
  15. package/build/packages/operator/src/user-feedback-ops/gh-checkrun.d.ts +5 -0
  16. package/build/packages/operator/src/user-feedback-ops/tf-checkrun.d.ts +5 -0
  17. package/build/packages/provisioner/src/cdktf.d.ts +3 -1
  18. package/build/packages/provisioner/src/logger.d.ts +9 -0
  19. package/build/packages/provisioner/src/resources/resource.d.ts +10 -0
  20. package/build/packages/provisioner/src/terraform.d.ts +7 -5
  21. package/build/packages/terraform_provisioner/index.d.ts +1 -1
  22. package/build/packages/terraform_provisioner/src/logger.d.ts +9 -0
  23. package/build/packages/terraform_provisioner/src/project_tf.d.ts +4 -0
  24. package/build/packages/terraform_provisioner/src/project_tf_remote.d.ts +4 -0
  25. package/build/packages/terraform_provisioner/src/utils.d.ts +8 -6
  26. package/package.json +1 -1
package/build/index.js CHANGED
@@ -288941,14 +288941,131 @@ var external_path_ = __nccwpck_require__(71017);
288941
288941
  var external_path_default = /*#__PURE__*/__nccwpck_require__.n(external_path_);
288942
288942
  // EXTERNAL MODULE: ../../node_modules/yaml/dist/index.js
288943
288943
  var yaml_dist = __nccwpck_require__(8447);
288944
- // EXTERNAL MODULE: ../../node_modules/debug/src/index.js
288945
- var src = __nccwpck_require__(67984);
288946
- var src_default = /*#__PURE__*/__nccwpck_require__.n(src);
288944
+ // EXTERNAL MODULE: ../../node_modules/winston/lib/winston.js
288945
+ var winston = __nccwpck_require__(66752);
288946
+ var winston_default = /*#__PURE__*/__nccwpck_require__.n(winston);
288947
+ ;// CONCATENATED MODULE: ../catalog_common/src/logger/utils.ts
288948
+ // https://siderite.dev/blog/jsonstringify-with-circular-references.html/#at2011170946
288949
/**
 * Builds a JSON.stringify replacer that neutralises values which would
 * otherwise break or bloat serialisation: circular references back to the
 * original object, repeated object references, Node.js internal Timeout
 * objects, functions, and binary/typed-array instances.
 *
 * @param {object} o - The root object that will be passed to JSON.stringify.
 * @returns {(key: string, value: any) => any} Replacer for JSON.stringify.
 */
function fixCircularReferences(o) {
    // Binary buffer / typed-array constructors whose instances are replaced
    // by their constructor source instead of being serialised verbatim.
    const binaryTypes = [
        Int8Array,
        Uint8Array,
        Uint8ClampedArray,
        Int16Array,
        Uint16Array,
        Int32Array,
        Uint32Array,
        BigInt64Array,
        BigUint64Array,
        Float32Array,
        Float64Array,
        ArrayBuffer,
        SharedArrayBuffer,
        DataView,
    ];
    // Maps every object already visited to the key it was first seen under,
    // so later occurrences can be reported instead of re-serialised.
    const seen = new Map();
    return (key, value) => {
        // A non-root reference straight back to the original object.
        if (key && value === o) {
            return `[${String(key)} is the same as original object]`;
        }
        if (value === undefined || value === null) {
            return value;
        }
        // Node's internal Timeout is circular (via TimersList); detect it by
        // constructor name, which is more reliable for internal types than
        // an instanceof check would be.
        if (value && value.constructor && value.constructor.name === 'Timeout') {
            return '[Node.js internal timer object]';
        }
        const matchedType = binaryTypes.find((t) => value instanceof t);
        if (matchedType) {
            return matchedType.toString();
        }
        if (typeof value === 'function') {
            return value.toString();
        }
        if (value && typeof value === 'object') {
            const firstKey = seen.get(value);
            if (firstKey) {
                return `[${String(key)} is the same as ${firstKey}]`;
            }
            seen.set(value, String(key));
        }
        return value;
    };
}
288998
+
288999
+ ;// CONCATENATED MODULE: ../catalog_common/src/logger/logger.ts
289000
+
289001
+
289002
// Log levels accepted from the LOG_LEVEL environment variable, ordered from
// most to least severe (the winston npm-levels subset used by this logger).
const validLogLevels = [
    'error',
    'warn',
    'info',
    'debug',
    'verbose',
    'silly',
];
// Whether the singleton winston logger has been created yet.
let initiated = false;
// Lazily-created winston logger instance; see initLogger().
let logger = null;
// Type guard to check if a value is a valid LogLevel
function isValidLogLevel(level) {
    if (typeof level !== 'string') {
        return false;
    }
    return validLogLevels.some((valid) => valid === level);
}
289016
/**
 * Lazily creates the module-level winston logger. Idempotent: the logger is
 * built once and reused on every subsequent call.
 *
 * The level comes from the LOG_LEVEL environment variable when it names a
 * recognised level; otherwise it falls back to 'info'. Output is JSON with a
 * 'YYYY-MM-DD HH:mm:ss' timestamp, written to the console.
 */
function initLogger() {
    if (initiated) {
        return;
    }
    const envLevel = process.env.LOG_LEVEL;
    const logLevel = envLevel && isValidLogLevel(envLevel) ? envLevel : 'info';
    logger = winston_default().createLogger({
        level: logLevel,
        // Keep the process alive even if a transport throws.
        exitOnError: false,
        format: winston.format.combine(
            winston.format.timestamp({ format: 'YYYY-MM-DD HH:mm:ss' }),
            winston.format.json()
        ),
        transports: [
            new winston.transports.Console({
                level: logLevel,
            }),
        ],
    });
    initiated = true;
}
289034
/**
 * Shared dispatcher behind every level-specific function in `logger_log`.
 * Ensures the winston logger exists, then logs `message`, appending a
 * pretty-printed JSON dump of `data.metadata` when a second argument with
 * that shape was supplied.
 *
 * @param {string} level - winston level name ('error' ... 'silly').
 * @param {Array} args - [message, data]; `data.metadata`, when present, is
 *   serialised with circular references patched and appended to the message.
 */
function doLog(level, args) {
    initLogger();
    const [message, data] = args;
    let finalMessage = message;
    if (data) {
        // `data` is guaranteed non-null inside this guard, so read
        // `data.metadata` consistently (the previous code mixed
        // `data.metadata` and `data?.metadata` for the same value).
        const fx = fixCircularReferences(data.metadata);
        try {
            finalMessage = `${finalMessage} | ${JSON.stringify(data.metadata, fx, 2)}`;
        }
        catch (err) {
            // Serialisation failed (e.g. BigInt values); report and drop this
            // log line rather than letting a logging call throw.
            console.error(`Serializing ${message}: ${err}`);
            return;
        }
    }
    // NOTE(review): some call sites still pass debug-style printf args, e.g.
    // log.debug('cached error %s', err) — here that second argument has no
    // `.metadata` and serialises as "undefined"; confirm whether those call
    // sites should be migrated to template literals or { metadata } objects.
    logger[level](finalMessage);
}
289051
// Builds the logging function for one winston level; each simply forwards
// its arguments to the shared doLog dispatcher.
const logger_makeLevelFn = (level) => (...args) => doLog(level, args);
// Public logging facade: one function per supported level.
const logger_log = {
    error: logger_makeLevelFn('error'),
    warn: logger_makeLevelFn('warn'),
    info: logger_makeLevelFn('info'),
    debug: logger_makeLevelFn('debug'),
    verbose: logger_makeLevelFn('verbose'),
    silly: logger_makeLevelFn('silly'),
};
/* harmony default export */ const logger_logger = (logger_log);

;// CONCATENATED MODULE: ../catalog_common/src/logger/index.ts

/* harmony default export */ const src_logger = (logger_logger);
289064
+
288947
289065
  ;// CONCATENATED MODULE: ../catalog_common/src/io/common.ts
288948
289066
 
288949
289067
 
288950
289068
 
288951
- const messageLog = src_default()('firestartr:catalog_common:io:common');
288952
289069
  const ComponentPaths = (/* unused pure expression or super */ null && ([
288953
289070
  'apiVersion',
288954
289071
  'kind',
@@ -289033,25 +289150,25 @@ function transformKind(kind) {
289033
289150
  }
289034
289151
  }
289035
289152
  function getPath(kind, name, catalogPath) {
289036
- messageLog(`Getting path for kind ${kind} and name ${name} in catalog path ${catalogPath}`);
289153
+ src_logger.debug(`Getting path for kind ${kind} and name ${name} in catalog path ${catalogPath}`);
289037
289154
  return external_path_.join(catalogPath, transformKind(kind), name + '.yaml');
289038
289155
  }
289039
289156
  function getKindPath(kind, catalogPath) {
289040
- messageLog(`Getting path for kind ${kind} in catalog path ${catalogPath}`);
289157
+ src_logger.debug(`Getting path for kind ${kind} in catalog path ${catalogPath}`);
289041
289158
  return external_path_.join(catalogPath, transformKind(kind));
289042
289159
  }
289043
289160
  function fromYaml(data) {
289044
289161
  const result = yaml_dist.parse(data);
289045
- messageLog('Loading YAML data: %O', result);
289162
+ src_logger.debug('Loading YAML data: %O', result);
289046
289163
  return result;
289047
289164
  }
289048
289165
  function toYaml(data, opts = {}) {
289049
- messageLog('opts', opts);
289166
+ src_logger.debug('opts', opts);
289050
289167
  const result = yaml_dist.stringify(data);
289051
289168
  return result;
289052
289169
  }
289053
289170
  function dumpYaml(data) {
289054
- messageLog('Dumping object data to YAML %O', data);
289171
+ src_logger.debug('Dumping object data to YAML %O', data);
289055
289172
  return yaml_dist.stringify(data);
289056
289173
  }
289057
289174
 
@@ -289059,7 +289176,6 @@ function dumpYaml(data) {
289059
289176
  var external_child_process_ = __nccwpck_require__(32081);
289060
289177
  ;// CONCATENATED MODULE: ../catalog_common/src/generic/random.ts
289061
289178
 
289062
- const random_messageLog = src_default()('firestartr:catalog_common:generic:random');
289063
289179
  function randomString(length = 10) {
289064
289180
  let result = '';
289065
289181
  const characters = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789';
@@ -289069,7 +289185,7 @@ function randomString(length = 10) {
289069
289185
  result += characters.charAt(Math.floor(Math.random() * charactersLength));
289070
289186
  counter += 1;
289071
289187
  }
289072
- random_messageLog('Generated random string %s', result);
289188
+ src_logger.debug(`Generated random string ${result}`);
289073
289189
  return result;
289074
289190
  }
289075
289191
  function shuffleArray(array) {
@@ -289104,17 +289220,16 @@ function shuffleObject(obj, shuffleArrays = false) {
289104
289220
 
289105
289221
 
289106
289222
 
289107
- const clone_catalog_messageLog = src_default()('firestartr:catalog_common:io:clone_catalog');
289108
289223
  function cloneCatalog(catalogPath, dest = _calculateRandomDestination()) {
289109
- clone_catalog_messageLog(`Cloning catalog from ${catalogPath} to ${dest}`);
289224
+ src_logger.info(`Cloning catalog from ${catalogPath} to ${dest}`);
289110
289225
  return new Promise((ok, ko) => {
289111
289226
  (0,external_child_process_.exec)(`cp -a ${catalogPath} ${dest}`, (error, _stdout, _stderr) => {
289112
289227
  if (error) {
289113
- clone_catalog_messageLog(`Error cloning catalog: ${error.message}`);
289228
+ src_logger.error(`Error cloning catalog: ${error.message}`);
289114
289229
  return ko(error.message);
289115
289230
  }
289116
289231
  else {
289117
- clone_catalog_messageLog(`Catalog cloned to successfully to ${dest}`);
289232
+ src_logger.info(`Catalog cloned successfully to ${dest}`);
289118
289233
  return ok(dest);
289119
289234
  }
289120
289235
  });
@@ -289133,29 +289248,28 @@ var external_fs_default = /*#__PURE__*/__nccwpck_require__.n(external_fs_);
289133
289248
 
289134
289249
 
289135
289250
 
289136
- const write_messageLog = src_default()('firestartr:catalog_common:io:write');
289137
289251
  function writeEntity(entity, path) {
289138
289252
  try {
289139
289253
  entity['metadata']['annotations']['fire-starter.dev/timestamp'] =
289140
289254
  Math.floor(Date.now() / 1000).toString();
289141
289255
  //If we have a status, we remove it
289142
- write_messageLog(`Writing to catalog ${path} entity %O`, entity);
289256
+ src_logger.debug(`Writing to catalog ${path} entity ${entity}`);
289143
289257
  external_fs_.writeFileSync(getPath(entity['kind'], entity['metadata']['name'], path), dumpYaml(entity));
289144
289258
  }
289145
289259
  catch (err) {
289146
- write_messageLog('Error writing entity, error %O', err);
289260
+ src_logger.error(`Error writing entity '${entity.kind}', error ${err}`);
289147
289261
  throw `writeEntity: ${entity.kind} ${err}`;
289148
289262
  }
289149
289263
  }
289150
289264
  function writeClaim(claim, claimsPath) {
289151
289265
  try {
289152
289266
  const kindFolder = `${claim['kind']}s`.toLowerCase().replace('claim', '');
289153
- write_messageLog(`Writing to gitops ${claimsPath}/${kindFolder} claim %O`, claim);
289267
+ src_logger.debug(`Writing to gitops ${claimsPath}/${kindFolder} claim ${claim}`);
289154
289268
  external_fs_.mkdirSync(external_path_.join(claimsPath, kindFolder), { recursive: true });
289155
289269
  external_fs_.writeFileSync(getPathClaim(claim['kind'], claim['name'], claimsPath), dumpYaml(claim));
289156
289270
  }
289157
289271
  catch (err) {
289158
- write_messageLog('Error writing claim, error %O', err);
289272
+ src_logger.error(`Error writing claim, error ${err}`);
289159
289273
  throw `writeClaim: ${claim.kind} ${err}`;
289160
289274
  }
289161
289275
  }
@@ -289172,7 +289286,7 @@ function writeYamlFile(fileName, data, pathFile = '/tmp') {
289172
289286
  external_fs_.writeFileSync(external_path_.join(pathFile, fileName), dumpYaml(data));
289173
289287
  }
289174
289288
  catch (err) {
289175
- write_messageLog('Error writing yaml file, error %O', err);
289289
+ src_logger.error(`Error writing yaml file, error ${err}`);
289176
289290
  throw `writeYamlFile: ${fileName} ${err}`;
289177
289291
  }
289178
289292
  }
@@ -289181,23 +289295,23 @@ function getPathClaim(kind, name, claimsPath) {
289181
289295
  }
289182
289296
  function renameEntity(entity, catalogPath, oldname) {
289183
289297
  try {
289184
- write_messageLog('Renaming oldname %s in %O', oldname, entity);
289298
+ src_logger.debug(`Renaming oldname ${oldname} in ${entity}`);
289185
289299
  const oldPath = getPath(entity.kind, oldname, catalogPath);
289186
289300
  const newPath = getPath(entity.kind, entity.metadata.name, catalogPath);
289187
289301
  external_fs_.renameSync(oldPath, newPath);
289188
289302
  }
289189
289303
  catch (err) {
289190
- write_messageLog('Error writing entity, error %O', err);
289304
+ src_logger.error(`Error writing entity, error ${err}`);
289191
289305
  throw `renameEntity: ${entity.kind} ${err}`;
289192
289306
  }
289193
289307
  }
289194
289308
  function removeEntity(entity, catalogPath) {
289195
289309
  try {
289196
- write_messageLog(`Removing entity ${entity.kind}/${entity.metadata.name} in catalog ${catalogPath}`);
289310
+ src_logger.debug(`Removing entity ${entity.kind}/${entity.metadata.name} in catalog ${catalogPath}`);
289197
289311
  external_fs_.rmSync(getPath(entity.kind, entity.metadata.name, catalogPath));
289198
289312
  }
289199
289313
  catch (err) {
289200
- write_messageLog(`Error removing entity ${entity.kind}/${entity.metadata.name} in catalog ${catalogPath}: ${err}`);
289314
+ src_logger.error(`Error removing entity ${entity.kind}/${entity.metadata.name} in catalog ${catalogPath}: ${err}`);
289201
289315
  throw `removeEntity: ${entity.kind} ${err}`;
289202
289316
  }
289203
289317
  }
@@ -289213,21 +289327,21 @@ function moveFile(oldPath, newPath) {
289213
289327
  external_fs_.cpSync(oldPath, newPath);
289214
289328
  external_fs_.rmSync(oldPath);
289215
289329
  }
289216
- function writeFunctionLog(functionName, log) {
289330
+ function writeFunctionLog(functionName, logStream) {
289217
289331
  try {
289218
- external_fs_.writeFileSync(external_path_.join('/tmp', `${functionName}.${randomString(5)}.log`), log + '\n');
289332
+ external_fs_.writeFileSync(external_path_.join('/tmp', `${functionName}.${randomString(5)}.log`), logStream + '\n');
289219
289333
  }
289220
289334
  catch (err) {
289221
- write_messageLog('Error writing log, error %O', err);
289335
+ src_logger.error(`Error writing log, error ${err}`);
289222
289336
  throw `writeLog: ${functionName} ${err}`;
289223
289337
  }
289224
289338
  }
289225
- function writeLogFile(fileName, log) {
289339
+ function writeLogFile(fileName, logStream) {
289226
289340
  try {
289227
- external_fs_.appendFileSync(external_path_.join('/tmp', fileName + '.log'), log + '\n');
289341
+ external_fs_.appendFileSync(external_path_.join('/tmp', fileName + '.log'), logStream + '\n');
289228
289342
  }
289229
289343
  catch (err) {
289230
- write_messageLog('Error writing log, error %O', err);
289344
+ src_logger.error(`Error writing log, error ${err}`);
289231
289345
  throw `writeLog: ${fileName} ${err}`;
289232
289346
  }
289233
289347
  }
@@ -289237,7 +289351,6 @@ function writeLogFile(fileName, log) {
289237
289351
 
289238
289352
 
289239
289353
 
289240
- const read_messageLog = src_default()('firestartr:catalog_common:io:read');
289241
289354
  function readEntity(kind, name, catalogPaths) {
289242
289355
  try {
289243
289356
  if (typeof catalogPaths === 'string') {
@@ -289246,7 +289359,7 @@ function readEntity(kind, name, catalogPaths) {
289246
289359
  let data = false;
289247
289360
  for (const catalogPath of catalogPaths) {
289248
289361
  try {
289249
- read_messageLog(`Reading entity ${kind}/${name} from catalog ${catalogPath}`);
289362
+ src_logger.debug(`Reading entity ${kind}/${name} from catalog ${catalogPath}`);
289250
289363
  const entityPath = getPath(kind, name, catalogPath);
289251
289364
  if (external_fs_.existsSync(entityPath)) {
289252
289365
  if (data) {
@@ -289256,7 +289369,7 @@ function readEntity(kind, name, catalogPaths) {
289256
289369
  }
289257
289370
  }
289258
289371
  catch (err) {
289259
- read_messageLog('readEntity: cached error %s', err);
289372
+ src_logger.debug('readEntity: cached error %s', err);
289260
289373
  if (err === 'DUPLICATED') {
289261
289374
  throw `Error reading entity: Duplicated ${kind}/${name} in ${catalogPaths.join(', ')}`;
289262
289375
  }
@@ -289268,7 +289381,7 @@ function readEntity(kind, name, catalogPaths) {
289268
289381
  return fromYaml(data);
289269
289382
  }
289270
289383
  catch (err) {
289271
- read_messageLog(err);
289384
+ src_logger.error(err);
289272
289385
  throw `readEntity->: ${kind}/${name}: ${err}`;
289273
289386
  }
289274
289387
  }
@@ -289276,13 +289389,13 @@ function listByKind(kind, catalogPaths, callback, exclude = []) {
289276
289389
  if (typeof catalogPaths === 'string') {
289277
289390
  catalogPaths = [catalogPaths];
289278
289391
  }
289279
- read_messageLog('CATALOGS_PATHS_ %O', catalogPaths);
289392
+ src_logger.debug(`CATALOGS_PATHS_ ${catalogPaths}`);
289280
289393
  const list = [];
289281
289394
  catalogPaths.forEach((catalogPath) => {
289282
289395
  list.push(...external_fs_.readdirSync(getKindPath(kind, catalogPath)));
289283
289396
  });
289284
- read_messageLog('LIST_ %O', list);
289285
- read_messageLog(`Listing entities of kind ${kind} from catalogs`);
289397
+ src_logger.debug(`LIST_ ${list}`);
289398
+ src_logger.debug(`Listing entities of kind ${kind} from catalogs`);
289286
289399
  return list
289287
289400
  .filter((file) => file.match(/\.yaml$/))
289288
289401
  .filter((file) => exclude.indexOf(file.replace(/\.yaml/, '')) === -1)
@@ -290213,6 +290326,9 @@ class CsvWriter {
290213
290326
  }
290214
290327
  /* harmony default export */ const csv_generator = (CsvWriter);
290215
290328
 
290329
+ // EXTERNAL MODULE: ../../node_modules/debug/src/index.js
290330
+ var src = __nccwpck_require__(67984);
290331
+ var src_default = /*#__PURE__*/__nccwpck_require__.n(src);
290216
290332
  ;// CONCATENATED MODULE: ../catalog_common/src/generic/logger.ts
290217
290333
 
290218
290334
 
@@ -290275,9 +290391,8 @@ var lodash_default = /*#__PURE__*/__nccwpck_require__.n(lodash);
290275
290391
 
290276
290392
 
290277
290393
  const { camelCase } = (lodash_default());
290278
- const name_log = src_default()('firestartr:catalog_common:generic:name');
290279
290394
  function normalizeName(name) {
290280
- name_log('Normalizing name %s', name);
290395
+ src_logger.debug(`Normalizing name ${name}`);
290281
290396
  return name.replace(/[^a-z0-9]/gi, '-').toLowerCase();
290282
290397
  }
290283
290398
  function transformKeysToCamelCase(obj) {
@@ -290618,7 +290733,6 @@ const ExternalSecretsApiGroup = 'external-secrets.io';
290618
290733
 
290619
290734
  ;// CONCATENATED MODULE: ../catalog_common/src/environment/index.ts
290620
290735
 
290621
- const environment_messageLog = src_default()('firestartr:catalog_common:environment');
290622
290736
  function getFromEnvironment(envVar) {
290623
290737
  return process.env[envVar];
290624
290738
  }
@@ -290636,7 +290750,7 @@ function getFromEnvironmentAsBoolean(envVar) {
290636
290750
  }
290637
290751
  function checkExistOnEnvironment(envVar) {
290638
290752
  const environmentValue = getFromEnvironment(envVar);
290639
- environment_messageLog(`Checking if environment variable ${envVar} exists: ${environmentValue}`);
290753
+ src_logger.debug(`Checking if environment variable ${envVar} exists: ${environmentValue}`);
290640
290754
  if (!environmentValue || environmentValue === '') {
290641
290755
  return false;
290642
290756
  }
@@ -290682,30 +290796,29 @@ const fullMembersTeam = getFromEnvironmentWithDefault(envVars.fullOrgGroup, `${o
290682
290796
  ;// CONCATENATED MODULE: ../catalog_common/src/features/tarballs.ts
290683
290797
 
290684
290798
 
290685
- const tarballs_messageLog = src_default()('firestartr:catalog_common:features:tarballs');
290686
290799
  function getFeatureZipDownloadPath(featureName, version, owner, repo) {
290687
290800
  const featureDownloadPath = `/tmp/${basicFeaturePath(featureName, version, owner, repo)}-zipball.zip`;
290688
- tarballs_messageLog('Feature tarball download path %s', featureDownloadPath);
290801
+ src_logger.debug(`Feature tarball download path ${featureDownloadPath}`);
290689
290802
  return featureDownloadPath;
290690
290803
  }
290691
290804
  function removeFeatureTarball(featureName, version, owner, repo) {
290692
290805
  const featurePath = getFeatureZipDownloadPath(featureName, version, owner, repo);
290693
- tarballs_messageLog('Removing feature tarball %s', featurePath);
290806
+ src_logger.debug(`Removing feature tarball ${featurePath}`);
290694
290807
  external_fs_.unlinkSync(featurePath);
290695
- tarballs_messageLog(`Removed tarball for feature ${featureName} and version ${version}: ${featurePath}`);
290808
+ src_logger.debug(`Removed tarball for feature ${featureName} and version ${version}: ${featurePath}`);
290696
290809
  }
290697
290810
  function featureTarballExists(featureName, version, owner, repo) {
290698
290811
  const featurePath = getFeatureZipDownloadPath(featureName, version, owner, repo);
290699
290812
  const exists = external_fs_.existsSync(featurePath);
290700
- tarballs_messageLog(`Tarball ${featurePath} exists? ${exists}`);
290813
+ src_logger.debug(`Tarball ${featurePath} exists? ${exists}`);
290701
290814
  return exists;
290702
290815
  }
290703
290816
  function getFeaturesExtractPath(featureName, version, owner, repo, options = {}) {
290704
290817
  const { createIfNotExists } = options;
290705
290818
  const extractPath = `/tmp/${basicFeaturePath(featureName, version, owner, repo)}-extract`;
290706
- tarballs_messageLog('Extract path %s', extractPath);
290819
+ src_logger.debug(`Extract path ${extractPath}`);
290707
290820
  if (createIfNotExists && !external_fs_.existsSync(extractPath)) {
290708
- tarballs_messageLog('Extract path %s does not exist, creating', extractPath);
290821
+ src_logger.debug(`Extract path ${extractPath} does not exist, creating`);
290709
290822
  external_fs_.mkdirSync(extractPath, { recursive: true });
290710
290823
  }
290711
290824
  return extractPath;
@@ -290722,17 +290835,16 @@ function trasformLeg(leg) {
290722
290835
 
290723
290836
 
290724
290837
 
290725
- const features_io_messageLog = src_default()('firestartr:catalog_common:features:features_io');
290726
290838
  function getFeatureRenderedPathForEntity(entity, featureName, basePath = '/tmp') {
290727
290839
  const entityFolderName = `${entity.metadata.name}`.toLowerCase();
290728
290840
  return external_path_default().join(basePath, entityFolderName, featureName);
290729
290841
  }
290730
290842
  function getFeatureRenderedConfigForComponent(entity, featureName, basePath = '/tmp/features') {
290731
- features_io_messageLog('Getting rendered config for component %s and feature %s', entity.name, featureName);
290843
+ src_logger.info(`Getting rendered config for component ${entity.name} and feature ${featureName}`);
290732
290844
  const workdir = getFeatureRenderedPathForEntity(entity, featureName, basePath);
290733
290845
  const config = JSON.parse(external_fs_.readFileSync(`${workdir}/output.json`, { encoding: 'utf8' }));
290734
- features_io_messageLog('Feature output: %O', config);
290735
- features_io_messageLog(`Rendered feature ${featureName} for component ${entity.name}. Result: ${(JSON.stringify, config)}`);
290846
+ src_logger.debug(`Feature output: ${config}`);
290847
+ src_logger.debug(`Rendered feature ${featureName} for component ${entity.name}. Result: ${(JSON.stringify, config)}`);
290736
290848
  return config;
290737
290849
  }
290738
290850
 
@@ -290746,7 +290858,6 @@ function getFeatureRenderedConfigForComponent(entity, featureName, basePath = '/
290746
290858
 
290747
290859
  ;// CONCATENATED MODULE: ../catalog_common/src/policies/policies.ts
290748
290860
 
290749
- const policies_log = src_default()('firestartr:catalog_common:policies');
290750
290861
  const FIRESTARTR_POLICIES = [
290751
290862
  {
290752
290863
  name: 'full-control',
@@ -290785,17 +290896,17 @@ function getPolicyByName(policyName) {
290785
290896
  return FIRESTARTR_POLICIES.find((p) => p.name === policyName || p.aliases.includes(policyName));
290786
290897
  }
290787
290898
  function policiesAreCompatible(syncPolicy, generalPolicy) {
290788
- policies_log('Validating policy compatibility: %s %s', syncPolicy, generalPolicy);
290899
+ src_logger.debug('Validating policy compatibility: %s %s', syncPolicy, generalPolicy);
290789
290900
  const syncPolicyWeight = getPolicyByName(syncPolicy)?.weight;
290790
290901
  const generalPolicyWeight = getPolicyByName(generalPolicy)?.weight;
290791
290902
  if (!syncPolicyWeight || !generalPolicyWeight) {
290792
290903
  throw new Error(`Policy ${syncPolicy} or ${generalPolicy} not found`);
290793
290904
  }
290794
290905
  if (generalPolicyWeight >= syncPolicyWeight) {
290795
- policies_log('Policies %s %s are compatible', syncPolicy, generalPolicy);
290906
+ src_logger.debug('Policies %s %s are compatible', syncPolicy, generalPolicy);
290796
290907
  return true;
290797
290908
  }
290798
- policies_log('Policies %s %s are not compatible', syncPolicy, generalPolicy);
290909
+ src_logger.debug('Policies %s %s are not compatible', syncPolicy, generalPolicy);
290799
290910
  return false;
290800
290911
  }
290801
290912
 
@@ -290815,6 +290926,7 @@ function policiesAreCompatible(syncPolicy, generalPolicy) {
290815
290926
 
290816
290927
 
290817
290928
 
290929
+
290818
290930
  /* harmony default export */ const catalog_common = ({
290819
290931
  io: io,
290820
290932
  generic: generic,
@@ -290823,6 +290935,7 @@ function policiesAreCompatible(syncPolicy, generalPolicy) {
290823
290935
  defaults: defaults,
290824
290936
  features: features,
290825
290937
  policies: policies,
290938
+ logger: logger_logger,
290826
290939
  });
290827
290940
 
290828
290941
  ;// CONCATENATED MODULE: ../../node_modules/universal-user-agent/index.js
@@ -297369,13 +297482,16 @@ async function getOctokitFromPat(envVar) {
297369
297482
  }
297370
297483
  /* harmony default export */ const src_auth = ({ getOctokitForOrg });
297371
297484
 
297485
+ ;// CONCATENATED MODULE: ../github/src/logger.ts
297486
+
297487
+ /* harmony default export */ const github_src_logger = (catalog_common.logger);
297488
+
297372
297489
  ;// CONCATENATED MODULE: ../github/src/organization.ts
297373
297490
 
297374
297491
 
297375
- const organization_messageLog = src_default()('firestartr:github:organization');
297376
297492
  const defaultPerPage = 100;
297377
297493
  async function getRepositoryList(org, perPageEntries = defaultPerPage) {
297378
- organization_messageLog(`Getting repository list for ${org} with ${perPageEntries} entries per page`);
297494
+ github_src_logger.info(`Getting repository list for ${org} with ${perPageEntries} entries per page`);
297379
297495
  const octokit = await getOctokitForOrg(org);
297380
297496
  const options = octokit.repos.listForOrg.endpoint.merge({
297381
297497
  org: org,
@@ -297385,7 +297501,7 @@ async function getRepositoryList(org, perPageEntries = defaultPerPage) {
297385
297501
  return await doPaginatedRequest(options);
297386
297502
  }
297387
297503
  async function getTeamList(org, perPageEntries = defaultPerPage) {
297388
- organization_messageLog(`Getting team list for ${org} with ${perPageEntries} entries per page`);
297504
+ github_src_logger.info(`Getting team list for ${org} with ${perPageEntries} entries per page`);
297389
297505
  const octokit = await getOctokitForOrg(org);
297390
297506
  const options = octokit.rest.teams.list.endpoint.merge({
297391
297507
  org: org,
@@ -297394,7 +297510,7 @@ async function getTeamList(org, perPageEntries = defaultPerPage) {
297394
297510
  return await doPaginatedRequest(options);
297395
297511
  }
297396
297512
  async function getUserList(org, perPageEntries = defaultPerPage) {
297397
- organization_messageLog(`Getting user list for ${org} with ${perPageEntries} entries per page`);
297513
+ github_src_logger.info(`Getting user list for ${org} with ${perPageEntries} entries per page`);
297398
297514
  const octokit = await getOctokitForOrg(org);
297399
297515
  const options = await octokit.rest.orgs.listMembers.endpoint.merge({
297400
297516
  org: org,
@@ -297403,7 +297519,7 @@ async function getUserList(org, perPageEntries = defaultPerPage) {
297403
297519
  return await doPaginatedRequest(options);
297404
297520
  }
297405
297521
  async function validateMember(username, org) {
297406
- organization_messageLog(`Validating ${username} is a member of ${org}`);
297522
+ github_src_logger.debug(`Validating ${username} is a member of ${org}`);
297407
297523
  const octokit = await getOctokitForOrg(org);
297408
297524
  const result = await octokit.orgs.checkMembershipForUser({
297409
297525
  org: org,
@@ -297412,7 +297528,7 @@ async function validateMember(username, org) {
297412
297528
  return result;
297413
297529
  }
297414
297530
  async function getUserRoleInOrg(username, org) {
297415
- organization_messageLog(`Getting user ${username} role in ${org}`);
297531
+ github_src_logger.info(`Getting user ${username} role in ${org}`);
297416
297532
  const octokit = await getOctokitForOrg(org);
297417
297533
  const membership = await octokit.orgs.getMembershipForUser({
297418
297534
  org: org,
@@ -297421,13 +297537,13 @@ async function getUserRoleInOrg(username, org) {
297421
297537
  return membership.data.role;
297422
297538
  }
297423
297539
  async function getOrgInfo(org) {
297424
- organization_messageLog(`Getting info for org ${org}`);
297540
+ github_src_logger.info(`Getting info for org ${org}`);
297425
297541
  const octokit = await getOctokitForOrg(org);
297426
297542
  const orgInfo = await octokit.orgs.get({ org });
297427
297543
  return orgInfo.data;
297428
297544
  }
297429
297545
  async function getOrgPlanName(org) {
297430
- organization_messageLog(`Getting plan for org ${org}`);
297546
+ github_src_logger.info(`Getting plan for org ${org}`);
297431
297547
  const orgInfo = await getOrgInfo(org);
297432
297548
  return orgInfo.plan.name;
297433
297549
  }
@@ -297451,9 +297567,8 @@ async function doPaginatedRequest(options) {
297451
297567
 
297452
297568
 
297453
297569
 
297454
- const repository_messageLog = src_default()('firestartr:github:repository');
297455
297570
  async function listReleases(repo, owner = 'prefapp') {
297456
- repository_messageLog(`Getting releases for ${owner}/${repo}`);
297571
+ github_src_logger.info(`Getting releases for ${owner}/${repo}`);
297457
297572
  const octokit = await getOctokitForOrg(owner);
297458
297573
  const response = await octokit.rest.repos.listReleases({
297459
297574
  owner,
@@ -297464,7 +297579,7 @@ async function listReleases(repo, owner = 'prefapp') {
297464
297579
  return response.data;
297465
297580
  }
297466
297581
  async function getReleaseByTag(releaseTag, repo, owner = 'prefapp') {
297467
- repository_messageLog(`Getting release ${releaseTag} for ${owner}/${repo}`);
297582
+ github_src_logger.info(`Getting release ${releaseTag} for ${owner}/${repo}`);
297468
297583
  const octokit = await getOctokitForOrg(owner);
297469
297584
  const response = await octokit.rest.repos.getReleaseByTag({
297470
297585
  owner,
@@ -297479,7 +297594,7 @@ async function getFileFromGithub(path, repo, owner = 'prefapp') {
297479
297594
  return await octokit.rest.repos.getContent({ owner, repo, path });
297480
297595
  }
297481
297596
  async function getContent(path, repo, owner = 'prefapp', ref = '') {
297482
- repository_messageLog(`Getting content for ${owner}/${repo}/${path}`);
297597
+ github_src_logger.info(`Getting content for ${owner}/${repo}/${path}`);
297483
297598
  const octokit = await getOctokitForOrg(owner);
297484
297599
  const opts = {
297485
297600
  owner,
@@ -297493,19 +297608,19 @@ async function getContent(path, repo, owner = 'prefapp', ref = '') {
297493
297608
  return Buffer.from(content.data.content, 'base64').toString('utf8');
297494
297609
  }
297495
297610
  async function getRepoInfo(owner, name) {
297496
- repository_messageLog(`Getting repo info for ${owner}/${name}`);
297611
+ github_src_logger.info(`Getting repo info for ${owner}/${name}`);
297497
297612
  const octokit = await getOctokitForOrg(owner);
297498
297613
  const res = await octokit.repos.get({ owner: owner, repo: name });
297499
297614
  return res['data'];
297500
297615
  }
297501
297616
  async function getPages(owner, name) {
297502
- repository_messageLog(`Getting pages for ${owner}/${name}`);
297617
+ github_src_logger.info(`Getting pages for ${owner}/${name}`);
297503
297618
  const octokit = await getOctokitForOrg(owner);
297504
297619
  const res = await octokit.repos.getPages({ owner: owner, repo: name });
297505
297620
  return res['data'];
297506
297621
  }
297507
297622
  async function getOIDCRepo(owner, name) {
297508
- repository_messageLog(`Getting repo info for ${owner}/${name}`);
297623
+ github_src_logger.info(`Getting repo info for ${owner}/${name}`);
297509
297624
  const octokit = await getOctokitForOrg(owner);
297510
297625
  return await octokit.request(`GET /repos/${owner}/${name}/actions/oidc/customization/sub`, {
297511
297626
  owner: owner,
@@ -297516,7 +297631,7 @@ async function getOIDCRepo(owner, name) {
297516
297631
  });
297517
297632
  }
297518
297633
  async function getBranchProtection(owner, repo, branch = 'main') {
297519
- repository_messageLog(`Getting branch protection for ${owner}/${repo}/${branch}`);
297634
+ github_src_logger.info(`Getting branch protection for ${owner}/${repo}/${branch}`);
297520
297635
  const octokit = await getOctokitForOrg(owner);
297521
297636
  const res = await octokit.repos.getBranchProtection({
297522
297637
  owner: owner,
@@ -297526,13 +297641,13 @@ async function getBranchProtection(owner, repo, branch = 'main') {
297526
297641
  return res['data'];
297527
297642
  }
297528
297643
  async function getTeams(owner, repo) {
297529
- repository_messageLog(`Getting teams for ${owner}/${repo}`);
297644
+ github_src_logger.info(`Getting teams for ${owner}/${repo}`);
297530
297645
  const octokit = await getOctokitForOrg(owner);
297531
297646
  const res = await octokit.repos.listTeams({ owner: owner, repo: repo });
297532
297647
  return res['data'];
297533
297648
  }
297534
297649
  async function getCollaborators(owner, repo, affiliation = 'direct') {
297535
- repository_messageLog(`Getting collaborators for ${owner}/${repo}`);
297650
+ github_src_logger.info(`Getting collaborators for ${owner}/${repo}`);
297536
297651
  const octokit = await getOctokitForOrg(owner);
297537
297652
  const res = await octokit.repos.listCollaborators({
297538
297653
  owner: owner,
@@ -297543,7 +297658,7 @@ async function getCollaborators(owner, repo, affiliation = 'direct') {
297543
297658
  }
297544
297659
  async function setContent(path, fileContent, repo, owner = 'prefapp', branch = 'main', message = '') {
297545
297660
  const base64Content = Buffer.from(fileContent, 'utf8').toString('base64');
297546
- repository_messageLog(`Setting content for ${owner}/${repo}/${path}`);
297661
+ github_src_logger.info(`Setting content for ${owner}/${repo}/${path}`);
297547
297662
  if (message === '') {
297548
297663
  message = `Update ${path}`;
297549
297664
  }
@@ -297551,10 +297666,10 @@ async function setContent(path, fileContent, repo, owner = 'prefapp', branch = '
297551
297666
  try {
297552
297667
  const currentContent = await getFileFromGithub(path, repo, owner);
297553
297668
  sha = currentContent.data.sha;
297554
- repository_messageLog('File already exists, updating it');
297669
+ github_src_logger.debug('File already exists, updating it');
297555
297670
  }
297556
297671
  catch {
297557
- repository_messageLog('File does not exists, creating it');
297672
+ github_src_logger.debug('File does not exist, creating it');
297558
297673
  }
297559
297674
  const octokit = await getOctokitForOrg(owner);
297560
297675
  await octokit.rest.repos.createOrUpdateFileContents({
@@ -297569,7 +297684,7 @@ async function setContent(path, fileContent, repo, owner = 'prefapp', branch = '
297569
297684
  }
297570
297685
  async function uploadFile(destinationPath, filePath, repo, owner = 'prefapp', branch = 'main', message = '') {
297571
297686
  if (!external_fs_.existsSync(filePath)) {
297572
- repository_messageLog(`File ${filePath} does not exists or is not readable`);
297687
+ github_src_logger.error(`File ${filePath} does not exists or is not readable`);
297573
297688
  throw `${filePath} does not exists or is not readable`;
297574
297689
  }
297575
297690
  // Read file contents and call setContent
@@ -297578,16 +297693,16 @@ async function uploadFile(destinationPath, filePath, repo, owner = 'prefapp', br
297578
297693
  }
297579
297694
  async function deleteFile(path, repo, owner = 'prefapp', branch = 'main', message = '') {
297580
297695
  let sha = undefined;
297581
- repository_messageLog(`Deleting file ${owner}/${repo}/${path}`);
297696
+ github_src_logger.info(`Deleting file ${owner}/${repo}/${path}`);
297582
297697
  try {
297583
297698
  const currentContent = await getFileFromGithub(path, repo, owner);
297584
297699
  sha = currentContent.data.sha;
297585
297700
  }
297586
297701
  catch {
297587
- repository_messageLog(`File ${path} does not exist in ${repo}`);
297702
+ github_src_logger.error(`File ${path} does not exist in ${repo}`);
297588
297703
  }
297589
297704
  if (!sha) {
297590
- repository_messageLog(`File ${path} does not exist in ${repo}`);
297705
+ github_src_logger.error(`File ${path} does not exist in ${repo}`);
297591
297706
  throw `File ${path} does not exist in ${repo}`;
297592
297707
  }
297593
297708
  if (message === '') {
@@ -297604,7 +297719,7 @@ async function deleteFile(path, repo, owner = 'prefapp', branch = 'main', messag
297604
297719
  });
297605
297720
  }
297606
297721
  async function addStatusCheck(output, is_failure, head_sha, name, status, repo, owner = 'prefapp') {
297607
- repository_messageLog(`Adding status checks to commit ${head_sha} in ${owner}/${repo}`);
297722
+ github_src_logger.info(`Adding status checks to commit ${head_sha} in ${owner}/${repo}`);
297608
297723
  const octokit = await getOctokitForOrg(owner);
297609
297724
  const payload = { output, head_sha, name, owner, repo, status };
297610
297725
  if (status === 'completed') {
@@ -297613,7 +297728,7 @@ async function addStatusCheck(output, is_failure, head_sha, name, status, repo,
297613
297728
  await octokit.rest.checks.create(payload);
297614
297729
  }
297615
297730
  async function addCommitStatus(state, sha, repo, owner = 'prefapp', target_url = '', description = '', context = '') {
297616
- repository_messageLog(`Adding commit status with state ${state} to SHA ${sha} in ${owner}/${repo}`);
297731
+ github_src_logger.info(`Adding commit status with state ${state} to SHA ${sha} in ${owner}/${repo}`);
297617
297732
  const octokit = await getOctokitForOrg(owner);
297618
297733
  await octokit.rest.repos.createCommitStatus({
297619
297734
  owner,
@@ -297645,9 +297760,8 @@ async function addCommitStatus(state, sha, repo, owner = 'prefapp', target_url =
297645
297760
  ;// CONCATENATED MODULE: ../github/src/team.ts
297646
297761
 
297647
297762
 
297648
- const team_messageLog = src_default()('firestartr:github:team');
297649
297763
  async function getTeamMembers(team, org) {
297650
- team_messageLog(`Getting members for ${org}/${team}`);
297764
+ github_src_logger.info(`Getting members for ${org}/${team}`);
297651
297765
  const octokit = await getOctokitForOrg(org);
297652
297766
  const res = await octokit.rest.teams.listMembersInOrg({
297653
297767
  org: org,
@@ -297656,13 +297770,13 @@ async function getTeamMembers(team, org) {
297656
297770
  return res['data'];
297657
297771
  }
297658
297772
  async function getTeamInfo(team, org) {
297659
- team_messageLog(`Getting info for ${org}/${team}`);
297773
+ github_src_logger.info(`Getting info for ${org}/${team}`);
297660
297774
  const octokit = await getOctokitForOrg(org);
297661
297775
  const res = await octokit.rest.teams.getByName({ org: org, team_slug: team });
297662
297776
  return res['data'];
297663
297777
  }
297664
297778
  async function getTeamRoleUser(org, team, username) {
297665
- team_messageLog(`Getting role for ${username} in ${org}/${team}`);
297779
+ github_src_logger.info(`Getting role for ${username} in ${org}/${team}`);
297666
297780
  const octokit = await getOctokitForOrg(org);
297667
297781
  const res = await octokit.rest.teams.getMembershipForUserInOrg({
297668
297782
  org: org,
@@ -297672,7 +297786,7 @@ async function getTeamRoleUser(org, team, username) {
297672
297786
  return res['data'];
297673
297787
  }
297674
297788
  async function create(org, team, privacy = 'closed') {
297675
- team_messageLog(`Creating team ${org}/${team}`);
297789
+ github_src_logger.info(`Creating team ${org}/${team}`);
297676
297790
  const octokit = await getOctokitForOrg(org);
297677
297791
  return await octokit.rest.teams.create({
297678
297792
  org: org,
@@ -297681,7 +297795,7 @@ async function create(org, team, privacy = 'closed') {
297681
297795
  });
297682
297796
  }
297683
297797
  async function addOrUpdateMember(org, team, username, role = 'member') {
297684
- team_messageLog(`Adding or updating ${username} in ${org}/${team} with role ${role}`);
297798
+ github_src_logger.info(`Adding or updating ${username} in ${org}/${team} with role ${role}`);
297685
297799
  const octokit = await getOctokitForOrg(org);
297686
297800
  return await octokit.rest.teams.addOrUpdateMembershipForUserInOrg({
297687
297801
  org: org,
@@ -297691,7 +297805,7 @@ async function addOrUpdateMember(org, team, username, role = 'member') {
297691
297805
  });
297692
297806
  }
297693
297807
  async function removeMember(org, team, username) {
297694
- team_messageLog(`Removing ${username} from ${org}/${team}`);
297808
+ github_src_logger.info(`Removing ${username} from ${org}/${team}`);
297695
297809
  const octokit = await getOctokitForOrg(org);
297696
297810
  return await octokit.teams.removeMembershipForUserInOrg({
297697
297811
  org: org,
@@ -297711,9 +297825,8 @@ async function removeMember(org, team, username) {
297711
297825
  ;// CONCATENATED MODULE: ../github/src/user.ts
297712
297826
 
297713
297827
 
297714
- const user_messageLog = src_default()('firestartr:github:user');
297715
297828
  async function getUserInfo(name) {
297716
- user_messageLog(`Getting user ${name} info`);
297829
+ github_src_logger.info(`Getting user ${name} info`);
297717
297830
  const octokit = await getOctokitForOrg(name);
297718
297831
  return await octokit.users.getByUsername({ username: name });
297719
297832
  }
@@ -297724,11 +297837,10 @@ async function getUserInfo(name) {
297724
297837
  ;// CONCATENATED MODULE: ../github/src/pull_request.ts
297725
297838
 
297726
297839
 
297727
- const pull_request_messageLog = src_default()('firestartr:github:pull_request');
297728
297840
  const commentMaxSize = 65535;
297729
297841
  async function commentInPR(comment, pr_number, repo, owner = 'prefapp') {
297730
297842
  try {
297731
- pull_request_messageLog(`Commenting ${comment} in PR ${pr_number} of ${owner}/${repo}`);
297843
+ github_src_logger.info(`Commenting ${comment} in PR ${pr_number} of ${owner}/${repo}`);
297732
297844
  const octokit = await getOctokitForOrg(owner);
297733
297845
  await octokit.rest.issues.createComment({
297734
297846
  owner,
@@ -297747,12 +297859,12 @@ async function getPrData(pull_number, repo, owner) {
297747
297859
  return await octokit.rest.pulls.get({ owner, repo, pull_number });
297748
297860
  }
297749
297861
  async function getPrLastCommitSHA(pull_number, repo, owner = 'prefapp') {
297750
- pull_request_messageLog(`Getting last commit SHA for PR ${pull_number} of ${owner}/${repo}`);
297862
+ github_src_logger.info(`Getting last commit SHA for PR ${pull_number} of ${owner}/${repo}`);
297751
297863
  const prData = await getPrData(pull_number, repo, owner);
297752
297864
  return prData.data.head.sha;
297753
297865
  }
297754
297866
  async function getPrMergeCommitSHA(pull_number, repo, owner = 'prefapp') {
297755
- pull_request_messageLog(`Getting merge commit SHA for PR ${pull_number} of ${owner}/${repo}`);
297867
+ github_src_logger.info(`Getting merge commit SHA for PR ${pull_number} of ${owner}/${repo}`);
297756
297868
  const prData = await getPrData(pull_number, repo, owner);
297757
297869
  if (prData.data.merge_commit_sha !== null) {
297758
297870
  return prData.data.merge_commit_sha;
@@ -297790,7 +297902,7 @@ function divideCommentIntoChunks(comment, sizeReduction = 0) {
297790
297902
  return result;
297791
297903
  }
297792
297904
  async function getPrFiles(pr_number, repo, owner = 'prefapp') {
297793
- pull_request_messageLog(`Getting PR details of PR ${pr_number} of ${owner}/${repo}`);
297905
+ github_src_logger.info(`Getting PR details of PR ${pr_number} of ${owner}/${repo}`);
297794
297906
  const octokit = await getOctokitForOrg(owner);
297795
297907
  return await octokit.rest.pulls.listFiles({
297796
297908
  owner,
@@ -297832,9 +297944,8 @@ async function filterPrBy(filter, opts) {
297832
297944
  ;// CONCATENATED MODULE: ../github/src/issues.ts
297833
297945
 
297834
297946
 
297835
- const issues_log = src_default()('firestartr:github:issues');
297836
297947
  async function issues_create(owner, repo, title, body, labels = []) {
297837
- issues_log(`Creating issue in ${owner}/${repo}`);
297948
+ github_src_logger.info(`Creating issue in ${owner}/${repo}`);
297838
297949
  const octokit = await getOctokitForOrg(owner);
297839
297950
  return await octokit.rest.issues.create({
297840
297951
  owner,
@@ -297845,7 +297956,7 @@ async function issues_create(owner, repo, title, body, labels = []) {
297845
297956
  });
297846
297957
  }
297847
297958
  async function update(owner, repo, issue_number, title, body, labels = []) {
297848
- issues_log(`Updating issue ${issue_number} in ${owner}/${repo}`);
297959
+ github_src_logger.info(`Updating issue ${issue_number} in ${owner}/${repo}`);
297849
297960
  const octokit = await getOctokitForOrg(owner);
297850
297961
  return await octokit.rest.issues.update({
297851
297962
  owner,
@@ -297857,7 +297968,7 @@ async function update(owner, repo, issue_number, title, body, labels = []) {
297857
297968
  });
297858
297969
  }
297859
297970
  async function filterBy(owner, repo, title, labels, state = 'open', creator = undefined, assignee = undefined) {
297860
- issues_log(`Filtering issues by title in ${owner}/${repo}`);
297971
+ github_src_logger.info(`Filtering issues by title in ${owner}/${repo}`);
297861
297972
  const octokit = await getOctokitForOrg(owner);
297862
297973
  const resp = await octokit.rest.issues.listForRepo({
297863
297974
  owner,
@@ -297872,7 +297983,7 @@ async function filterBy(owner, repo, title, labels, state = 'open', creator = un
297872
297983
  return resp.data.filter((issue) => issue.title.includes(title));
297873
297984
  }
297874
297985
  async function upsertByTitle(owner, repo, title, body, labels = []) {
297875
- issues_log(`Upserting issue by title in ${owner}/${repo}`);
297986
+ github_src_logger.info(`Upserting issue by title in ${owner}/${repo}`);
297876
297987
  const foundIssues = await filterBy(owner, repo, title, labels.join(','));
297877
297988
  if (foundIssues.length > 0) {
297878
297989
  return update(owner, repo, foundIssues[0].number, title, body, labels);
@@ -297882,7 +297993,7 @@ async function upsertByTitle(owner, repo, title, body, labels = []) {
297882
297993
  }
297883
297994
  }
297884
297995
  async function issues_close(owner, repo, issue_number) {
297885
- issues_log(`Closing issue ${issue_number} in ${owner}/${repo}`);
297996
+ github_src_logger.info(`Closing issue ${issue_number} in ${owner}/${repo}`);
297886
297997
  const octokit = await getOctokitForOrg(owner);
297887
297998
  return await octokit.rest.issues.update({
297888
297999
  owner,
@@ -297902,10 +298013,9 @@ async function issues_close(owner, repo, issue_number) {
297902
298013
  ;// CONCATENATED MODULE: ../github/src/branches.ts
297903
298014
 
297904
298015
 
297905
- const branches_messageLog = src_default()('firestartr:github:branches');
297906
298016
  const SHA1_EMPTY_TREE = '4b825dc642cb6eb9a060e54bf8d69288fbee4904';
297907
298017
  async function listBranches(repo, owner = 'prefapp') {
297908
- branches_messageLog(`Getting branches for ${owner}/${repo}`);
298018
+ github_src_logger.info(`Getting branches for ${owner}/${repo}`);
297909
298019
  const octokit = await getOctokitForOrg(owner);
297910
298020
  const response = await octokit.rest.repos.listBranches({
297911
298021
  owner,
@@ -297916,7 +298026,7 @@ async function listBranches(repo, owner = 'prefapp') {
297916
298026
  return response.data;
297917
298027
  }
297918
298028
  async function getBranch(repo, branch, owner = 'prefapp') {
297919
- branches_messageLog(`Getting branch ${branch} for ${owner}/${repo}`);
298029
+ github_src_logger.info(`Getting branch ${branch} for ${owner}/${repo}`);
297920
298030
  const octokit = await getOctokitForOrg(owner);
297921
298031
  const response = await octokit.rest.repos.getBranch({
297922
298032
  owner,
@@ -297926,7 +298036,7 @@ async function getBranch(repo, branch, owner = 'prefapp') {
297926
298036
  return response.data;
297927
298037
  }
297928
298038
  async function createBranch(repo, branch, sha, owner = 'prefapp') {
297929
- branches_messageLog(`Creating branch ${branch} for ${owner}/${repo}`);
298039
+ github_src_logger.info(`Creating branch ${branch} for ${owner}/${repo}`);
297930
298040
  const octokit = await getOctokitForOrg(owner);
297931
298041
  const response = await octokit.rest.git.createRef({
297932
298042
  owner,
@@ -297937,7 +298047,7 @@ async function createBranch(repo, branch, sha, owner = 'prefapp') {
297937
298047
  return response.data;
297938
298048
  }
297939
298049
  async function createOrphanBranch(repo, branch, owner = 'prefapp') {
297940
- branches_messageLog(`Creating orphan branch ${branch} for ${owner}/${repo}`);
298050
+ github_src_logger.info(`Creating orphan branch ${branch} for ${owner}/${repo}`);
297941
298051
  const octokit = await getOctokitForOrg(owner);
297942
298052
  // Create a commit with an empty tree
297943
298053
  const { data: commit } = await octokit.request('POST /repos/{owner}/{repo}/git/commits', {
@@ -297963,6 +298073,268 @@ async function createOrphanBranch(repo, branch, owner = 'prefapp') {
297963
298073
  createOrphanBranch,
297964
298074
  });
297965
298075
 
298076
+ ;// CONCATENATED MODULE: ../github/src/check_run.ts
298077
+
298078
+
298079
+ const FLUSH_TIMEOUT = 4; // seconds
298080
+ const GITHUB_OUTPUT_TEXT_LIMIT = 65000; // ~65k hard limit for output.text
298081
+ /**
298082
+ * Streams text updates to a callback on a fixed cadence, with a size-triggered early flush.
298083
+ * Does NOT clear content on flush (so the consumer can send the full, current log each time).
298084
+ */
298085
+ class CheckRunBuffer {
298086
+ constructor(initial = '', onFlush, opts) {
298087
+ this.content = initial;
298088
+ this.updated = Boolean(initial);
298089
+ this.onFlush = onFlush;
298090
+ this.flushIntervalMs = (opts?.intervalSec ?? FLUSH_TIMEOUT) * 1000;
298091
+ this.timer = setInterval(() => {
298092
+ this.flush();
298093
+ }, this.flushIntervalMs);
298094
+ if (initial)
298095
+ this.flush();
298096
+ }
298097
+ stop() {
298098
+ if (this.timer !== null) {
298099
+ clearInterval(this.timer);
298100
+ this.timer = null;
298101
+ }
298102
+ }
298103
+ update(data = '') {
298104
+ if (!data)
298105
+ return;
298106
+ this.content += data;
298107
+ this.updated = true;
298108
+ }
298109
+ flush() {
298110
+ if (!this.updated)
298111
+ return;
298112
+ try {
298113
+ this.onFlush(this.content);
298114
+ }
298115
+ finally {
298116
+ this.updated = false;
298117
+ }
298118
+ }
298119
+ snapshot() {
298120
+ return this.content;
298121
+ }
298122
+ }
298123
+ class GithubCheckRun {
298124
+ constructor(octokit, params) {
298125
+ this.hasCommented = false;
298126
+ this.closing = false;
298127
+ this.closed = false;
298128
+ this.lastStatus = 'in_progress';
298129
+ this.detailsFormatter = (s) => s;
298130
+ this.octokit = octokit;
298131
+ this.owner = params.owner;
298132
+ this.repo = params.repo;
298133
+ this.headSHA = params.headSHA;
298134
+ this.name = params.name;
298135
+ this.detailsUrl = params.detailsUrl;
298136
+ this.title = params.title ?? params.name;
298137
+ if (params.summary)
298138
+ this._summaryOverride = params.summary;
298139
+ this.pullNumber = params.pullNumber;
298140
+ this.includeCheckRunComment = Boolean(params.includeCheckRunComment);
298141
+ this.checkRunComment = params.checkRunComment;
298142
+ this.buffer = new CheckRunBuffer('', (data) => this.__updateCheckRun(data).catch(() => { }), { intervalSec: FLUSH_TIMEOUT });
298143
+ }
298144
+ /**
298145
+ * Configure markdown formatting for the details (output.text).
298146
+ * Example: ch.mdOptionsDetails({ quotes: 'terraform' })
298147
+ * Result:
298148
+ * ```terraform
298149
+ * <log>
298150
+ * ```
298151
+ */
298152
+ mdOptionsDetails(opts) {
298153
+ const lang = (opts?.quotes ?? '').trim();
298154
+ if (!lang) {
298155
+ this.detailsFormatter = (s) => s;
298156
+ return;
298157
+ }
298158
+ const fenceOpen = '```' + lang + '\n';
298159
+ const fenceClose = '\n```';
298160
+ const overhead = fenceOpen.length + fenceClose.length;
298161
+ this.detailsFormatter = (body) => {
298162
+ const maxBody = Math.max(0, GITHUB_OUTPUT_TEXT_LIMIT - overhead);
298163
+ const safeBody = body.length > maxBody ? truncateRight(body, maxBody) : body;
298164
+ return fenceOpen + safeBody + fenceClose;
298165
+ };
298166
+ }
298167
+ set summary(data) {
298168
+ this._summaryOverride = data;
298169
+ // Push an immediate update if already created and not closed.
298170
+ if (!this.closed && this.checkRunId) {
298171
+ // do not mutate buffer flags; just send current snapshot using new summary
298172
+ this.__updateCheckRun(this.buffer.snapshot()).catch(() => { });
298173
+ }
298174
+ }
298175
+ get summary() {
298176
+ return this._summaryOverride;
298177
+ }
298178
+ /**
298179
+ * Append log text and optionally set status ('queued' | 'in_progress').
298180
+ */
298181
+ update(text, status) {
298182
+ if (this.closed)
298183
+ return;
298184
+ if (status)
298185
+ this.lastStatus = status;
298186
+ if (text)
298187
+ this.buffer.update(text);
298188
+ }
298189
+ /**
298190
+ * Finalize the check with a conclusion. Flushes buffered text, marks completed.
298191
+ */
298192
+ async close(finalText, ok) {
298193
+ if (this.closed || this.closing)
298194
+ return;
298195
+ this.closing = true;
298196
+ this.buffer.stop();
298197
+ const finalContent = this.buffer.snapshot() + (finalText || '');
298198
+ try {
298199
+ await this.__ensureCreated();
298200
+ const { text, summary } = this.buildOutputTextAndSummary(finalContent);
298201
+ await this.octokit.rest.checks.update({
298202
+ owner: this.owner,
298203
+ repo: this.repo,
298204
+ check_run_id: this.checkRunId,
298205
+ conclusion: ok ? 'success' : 'failure',
298206
+ completed_at: new Date().toISOString(),
298207
+ output: {
298208
+ title: this.title,
298209
+ summary,
298210
+ text,
298211
+ },
298212
+ });
298213
+ this.closed = true;
298214
+ }
298215
+ finally {
298216
+ this.closing = false;
298217
+ }
298218
+ }
298219
+ // -------------------- Internals --------------------
298220
+ async __ensureCreated() {
298221
+ if (this.checkRunId)
298222
+ return;
298223
+ const startedAt = new Date().toISOString();
298224
+ const res = await this.octokit.rest.checks.create({
298225
+ owner: this.owner,
298226
+ repo: this.repo,
298227
+ name: this.name,
298228
+ head_sha: this.headSHA,
298229
+ status: 'in_progress',
298230
+ started_at: startedAt,
298231
+ details_url: this.detailsUrl,
298232
+ output: {
298233
+ title: this.title,
298234
+ summary: this._summaryOverride ?? '',
298235
+ text: undefined,
298236
+ },
298237
+ });
298238
+ this.checkRunId = res.data.id;
298239
+ if (this.includeCheckRunComment &&
298240
+ this.pullNumber !== undefined &&
298241
+ !this.hasCommented) {
298242
+ const link = this.__buildCheckRunUrl();
298243
+ const formattedLink = `[here](${link})`;
298244
+ const base = this.checkRunComment ?? '';
298245
+ const body = base ? `${base}${formattedLink}` : formattedLink;
298246
+ await this.octokit.rest.issues.createComment({
298247
+ owner: this.owner,
298248
+ repo: this.repo,
298249
+ issue_number: this.pullNumber,
298250
+ body,
298251
+ });
298252
+ this.hasCommented = true;
298253
+ }
298254
+ }
298255
+ async __updateCheckRun(allContent) {
298256
+ if (this.closed || this.closing)
298257
+ return;
298258
+ await this.__ensureCreated();
298259
+ const { text, summary } = this.buildOutputTextAndSummary(allContent);
298260
+ await this.octokit.rest.checks.update({
298261
+ owner: this.owner,
298262
+ repo: this.repo,
298263
+ check_run_id: this.checkRunId,
298264
+ status: this.lastStatus,
298265
+ output: {
298266
+ title: this.title,
298267
+ summary,
298268
+ text,
298269
+ },
298270
+ });
298271
+ }
298272
+ __buildCheckRunUrl() {
298273
+ if (this.checkRunId) {
298274
+ return `https://github.com/${this.owner}/${this.repo}/runs/${this.checkRunId}?check_suite_focus=true`;
298275
+ }
298276
+ return `https://github.com/${this.owner}/${this.repo}/commit/${this.headSHA}/checks?check_suite_focus=true`;
298277
+ }
298278
+ buildOutputTextAndSummary(full) {
298279
+ if (!full) {
298280
+ return {
298281
+ text: undefined,
298282
+ summary: this._summaryOverride ?? '',
298283
+ };
298284
+ }
298285
+ let text = this.detailsFormatter(full);
298286
+ let truncated = false;
298287
+ if (text.length > GITHUB_OUTPUT_TEXT_LIMIT) {
298288
+ text = truncateRight(text, GITHUB_OUTPUT_TEXT_LIMIT);
298289
+ truncated = true;
298290
+ }
298291
+ else {
298292
+ truncated = text.length < full.length;
298293
+ }
298294
+ let summary = this._summaryOverride ?? '';
298295
+ if (this._summaryOverride && truncated) {
298296
+ summary = `${summary}\n\n... (log truncated to ~${GITHUB_OUTPUT_TEXT_LIMIT.toLocaleString()} chars)`;
298297
+ }
298298
+ return { text, summary };
298299
+ }
298300
+ }
298301
+ // -------------------- Helpers --------------------
298302
+ function truncateRight(s, max) {
298303
+ if (s.length <= max)
298304
+ return s;
298305
+ const HARD = Math.max(0, max - 3);
298306
+ return s.slice(0, HARD) + '...';
298307
+ }
298308
+ /**
298309
+ * Factory: build a GithubCheckRun using an installation token for the given org.
298310
+ */
298311
+ async function createCheckRunForOrg(org, owner, repo, name, opts) {
298312
+ const octokit = await getOctokitForOrg(org);
298313
+ let headSHA = opts?.headSHA;
298314
+ if (!headSHA && typeof opts?.pullNumber === 'number') {
298315
+ headSHA = await getPrMergeCommitSHA(opts.pullNumber, repo, owner);
298316
+ }
298317
+ if (!headSHA) {
298318
+ throw new Error('createCheckRunForOrg: either opts.headSHA or opts.pullNumber must be provided');
298319
+ }
298320
+ return new GithubCheckRun(octokit, {
298321
+ owner,
298322
+ repo,
298323
+ headSHA,
298324
+ name,
298325
+ detailsUrl: opts?.detailsUrl,
298326
+ title: opts?.title,
298327
+ summary: opts?.summary,
298328
+ pullNumber: opts?.pullNumber,
298329
+ includeCheckRunComment: Boolean(opts?.includeCheckRunComment),
298330
+ checkRunComment: opts?.checkRunComment,
298331
+ });
298332
+ }
298333
+ async function createCheckRun(owner, repo, name, opts) {
298334
+ return createCheckRunForOrg(owner, owner, repo, name, opts);
298335
+ }
298336
+ const CheckRun = GithubCheckRun;
298337
+
297966
298338
  ;// CONCATENATED MODULE: ../github/index.ts
297967
298339
 
297968
298340
 
@@ -297973,6 +298345,7 @@ async function createOrphanBranch(repo, branch, owner = 'prefapp') {
297973
298345
 
297974
298346
 
297975
298347
 
298348
+
297976
298349
  /* harmony default export */ const github_0 = ({
297977
298350
  org: organization,
297978
298351
  repo: repository,
@@ -297986,6 +298359,10 @@ async function createOrphanBranch(repo, branch, owner = 'prefapp') {
297986
298359
  pulls: pull_request,
297987
298360
  issues: issues,
297988
298361
  branches: branches,
298362
+ feedback: {
298363
+ createCheckRun: createCheckRun,
298364
+ CheckRun: CheckRun,
298365
+ },
297989
298366
  });
297990
298367
 
297991
298368
  ;// CONCATENATED MODULE: ../cdk8s_renderer/src/patches/base.ts
@@ -301506,14 +301883,14 @@ async function loadSchema(schemaURI) {
301506
301883
 
301507
301884
 
301508
301885
  const ajv = new (_2020_default())({ useDefaults: true });
301509
- let initiated = false;
301886
+ let validation_initiated = false;
301510
301887
  const validations = {};
301511
301888
  function prepareValidation(schemaId) {
301512
- if (!initiated)
301889
+ if (!validation_initiated)
301513
301890
  ajv.addSchema(base_schemas.schemas);
301514
301891
  if (!validations[schemaId])
301515
301892
  validations[schemaId] = ajv.getSchema(schemaId);
301516
- initiated = true;
301893
+ validation_initiated = true;
301517
301894
  return validations[schemaId];
301518
301895
  }
301519
301896
  function validateClaim(data, schemaId = 'firestartr.dev://common/ClaimEnvelope') {
@@ -303147,7 +303524,7 @@ function addTraceabilityStamp(context, content) {
303147
303524
 
303148
303525
 
303149
303526
 
303150
- const render_messageLog = src_default()('firestartr:features_renderer');
303527
+ const messageLog = src_default()('firestartr:features_renderer');
303151
303528
  function render(featurePath, featureRenderPath, entity, firestartrConfig = {}, featureArgs = {}) {
303152
303529
  const configData = validate_validate(featurePath);
303153
303530
  const context = buildContext(entity, configData.args, firestartrConfig, featureArgs);
@@ -303158,7 +303535,7 @@ function render(featurePath, featureRenderPath, entity, firestartrConfig = {}, f
303158
303535
  // For now let's keep upgradeable flag for backward compatibility
303159
303536
  // by default it's false
303160
303537
  const userManaged = file.user_managed ?? file.upgradeable ?? false;
303161
- render_messageLog(`Rendering ${src} to ${dest}`);
303538
+ messageLog(`Rendering ${src} to ${dest}`);
303162
303539
  // render the content of the file
303163
303540
  const content = addTraceability(context, src, renderContent(external_fs_default().readFileSync(external_path_default().join(featurePath, 'templates', src)).toString(), context));
303164
303541
  const destFilePath = external_path_default().join(`${featureRenderPath}`, dest);
@@ -303321,7 +303698,7 @@ function loadYaml(file) {
303321
303698
  throw new Error(`Failed to parse YAML "${file}": ${msg}`);
303322
303699
  }
303323
303700
  }
303324
- function ensureSafeTmpName(name) {
303701
+ function ensureSafeTmpNames(name) {
303325
303702
  if (typeof name !== 'string' || !name.trim()) {
303326
303703
  throw new Error('Test "name" must be a non-empty string');
303327
303704
  }
@@ -303334,16 +303711,15 @@ function ensureSafeTmpName(name) {
303334
303711
  if (name.includes('..')) {
303335
303712
  throw new Error('Test "name" must not contain ".."');
303336
303713
  }
303337
- if (/[/\\]/.test(name)) {
303338
- throw new Error('Test "name" must not contain path separators (/ or \\)');
303339
- }
303340
303714
  if (!/^[A-Za-z0-9._-]+$/.test(name)) {
303341
303715
  throw new Error('Test "name" may only contain letters, numbers, ".", "_", or "-"');
303342
303716
  }
303343
303717
  }
303344
- async function mkNamedTmp(name) {
303345
- ensureSafeTmpName(name);
303346
- const dir = external_node_path_.join(external_node_os_namespaceObject.tmpdir(), name);
303718
+ async function mkNamedTmp(...names) {
303719
+ for (const name of names) {
303720
+ ensureSafeTmpNames(name);
303721
+ }
303722
+ const dir = external_node_path_.join(external_node_os_namespaceObject.tmpdir(), ...names);
303347
303723
  await promises_namespaceObject.rm(dir, { recursive: true, force: true });
303348
303724
  await promises_namespaceObject.mkdir(dir, { recursive: true });
303349
303725
  return dir;
@@ -303354,7 +303730,7 @@ async function mkTmp(prefix = 'feature-render-') {
303354
303730
  function buildExpectedOutput(config, renderDir) {
303355
303731
  const files = (config.files || []).map((f) => ({
303356
303732
  localPath: external_node_path_.join(renderDir, f.dest),
303357
- repoPath: f.src,
303733
+ repoPath: f.dest,
303358
303734
  userManaged: f.user_managed,
303359
303735
  }));
303360
303736
  return {
@@ -303432,18 +303808,23 @@ function updateFileContent(featureRenderPath, filePath, content) {
303432
303808
  render: render,
303433
303809
  updateFileContent: updateFileContent,
303434
303810
  auxiliar: auxiliar,
303811
+ buildContext: buildContext,
303812
+ renderContent: renderContent,
303435
303813
  });
303436
303814
 
303815
+ ;// CONCATENATED MODULE: ../features_preparer/src/logger.ts
303816
+
303817
+ /* harmony default export */ const features_preparer_src_logger = (catalog_common.logger);
303818
+
303437
303819
  ;// CONCATENATED MODULE: ../features_preparer/src/renderer.ts
303438
303820
 
303439
303821
 
303440
303822
 
303441
303823
 
303442
- const renderer_messageLog = src_default()('firestartr:features_preparer:renderer');
303443
303824
  function renderFeature(featureName, version, owner, repo, featureOwner, renderPath = '/tmp', featureArgs = {}) {
303444
303825
  const extractPath = external_path_default().join(catalog_common.features.tarballs.getFeaturesExtractPath(featureName, version, owner, repo), 'packages', featureName);
303445
303826
  const renderedPath = catalog_common.features.features.getFeatureRenderedPathForEntity(featureOwner, featureName, renderPath);
303446
- renderer_messageLog(`Rendering feature ${featureName} to ${renderedPath} with component ${JSON.stringify(featureOwner)}`);
303827
+ features_preparer_src_logger.info(`Rendering feature ${featureName} to ${renderedPath} with component ${JSON.stringify(featureOwner)}`);
303447
303828
  return features_renderer.render(extractPath, renderedPath, featureOwner, {}, featureArgs);
303448
303829
  }
303449
303830
 
@@ -303482,7 +303863,6 @@ async function downloadZipBall(url, filePath) {
303482
303863
 
303483
303864
 
303484
303865
 
303485
- const installer_log = src_default()('firestartr:features_preparer:installer');
303486
303866
  async function getFeatureConfigFromRef(featureName, featureRef, featureOwner, // -> cr
303487
303867
  featureArgs = {}, repo = 'features', owner = 'prefapp') {
303488
303868
  // reference is the featureRef directly
@@ -303513,12 +303893,12 @@ async function prepareFeature(featureName, version, repo = 'features', owner = '
303513
303893
  async function downloadFeatureZip(repo, featureName, reference, owner = 'prefapp') {
303514
303894
  try {
303515
303895
  const zipballExtractPath = catalog_common.features.tarballs.getFeaturesExtractPath(featureName, reference, owner, repo, { createIfNotExists: false });
303516
- console.log(`Zipball extract path: ${zipballExtractPath}`);
303896
+ features_preparer_src_logger.debug(`Zipball extract path: ${zipballExtractPath}`);
303517
303897
  if (external_fs_.existsSync(zipballExtractPath)) {
303518
- console.log(`Zipball extract path ${zipballExtractPath} already exists, reusing it.`);
303898
+ features_preparer_src_logger.debug(`Zipball extract path ${zipballExtractPath} already exists, reusing it.`);
303519
303899
  return zipballExtractPath;
303520
303900
  }
303521
- installer_log(`Feature ${[featureName, reference, owner, repo].join('-')} has not been downloaded yet, downloading`);
303901
+ features_preparer_src_logger.info(`Feature ${[featureName, reference, owner, repo].join('-')} has not been downloaded yet, downloading`);
303522
303902
  const octokit = await github_0.getOctokitForOrg(owner);
303523
303903
  const response = await octokit.request('GET /repos/{owner}/{repo}/zipball/{reference}', {
303524
303904
  request: {
@@ -303529,28 +303909,28 @@ async function downloadFeatureZip(repo, featureName, reference, owner = 'prefapp
303529
303909
  reference,
303530
303910
  });
303531
303911
  const randomZipTmpPath = `/tmp/${catalog_common.generic.randomString(20)}.zip`;
303532
- console.log(`Downloading feature ${featureName} version ${reference} to ${randomZipTmpPath}`);
303912
+ features_preparer_src_logger.info(`Downloading feature ${featureName} version ${reference} to ${randomZipTmpPath}`);
303533
303913
  if (external_fs_.existsSync(randomZipTmpPath)) {
303534
- console.log(`Temporary zip file ${randomZipTmpPath} already exists, removing it.`);
303914
+ features_preparer_src_logger.debug(`Temporary zip file ${randomZipTmpPath} already exists, removing it.`);
303535
303915
  external_fs_.unlinkSync(randomZipTmpPath);
303536
303916
  }
303537
303917
  const randomExtractPath = `/tmp/${catalog_common.generic.randomString(20)}`;
303538
- console.log(`Extracting feature ${featureName} version ${reference} to ${randomExtractPath}`);
303918
+ features_preparer_src_logger.debug(`Extracting feature ${featureName} version ${reference} to ${randomExtractPath}`);
303539
303919
  external_fs_.rmSync(randomExtractPath, { recursive: true, force: true });
303540
303920
  await downloadZipBall(response.url, randomZipTmpPath);
303541
303921
  const zip = new (adm_zip_default())(randomZipTmpPath);
303542
303922
  const mainEntry = zip.getEntries()[0].entryName;
303543
- console.log(`Main entry in zip: ${mainEntry}`);
303544
- console.log(`Extracting zip to ${randomExtractPath}`);
303923
+ features_preparer_src_logger.debug(`Main entry in zip: ${mainEntry}`);
303924
+ features_preparer_src_logger.debug(`Extracting zip to ${randomExtractPath}`);
303545
303925
  zip.extractAllTo(randomExtractPath, true);
303546
- console.log(`Renaming main entry ${mainEntry} to ${zipballExtractPath}`);
303926
+ features_preparer_src_logger.debug(`Renaming main entry ${mainEntry} to ${zipballExtractPath}`);
303547
303927
  external_fs_.renameSync(`${randomExtractPath}/${mainEntry}`, zipballExtractPath);
303548
- console.log(`Removing temporary zip file ${randomZipTmpPath}`);
303928
+ features_preparer_src_logger.debug(`Removing temporary zip file ${randomZipTmpPath}`);
303549
303929
  external_fs_.unlinkSync(randomZipTmpPath);
303550
303930
  return zipballExtractPath;
303551
303931
  }
303552
303932
  catch (error) {
303553
- console.error(error);
303933
+ features_preparer_src_logger.error(`Error on prepare feature with tag ${reference}: ${error}`);
303554
303934
  throw new Error(`Error for feature with tag ${reference}: ${error}. GitHub response: ${error}`);
303555
303935
  }
303556
303936
  }
@@ -309139,60 +309519,9 @@ const scaffoldSubcommand = {
309139
309519
 
309140
309520
  // EXTERNAL MODULE: ../../node_modules/@kubernetes/client-node/dist/index.js
309141
309521
  var client_node_dist = __nccwpck_require__(54851);
309142
- // EXTERNAL MODULE: ../../node_modules/winston/lib/winston.js
309143
- var winston = __nccwpck_require__(66752);
309144
- var winston_default = /*#__PURE__*/__nccwpck_require__.n(winston);
309145
309522
  ;// CONCATENATED MODULE: ../operator/src/logger.ts
309146
309523
 
309147
- const validLogLevels = [
309148
- 'error',
309149
- 'warn',
309150
- 'info',
309151
- 'debug',
309152
- 'verbose',
309153
- 'silly',
309154
- ];
309155
- let logger_initiated = false;
309156
- let logger = null;
309157
- // Type guard to check if a value is a valid LogLevel
309158
- function isValidLogLevel(level) {
309159
- return (typeof level === 'string' && validLogLevels.includes(level));
309160
- }
309161
- function initLogger() {
309162
- if (logger_initiated)
309163
- return;
309164
- const logLevel = process.env.LOG_LEVEL && isValidLogLevel(process.env.LOG_LEVEL)
309165
- ? process.env.LOG_LEVEL
309166
- : 'info';
309167
- logger = winston_default().createLogger({
309168
- level: logLevel,
309169
- exitOnError: false,
309170
- format: winston.format.combine(winston.format.timestamp({ format: 'YYYY-MM-DD HH:mm:ss' }), winston.format.json()),
309171
- transports: [
309172
- new winston.transports.Console({
309173
- level: logLevel,
309174
- }),
309175
- ],
309176
- });
309177
- logger_initiated = true;
309178
- }
309179
- function doLog(level, args) {
309180
- initLogger();
309181
- const [message, metadata] = args;
309182
- // eslint-disable-next-line prefer-spread
309183
- logger[level].apply(logger, [
309184
- message + ' | ' + JSON.stringify({ ...metadata }),
309185
- ]);
309186
- }
309187
- const logger_log = {
309188
- error: (...args) => doLog('error', args),
309189
- warn: (...args) => doLog('warn', args),
309190
- info: (...args) => doLog('info', args),
309191
- debug: (...args) => doLog('debug', args),
309192
- verbose: (...args) => doLog('verbose', args),
309193
- silly: (...args) => doLog('silly', args),
309194
- };
309195
- /* harmony default export */ const src_logger = (logger_log);
309524
+ /* harmony default export */ const operator_src_logger = (catalog_common.logger);
309196
309525
 
309197
309526
  ;// CONCATENATED MODULE: ../operator/src/store.ts
309198
309527
 
@@ -309203,13 +309532,7 @@ class Store {
309203
309532
  this.kind = kind;
309204
309533
  }
309205
309534
  add(item) {
309206
- src_logger.debug('STORE_ADD_ITEM', {
309207
- metadata: {
309208
- name: item.metadata.name,
309209
- kind: this.kind,
309210
- namespace: item.metadata.namespace,
309211
- },
309212
- });
309535
+ operator_src_logger.debug(`Added item '${item.metadata.name}' of kind '${this.kind}' to the store in namespace '${item.metadata.namespace}'`);
309213
309536
  this.store[itemPath(this.kind, item)] = {
309214
309537
  item,
309215
309538
  };
@@ -309218,13 +309541,7 @@ class Store {
309218
309541
  return 'deletionTimestamp' in item.metadata;
309219
309542
  }
309220
309543
  markToDelete(item) {
309221
- src_logger.debug('STORE_MARKED_ITEM_TO_DELETE', {
309222
- metadata: {
309223
- name: item.metadata.name,
309224
- kind: this.kind,
309225
- namespace: item.metadata.namespace,
309226
- },
309227
- });
309544
+ operator_src_logger.debug(`Marked item '${item.metadata.name}' of kind '${this.kind}' for deletion in namespace '${item.metadata.namespace}'`);
309228
309545
  this.store[itemPath(this.kind, item)] = {
309229
309546
  item,
309230
309547
  markedToDelete: true,
@@ -309257,24 +309574,11 @@ class Store {
309257
309574
  item,
309258
309575
  };
309259
309576
  if (updated)
309260
- src_logger.debug('STORE_ITEM_MODIFIED', {
309261
- metadata: {
309262
- name: item.metadata.name,
309263
- kind: this.kind,
309264
- namespace: item.metadata.namespace,
309265
- patches,
309266
- },
309267
- });
309577
+ operator_src_logger.debug(`Modified item '${item.metadata.name}' of kind '${this.kind}' in namespace '${item.metadata.namespace}' with patches ${JSON.stringify(patches)}`);
309268
309578
  return updated;
309269
309579
  }
309270
309580
  remove(item) {
309271
- src_logger.debug('STORE_ITEM_REMOVED', {
309272
- metadata: {
309273
- name: item.metadata.name,
309274
- kind: this.kind,
309275
- namespace: item.metadata.namespace,
309276
- },
309277
- });
309581
+ operator_src_logger.debug(`Removed item '${item.metadata.name}' of kind '${this.kind}' from namespace '${item.metadata.namespace}'`);
309278
309582
  delete this.store[itemPath(this.kind, item)];
309279
309583
  }
309280
309584
  getItem(item) {
@@ -309321,7 +309625,7 @@ async function getItem(kind, namespace, item) {
309321
309625
  }
309322
309626
  async function getItemByItemPath(itemPath, apiGroup = catalog_common.types.controller.FirestartrApiGroup, apiVersion = 'v1') {
309323
309627
  try {
309324
- src_logger.debug('CTL_GET_ITEM', { metadata: { itemPath } });
309628
+ operator_src_logger.debug(`The ctl is getting the item at '${itemPath}'.`);
309325
309629
  const { kc, opts } = await ctl_getConnection();
309326
309630
  opts.headers['Content-Type'] = 'application/json';
309327
309631
  opts.headers['Accept'] = 'application/json';
@@ -309331,14 +309635,14 @@ async function getItemByItemPath(itemPath, apiGroup = catalog_common.types.contr
309331
309635
  const r = await fetch(url, { method: 'get', headers: opts.headers });
309332
309636
  if (!r.ok) {
309333
309637
  const err = new Error(`Error on getItemByItemPath: ${itemPath}: ${r.statusText}`);
309334
- console.log(err.stack);
309638
+ operator_src_logger.error(`Error on getItemByItemPath: ${itemPath}: ${r.statusText}`);
309335
309639
  throw err;
309336
309640
  }
309337
309641
  const jsonResponse = await r.json();
309338
309642
  return jsonResponse;
309339
309643
  }
309340
309644
  catch (e) {
309341
- console.dir(e, { depth: null });
309645
+ operator_src_logger.error(`Error on getItemByItemPath: ${e}`);
309342
309646
  throw e;
309343
309647
  }
309344
309648
  }
@@ -309358,17 +309662,15 @@ async function writeManifest(kind, namespace, item, apiSlug) {
309358
309662
  return jsonResponse;
309359
309663
  }
309360
309664
  function writeSecret(secret, namespace) {
309361
- log.debug('CTL_WRITE_SECRET', {
309362
- metadata: { namespace, name: secret.metadata.name },
309363
- });
309665
+ log.debug(`The ctl is writing the secret '${secret.metadata.name}' in namespace '${namespace}'.`);
309364
309666
  return writeManifest('secrets', namespace, secret, `api/v1/namespaces/${namespace}/secrets/${secret.metadata.name}`);
309365
309667
  }
309366
309668
  async function writeStatus(kind, namespace, item) {
309367
- src_logger.debug('CTL_WRITE_STATUS', { metadata: { item } });
309669
+ operator_src_logger.debug(`The ctl is writing the status for item '${item.kind}/${item.metadata.name}' in namespace '${item.metadata.namespace}'.`);
309368
309670
  return await writeManifest(kind, namespace, item, `apis/firestartr.dev/v1/namespaces/${namespace}/${kind}/${item.metadata.name}/status`);
309369
309671
  }
309370
309672
  function writeFinalizer(kind, namespace, item) {
309371
- log.debug('CTL_WRITE_FINALIZER', { metadata: { item } });
309673
+ log.debug(`The ctl is writing the status for item '${item.kind}/${item.metadata.name}' in namespace '${item.metadata.namespace}'.`);
309372
309674
  return writeManifest(kind, namespace, item, `apis/firestartr.dev/v1/namespaces/${namespace}/${kind}/${item.metadata.name}/metadata/finalizers`);
309373
309675
  }
309374
309676
  async function listItems(kind, namespace, kc, opts) {
@@ -309383,7 +309685,7 @@ async function listItems(kind, namespace, kc, opts) {
309383
309685
  return await r.json();
309384
309686
  }
309385
309687
  catch (err) {
309386
- console.dir(err);
309688
+ log.error(`On listItems: ${err}`);
309387
309689
  throw err;
309388
309690
  }
309389
309691
  }
@@ -309410,15 +309712,10 @@ async function* observeList(kind, namespace, revision, kc, opts) {
309410
309712
  }
309411
309713
  catch (err) {
309412
309714
  if (err instanceof TypeError) {
309413
- log.error('CTL_OBSERVE_LIST_ERROR_CHUNKS', {
309414
- metadata: { namespace, kind, revision, error: err },
309415
- });
309715
+ log.error(`The ctl encountered an error while listing chunks for '${kind}' with revision '${revision}' in namespace '${namespace}': '${err}'.`);
309416
309716
  }
309417
309717
  else {
309418
- log.error('CTL_OBSERVE_LIST_UNKNOWN_ERROR_CHUNKS', {
309419
- metadata: { namespace, kind, revision, error: err },
309420
- });
309421
- console.error(err);
309718
+ log.error(`The ctl encountered an unknown error while listing chunks for '${kind}' with revision '${revision}' in namespace '${namespace}': '${err}'.`);
309422
309719
  }
309423
309720
  }
309424
309721
  }
@@ -309441,7 +309738,7 @@ async function ctl_getConnection() {
309441
309738
  return { kc, opts };
309442
309739
  }
309443
309740
  catch (err) {
309444
- console.dir(err, { depth: null });
309741
+ operator_src_logger.error(`getConnection: ${err}`);
309445
309742
  throw err;
309446
309743
  }
309447
309744
  }
@@ -309500,9 +309797,7 @@ async function deleteSecret(secretName, namespace) {
309500
309797
  }
309501
309798
  catch (e) {
309502
309799
  if (e && e.code === 404) {
309503
- src_logger.error('CTL_DELETE_SECRET_NOT_FOUND', {
309504
- metadata: { secretName, namespace },
309505
- });
309800
+ operator_src_logger.error(`The ctl failed to delete the secret '${secretName}' in namespace '${namespace}' because it was not found.`);
309506
309801
  return null;
309507
309802
  }
309508
309803
  else {
@@ -309536,9 +309831,7 @@ async function getSecret(namespace, secretName) {
309536
309831
  }
309537
309832
  catch (e) {
309538
309833
  if (e.response && e.response.statusCode === 404) {
309539
- src_logger.error('CTL_SECRET_NOT_FOUND', {
309540
- metadata: { secretName, namespace },
309541
- });
309834
+ operator_src_logger.error(`The ctl could not find the secret '${secretName}' in namespace '${namespace}'.`);
309542
309835
  return null;
309543
309836
  }
309544
309837
  else {
@@ -309569,13 +309862,7 @@ async function getTFResult(namespace, item) {
309569
309862
  * @param {any} item - Object to check if has been renamed
309570
309863
  */
309571
309864
  async function checkIfRenamed(namespace, item) {
309572
- log.debug('CTL_CHECK_IF_RENAMED', {
309573
- metadata: {
309574
- kind: item.kind,
309575
- name: item.metadata.name,
309576
- namespace,
309577
- },
309578
- });
309865
+ log.debug(`The ctl is checking if item '${item.kind}/${item.metadata.name}' in namespace '${namespace}' has been renamed.`);
309579
309866
  const oldName = item.metadata?.labels?.[common.types.controller.FirestartrLabelOldName];
309580
309867
  // If the item does not have firestartr.dev/old-name label, it has not been renamed
309581
309868
  if (!oldName)
@@ -309591,9 +309878,7 @@ async function checkIfRenamed(namespace, item) {
309591
309878
  });
309592
309879
  if (!r.ok) {
309593
309880
  if (r.status === 404) {
309594
- log.debug('CTL_CHECK_IF_RENAMED_OLDNAME_NOT_FOUND', {
309595
- metadata: { kind: item.kind, name: item.metadata.name, namespace },
309596
- });
309881
+ log.debug(`The ctl is checking for a rename of item '${item.kind}/${item.metadata.name}' in namespace '${namespace}', but the old item name was not found.`);
309597
309882
  return false;
309598
309883
  }
309599
309884
  }
@@ -309601,21 +309886,17 @@ async function checkIfRenamed(namespace, item) {
309601
309886
  return true;
309602
309887
  }
309603
309888
  catch (err) {
309604
- console.log(err);
309889
+ log.debug(err);
309605
309890
  return false;
309606
309891
  }
309607
309892
  }
309608
309893
  async function upsertFinalizer(kind, namespace, item, finalizer) {
309609
309894
  if ('finalizers' in item.metadata &&
309610
309895
  item.metadata.finalizers.includes(finalizer)) {
309611
- src_logger.debug('CTL_UPSERT_FINALIZER_ALREADY_SET', {
309612
- metadata: { finalizer, kind, name: item.metadata.name, namespace },
309613
- });
309896
+ operator_src_logger.debug(`The ctl tried to upsert the finalizer '${finalizer}' for '${kind}/${item.metadata.name}' in namespace '${namespace}', but it was already set.`);
309614
309897
  return;
309615
309898
  }
309616
- src_logger.debug('CTL_UPSERT_FINALIZER_SETTING', {
309617
- metadata: { finalizer, kind, name: item.metadata.name, namespace },
309618
- });
309899
+ operator_src_logger.debug(`The ctl is setting the finalizer '${finalizer}' for '${kind}/${item.metadata.name}' in namespace '${namespace}'.`);
309619
309900
  const { kc, opts } = await ctl_getConnection();
309620
309901
  const url = `${kc.getCurrentCluster().server}/apis/firestartr.dev/v1/namespaces/${namespace}/${kind}/${item.metadata.name}`;
309621
309902
  opts.headers['Content-Type'] = 'application/json-patch+json';
@@ -309656,14 +309937,7 @@ async function upsertFinalizer(kind, namespace, item, finalizer) {
309656
309937
  async function unsetFinalizer(kind, namespace, item, finalizer) {
309657
309938
  const { kc, opts } = await ctl_getConnection();
309658
309939
  const name = typeof item === 'string' ? item : item.metadata.name;
309659
- src_logger.debug('CTL_REMOVE_FINALIZER', {
309660
- metadata: {
309661
- finalizer,
309662
- kind,
309663
- name,
309664
- namespace,
309665
- },
309666
- });
309940
+ operator_src_logger.debug(`The ctl is removing the finalizer '${finalizer}' from '${kind}/${name}' in namespace '${namespace}'.`);
309667
309941
  const url = `${kc.getCurrentCluster().server}/apis/firestartr.dev/v1/namespaces/${namespace}/${kind}/${name}`;
309668
309942
  opts.headers['Content-Type'] = 'application/json-patch+json';
309669
309943
  opts.headers['Accept'] = '*';
@@ -309725,8 +309999,7 @@ async function writePlanInGithubPR(prUrl, planText) {
309725
309999
  await github_0.pulls.commentInPR(message, +pr_number, repo, owner);
309726
310000
  }
309727
310001
  catch (err) {
309728
- console.error(err);
309729
- console.log('Cannot write plan in PR');
310002
+ operator_src_logger.error(`writePlanInGithubPR: Cannot write plan in PR: ${err}`);
309730
310003
  }
309731
310004
  }
309732
310005
  async function addApplyCommitStatus(cr, state, targetURL = '', description = '', context = '') {
@@ -309734,15 +310007,7 @@ async function addApplyCommitStatus(cr, state, targetURL = '', description = '',
309734
310007
  await addCommitStatusToPrMergeCommit(cr.metadata.annotations['firestartr.dev/last-state-pr'], state, targetURL, description, context);
309735
310008
  }
309736
310009
  catch (e) {
309737
- src_logger.error('CTL_ADD_APPLY_COMMIT_STATUS_ERROR', {
309738
- metadata: {
309739
- state,
309740
- targetURL,
309741
- description,
309742
- cr_metadata: cr.metadata,
309743
- error: e,
309744
- },
309745
- });
310010
+ log.error(`The ctl encountered an error while adding commit status for custom resource '${cr.metadata.name}' in namespace '${cr.metadata.namespace}'. State: '${state}'. Target URL: '${targetURL}'. Description: '${description}'. Error: '${e}'.`);
309746
310011
  }
309747
310012
  }
309748
310013
  async function addDestroyCommitStatus(cr, state, description = '', context = '') {
@@ -309751,50 +310016,33 @@ async function addDestroyCommitStatus(cr, state, description = '', context = '')
309751
310016
  await addCommitStatusToPrMergeCommit(prUrl, state, '', description, context);
309752
310017
  }
309753
310018
  catch (e) {
309754
- src_logger.error('CTL_ADD_DESTROY_COMMIT_STATUS_ERROR', {
309755
- metadata: {
309756
- state,
309757
- description,
309758
- cr_metadata: cr.metadata,
309759
- error: e,
309760
- },
309761
- });
310019
+ operator_src_logger.error(`The ctl encountered an error while adding the destroy commit status for custom resource '${cr.metadata.name}' in namespace '${cr.metadata.namespace}'. State: '${state}'. Description: '${description}'. Error: '${e}'.`);
309762
310020
  }
309763
310021
  }
309764
310022
  async function addPlanStatusCheck(prUrl, summary, status = 'in_progress', isFailure = false) {
309765
310023
  try {
309766
- src_logger.debug('CTL_ADD_PLAN_STATUS_CHECK_SUMMARY_LENGTH', {
309767
- metadata: { length: summary.length },
309768
- });
310024
+ operator_src_logger.debug(`The ctl is checking the length of the plan summary, which is '${summary.length}'.`);
309769
310025
  if (summary.length > MAX_CHARS_OUPUT_PLAN) {
309770
310026
  const mustDrop = summary.length - MAX_CHARS_OUPUT_PLAN;
309771
310027
  summary = summary.substring(mustDrop);
309772
- src_logger.debug('CTL_ADD_PLAN_STATUS_CHECK_SUMMARY_TOO_LENGTHY', {
309773
- metadata: { mustDrop, length: summary.length },
309774
- });
310028
+ operator_src_logger.debug(`The ctl found the plan summary too lengthy (length: '${summary.length}'). The summary must drop because '${mustDrop}'.`);
309775
310029
  }
309776
310030
  await ctl_addStatusCheck({ summary, title: 'Terraform Plan Results' }, isFailure, 'terraform_plan', prUrl, status);
309777
310031
  }
309778
310032
  catch (e) {
309779
- src_logger.error('CTL_ADD_PLAN_STATUS_CHECK_ERROR', {
309780
- metadata: { prUrl, status, isFailure, error: e },
309781
- });
310033
+ operator_src_logger.error(`The ctl encountered an error while adding plan status for PR '${prUrl}' with status '${status}'. Is Failure: '${isFailure}'. Error: '${e}'.`);
309782
310034
  }
309783
310035
  }
309784
310036
  async function ctl_addStatusCheck(output, isFailure, name, prAnnotationValue, status) {
309785
310037
  const { owner, repo, prNumber } = catalog_common.generic.getOwnerRepoPrNumberFromAnnotationValue(prAnnotationValue);
309786
310038
  const branchSha = await github_0.pulls.getPrLastCommitSHA(prNumber, repo, owner);
309787
- src_logger.info('CTL_ADD_STATUS_CHECK', {
309788
- metadata: { owner, repo, branchSha, prAnnotationValue, name },
309789
- });
310039
+ operator_src_logger.info(`The ctl is adding a status check for '${owner}/${repo}' on branch '${branchSha}' with PR annotation value '${prAnnotationValue}' and name '${name}'.`);
309790
310040
  await github_0.repo.addStatusCheck(output, isFailure, branchSha, name, status, repo, owner);
309791
310041
  }
309792
310042
  async function addCommitStatusToPrMergeCommit(prAnnotationValue, state, targetURL, description, context) {
309793
310043
  const { owner, repo, prNumber } = catalog_common.generic.getOwnerRepoPrNumberFromAnnotationValue(prAnnotationValue);
309794
310044
  const branchSha = await github_0.pulls.getPrMergeCommitSHA(prNumber, repo, owner);
309795
- src_logger.info('CTL_ADD_COMMIT_STATUS', {
309796
- metadata: { owner, repo, branchSha, state, targetURL },
309797
- });
310045
+ operator_src_logger.info(`The ctl is adding a commit status for '${owner}/${repo}' on branch '${branchSha}'. State: '${state}'. Target URL: '${targetURL}'.`);
309798
310046
  await github_0.repo.addCommitStatus(state, branchSha, repo, owner, targetURL, description, context);
309799
310047
  }
309800
310048
  async function getLastStatePrInfo(cr) {
@@ -309862,62 +310110,42 @@ async function observe(plural, namespace, onAdd, onChange, onDelete, _onRename)
309862
310110
  informer.on('add', (obj) => {
309863
310111
  store.add(obj);
309864
310112
  if (store.hasDeletionTimestamp(obj)) {
309865
- src_logger.info('REFLECTOR_ITEM_MARKED_TO_DELETION', {
309866
- metadata: { kind: obj.kind, name: obj.metadata.name },
309867
- });
310113
+ operator_src_logger.info(`Reflector has marked item '${obj.kind}/${obj.metadata.name}' for deletion.`);
309868
310114
  store.markToDelete(obj);
309869
310115
  onDelete(obj);
309870
310116
  }
309871
310117
  else {
309872
- src_logger.info('REFLECTOR_ITEM_ADDED', {
309873
- metadata: { kind: obj.kind, name: obj.metadata.name },
309874
- });
310118
+ operator_src_logger.info(`Reflector has added item '${obj.kind}/${obj.metadata.name}'.`);
309875
310119
  onAdd(obj);
309876
310120
  }
309877
310121
  });
309878
310122
  informer.on('update', (obj) => {
309879
- src_logger.info('REFLECTOR_ITEM_UPDATED', {
309880
- metadata: {
309881
- kind: obj.kind,
309882
- name: obj.metadata.name,
309883
- resourceVersion: obj.metadata.resourceVersion,
309884
- },
309885
- });
310123
+ operator_src_logger.info(`Reflector has updated item '${obj.kind}/${obj.metadata.name}' to a new resource version: '${obj.metadata.resourceVersion}'.`);
309886
310124
  if (!store.getItem(obj).markedToDelete &&
309887
310125
  store.hasDeletionTimestamp(obj) &&
309888
310126
  (store.hasBeenMarkedToDelete(obj) || store.modified(obj))) {
309889
- src_logger.info('REFLECTOR_ITEM_UPDATED_MARKED_TO_DELETION', {
309890
- metadata: { kind: obj.kind, name: obj.metadata.name },
309891
- });
310127
+ operator_src_logger.info(`Reflector has updated item '${obj.kind}/${obj.metadata.name}' and marked it for deletion.`);
309892
310128
  store.markToDelete(obj);
309893
310129
  onDelete(obj);
309894
310130
  }
309895
310131
  else if (store.modified(obj)) {
309896
- src_logger.info('REFLECTOR_ITEM_UPDATED_AND_MODIFIED', {
309897
- metadata: { kind: obj.kind, name: obj.metadata.name },
309898
- });
310132
+ operator_src_logger.info(`Reflector has updated and modified item '${obj.kind}/${obj.metadata.name}'.`);
309899
310133
  onChange(obj);
309900
310134
  }
309901
310135
  });
309902
310136
  informer.on('delete', (obj) => {
309903
310137
  // deleted from the etcd
309904
- src_logger.info('REFLECTOR_ITEM_DELETED', {
309905
- metadata: { kind: obj.kind, name: obj.metadata.name },
309906
- });
310138
+ operator_src_logger.info(`Reflector has deleted item '${obj.kind}/${obj.metadata.name}' from the etcd.`);
309907
310139
  store.remove(obj);
309908
310140
  });
309909
310141
  informer.on('error', (err) => {
309910
- src_logger.error('REFLECTOR_ITEM_ERROR', {
309911
- metadata: { error: err, plural, namespace },
309912
- });
310142
+ operator_src_logger.error(`An error occurred in the reflector for '${plural}' in namespace '${namespace}': '${err}'.`);
309913
310143
  setTimeout(async () => {
309914
310144
  try {
309915
310145
  await informer.start();
309916
310146
  }
309917
310147
  catch (err) {
309918
- src_logger.error('REFLECTOR_INFORMER_START_ERROR', {
309919
- metadata: { error: err, plural, namespace },
309920
- });
310148
+ operator_src_logger.error(`Failed to start the reflector informer for '${plural}' in namespace '${namespace}': '${err}'.`);
309921
310149
  }
309922
310150
  }, 5000);
309923
310151
  });
@@ -309937,13 +310165,13 @@ async function needsProvisioningOnCreate(cr) {
309937
310165
  const fCrLog = (cr) => `The item ${cr.kind}: ${cr.metadata.name}`;
309938
310166
  // NO STATUS
309939
310167
  if (!('status' in cr) || !('conditions' in cr.status)) {
309940
- src_logger.debug('STATUS_NO_STATUS_NOR_CONDITION', { metadata: { cr } });
310168
+ operator_src_logger.debug(`The custom resource '${cr.kind}/${cr.metadata.name}' is missing a status and any conditions.`);
309941
310169
  return true;
309942
310170
  }
309943
310171
  // ERROR
309944
310172
  const errCond = getConditionByType(cr.status.conditions, 'ERROR');
309945
310173
  if (errCond && errCond.status === 'True') {
309946
- src_logger.debug('STATUS_ERROR_SKIP_PROVISION', { metadata: { cr } });
310174
+ operator_src_logger.debug(`Skipping the provisioning process for custom resource '${cr.kind}/${cr.metadata.name}' due to a status error.`);
309947
310175
  return false;
309948
310176
  }
309949
310177
  // PROVISIONED
@@ -309951,7 +310179,7 @@ async function needsProvisioningOnCreate(cr) {
309951
310179
  if (provCond &&
309952
310180
  provCond.status === 'True' &&
309953
310181
  provCond.observedGeneration >= cr.metadata.generation) {
309954
- src_logger.debug('STATUS_ALREADY_PROVISIONED', { metadata: { cr } });
310182
+ operator_src_logger.debug(`The custom resource '${cr.kind}/${cr.metadata.name}' is already provisioned; skipping the process.`);
309955
310183
  return false;
309956
310184
  }
309957
310185
  // DELETED
@@ -309959,29 +310187,20 @@ async function needsProvisioningOnCreate(cr) {
309959
310187
  if (delCond &&
309960
310188
  delCond.status === 'True' &&
309961
310189
  delCond.observedGeneration >= cr.metadata.generation) {
309962
- src_logger.debug('STATUS_ALREADY_DELETED', { metadata: { cr } });
310190
+ operator_src_logger.debug(`The custom resource '${cr.kind}/${cr.metadata.name}' has already been deleted; no action is required.`);
309963
310191
  return false;
309964
310192
  }
309965
310193
  // PROVISIONING
309966
310194
  const provisioningCondition = getConditionByType(cr.status.conditions, 'PROVISIONING');
309967
310195
  if (provisioningCondition && provisioningCondition.status === 'True') {
309968
- src_logger.debug('STATUS_IN_PROVISIONING_REPROVISIONING', { metadata: { cr } });
310196
+ operator_src_logger.debug(`The custom resource '${cr.kind}/${cr.metadata.name}' is currently in a provisioning or reprovisioning state.`);
309969
310197
  return true;
309970
310198
  }
309971
- src_logger.debug('STATUS_NOT_HANDLED_STATE_SKIP_PROVISIONING', { metadata: { cr } });
310199
+ operator_src_logger.debug(`Skipping the provisioning process for custom resource '${cr.kind}/${cr.metadata.name}' because its current state is not handled.`);
309972
310200
  return false;
309973
310201
  }
309974
310202
  async function updateTransition(itemPath, reason, type, statusValue, message = '', updateStatusOnly = false) {
309975
- src_logger.info('STATUS_UPDATE_TRANSITION_FOR_ITEM', {
309976
- metadata: {
309977
- itemPath,
309978
- reason,
309979
- type,
309980
- statusValue,
309981
- message,
309982
- updateStatusOnly,
309983
- },
309984
- });
310203
+ operator_src_logger.info(`The item at '${itemPath}' transitioned to a new status of '${statusValue}' (type: '${type}'). The reason for the change is '${reason}' with the message: '${message}'. This was a status-only update: '${updateStatusOnly}'.`);
309985
310204
  const k8sItem = await getItemByItemPath(itemPath);
309986
310205
  if (!('status' in k8sItem))
309987
310206
  k8sItem.status = {};
@@ -310064,7 +310283,7 @@ async function syncer(enqueue) {
310064
310283
  void loop(enqueue);
310065
310284
  return {
310066
310285
  addItem(itemPath) {
310067
- src_logger.info('SYNC_ADD_ITEM', { metadata: { itemPath } });
310286
+ operator_src_logger.info(`Added item of path '${itemPath}' for synchronization`);
310068
310287
  void itemIsSyncable(itemPath).then((itemSyncInfo) => {
310069
310288
  if (!itemSyncInfo.syncable) {
310070
310289
  return;
@@ -310076,7 +310295,7 @@ async function syncer(enqueue) {
310076
310295
  }, helperCalculateRevisionTime(itemSyncInfo.period)),
310077
310296
  needsRevision: false,
310078
310297
  };
310079
- src_logger.info('Configured syncing for item %s %s', itemPath, syncWatchers[itemPath]);
310298
+ operator_src_logger.info(`Configured synchronization for item at path '${itemPath}'`);
310080
310299
  });
310081
310300
  },
310082
310301
  updateItem(itemPath) {
@@ -310084,13 +310303,13 @@ async function syncer(enqueue) {
310084
310303
  // log('Item %s not found, ignoring...', itemPath)
310085
310304
  // return
310086
310305
  //}
310087
- src_logger.debug('SYNC_UPDATE_ITEM', { metadata: { itemPath } });
310306
+ operator_src_logger.debug(`Updated item of path '${itemPath}' during synchronization`);
310088
310307
  void itemIsSyncable(itemPath).then((itemSyncInfo) => {
310089
310308
  if (!itemSyncInfo.syncable) {
310090
310309
  if (syncWatchers[itemPath]) {
310091
310310
  clearInterval(syncWatchers[itemPath].lastRevision);
310092
310311
  delete syncWatchers[itemPath];
310093
- src_logger.info('SYNC_REMOVE_FOR_ITEM', { metadata: { itemPath } });
310312
+ operator_src_logger.info(`Removed item of path '${itemPath}' from synchronization`);
310094
310313
  }
310095
310314
  }
310096
310315
  else {
@@ -310104,26 +310323,19 @@ async function syncer(enqueue) {
310104
310323
  }, helperCalculateRevisionTime(itemSyncInfo.period)),
310105
310324
  needsRevision: false,
310106
310325
  };
310107
- src_logger.debug('SYNC_CONFIGURED_FOR_ITEM', {
310108
- metadata: {
310109
- itemPath,
310110
- watcher: syncWatchers[itemPath],
310111
- },
310112
- });
310326
+ operator_src_logger.debug(`Configured synchronization for item at path '${itemPath}' with watcher '${syncWatchers[itemPath]}'`);
310113
310327
  }
310114
310328
  });
310115
310329
  },
310116
310330
  deleteItem(itemPath) {
310117
310331
  if (!syncWatchers[itemPath]) {
310118
- src_logger.debug('SYNC_DELETE_ITEM_NOT_FOUND_IGNORE', {
310119
- metadata: { itemPath },
310120
- });
310332
+ operator_src_logger.debug(`Ignored deletion attempt for item at path '${itemPath}' as it was not found during synchronization`);
310121
310333
  return;
310122
310334
  }
310123
- src_logger.debug('SYNC_DELETE_ITEM', { metadata: { itemPath } });
310335
+ operator_src_logger.debug(`Deleted item of path '${itemPath}' during synchronization`);
310124
310336
  clearInterval(syncWatchers[itemPath].lastRevision);
310125
310337
  delete syncWatchers[itemPath];
310126
- src_logger.debug('SYNC_DELETE_ITEM_DELETED', { metadata: { itemPath } });
310338
+ operator_src_logger.debug(`Successfully deleted item at path '${itemPath}' during synchronization`);
310127
310339
  },
310128
310340
  };
310129
310341
  }
@@ -310195,13 +310407,7 @@ async function initRetry(enqueue) {
310195
310407
  function retry(itemPath) {
310196
310408
  if (retryWatchers[itemPath]) {
310197
310409
  retryWatchers[itemPath].retryCounter++;
310198
- src_logger.debug('RETRY_FAILED', {
310199
- metadata: {
310200
- itemPath,
310201
- remainRetries: MAXRETRY - retryWatchers[itemPath].retryCounter,
310202
- nextRetry: NEXT_RETRY_SECS * retryWatchers[itemPath].retryCounter,
310203
- },
310204
- });
310410
+ operator_src_logger.debug(`Failed to process item '${itemPath}'. Retrying in '${NEXT_RETRY_SECS * retryWatchers[itemPath].retryCounter}' seconds. Remaining retries: '${MAXRETRY - retryWatchers[itemPath].retryCounter}'.`);
310205
310411
  retryWatchers[itemPath].retry = false;
310206
310412
  retryWatchers[itemPath].nextRetry = setTimeout(() => {
310207
310413
  if (itemPath in retryWatchers)
@@ -310252,12 +310458,7 @@ async function getItemIfNeededRetry(watcher) {
310252
310458
  }
310253
310459
  catch (e) {
310254
310460
  if (e.message && e.message.includes('Error on getItemByItemPath')) {
310255
- src_logger.debug('RETRY_ERROR_ITEM_NOT_FOUND', {
310256
- metadata: {
310257
- message: 'item not found, removed from the retry process',
310258
- itemPath: watcher.itemPath,
310259
- },
310260
- });
310461
+ operator_src_logger.debug(`Item '${watcher.itemPath}' not found, so it has been removed from the retry process.`);
310261
310462
  removeFromRetry(watcher.itemPath);
310262
310463
  return null;
310263
310464
  }
@@ -310297,9 +310498,7 @@ async function resolve(cr, getItemByItemPath, getSecret, namespace = 'default')
310297
310498
  async function resolveSecretRef(namespace, crDependency, getSecret) {
310298
310499
  let secretName = `${crDependency['kind']}-${crDependency['metadata']['name']}-outputs`.toLowerCase();
310299
310500
  if (crDependency.kind === 'FirestartrProviderConfig') {
310300
- src_logger.debug('RESOLVER_SKIP_SECRET_RESOLUTION_FOR', {
310301
- metadata: { kind: 'FirestartrProviderConfig', namespace, crDependency },
310302
- });
310501
+ operator_src_logger.debug(`The resolver is skipping secret resolution for '${crDependency.kind}/${crDependency.metadata.name}' of kind 'FirestartrProviderConfig' in namespace '${namespace}'.`);
310303
310502
  return undefined;
310304
310503
  }
310305
310504
  if (crDependency.kind === 'ExternalSecret') {
@@ -310307,9 +310506,7 @@ async function resolveSecretRef(namespace, crDependency, getSecret) {
310307
310506
  }
310308
310507
  const secret = await getSecret(namespace, secretName);
310309
310508
  if (!secret) {
310310
- src_logger.error('RESOLVER_SECRET_NOT_SOLVABLE', {
310311
- metadata: { secretName, crDependency, namespace },
310312
- });
310509
+ operator_src_logger.error(`The resolver could not find the secret '${secretName}' required by custom resource dependency '${crDependency}' in namespace '${namespace}'.`);
310313
310510
  console.error(`Could not resolve secret ${secretName}`);
310314
310511
  }
310315
310512
  return secret;
@@ -310445,9 +310642,7 @@ const kindsWithFinalizer = [
310445
310642
  */
310446
310643
  async function observeKind(pluralKind, namespace, queue, compute) {
310447
310644
  const lastWorkItems = {};
310448
- src_logger.info('INFORMER_OBSERVE_START', {
310449
- metadata: { kind: pluralKind, namespace },
310450
- });
310645
+ operator_src_logger.info(`The informer has started observing the '${pluralKind}' resource in namespace '${namespace}'.`);
310451
310646
  // onSync
310452
310647
  const enqueueCallback = (event) => {
310453
310648
  return async (item) => {
@@ -310464,13 +310659,7 @@ async function observeKind(pluralKind, namespace, queue, compute) {
310464
310659
  await observe(pluralKind, namespace,
310465
310660
  // on add
310466
310661
  async (item) => {
310467
- src_logger.info('INFORMER_ON_ITEM_ADDED', {
310468
- metadata: {
310469
- kind: pluralKind,
310470
- namespace,
310471
- name: item.metadata.name,
310472
- },
310473
- });
310662
+ operator_src_logger.info(`The informer has detected a new item, '${item.metadata.name}', for '${pluralKind}' in namespace '${namespace}'.`);
310474
310663
  await handleUpsertFinalizer(pluralKind, namespace, item);
310475
310664
  const workItem = await inform(pluralKind, item, 'onAdd', getLastWorkItem(pluralKind, lastWorkItems, item));
310476
310665
  syncCtl.addItem(informer_itemPath(pluralKind, item));
@@ -310481,13 +310670,7 @@ async function observeKind(pluralKind, namespace, queue, compute) {
310481
310670
  },
310482
310671
  // on modify
310483
310672
  async (item) => {
310484
- src_logger.info('INFORMER_ON_ITEM_MODIFIED', {
310485
- metadata: {
310486
- kind: pluralKind,
310487
- namespace,
310488
- name: item.metadata.name,
310489
- },
310490
- });
310673
+ operator_src_logger.info(`The informer has detected that item '${item.metadata.name}' for '${pluralKind}' in namespace '${namespace}' was modified.`);
310491
310674
  const workItem = await inform(pluralKind, item, 'onUpdate', getLastWorkItem(pluralKind, lastWorkItems, item));
310492
310675
  if (workItem) {
310493
310676
  setLastWorkItem(pluralKind, lastWorkItems, item, workItem);
@@ -310496,13 +310679,7 @@ async function observeKind(pluralKind, namespace, queue, compute) {
310496
310679
  },
310497
310680
  // on delete
310498
310681
  async (item) => {
310499
- src_logger.info('INFORMER_ON_ITEM_DELETED', {
310500
- metadata: {
310501
- kind: pluralKind,
310502
- namespace,
310503
- name: item.metadata.name,
310504
- },
310505
- });
310682
+ operator_src_logger.info(`The informer has detected that item '${item.metadata.name}' for '${pluralKind}' in namespace '${namespace}' was deleted.`);
310506
310683
  const workItem = await inform(pluralKind, item, 'onMarkedToDeletion', getLastWorkItem(pluralKind, lastWorkItems, item));
310507
310684
  if (workItem) {
310508
310685
  setLastWorkItem(pluralKind, lastWorkItems, item, workItem);
@@ -310512,17 +310689,11 @@ async function observeKind(pluralKind, namespace, queue, compute) {
310512
310689
  },
310513
310690
  // on rename
310514
310691
  async (item) => {
310515
- src_logger.info('INFORMER_ON_ITEM_RENAMED', {
310516
- metadata: {
310517
- kind: pluralKind,
310518
- namespace,
310519
- name: item.metadata.name,
310520
- },
310521
- });
310692
+ operator_src_logger.info(`The informer has detected that an item for '${pluralKind}' in namespace '${namespace}' has been renamed to '${item.metadata.name}'.`);
310522
310693
  const workItem = await inform(pluralKind, item, 'onRename', getLastWorkItem(pluralKind, lastWorkItems, item));
310523
310694
  // Add the renamed item to the sync queue
310524
310695
  syncCtl.addItem(informer_itemPath(pluralKind, item));
310525
- src_logger.debug('INFORMER_RENAMING_ITEM', { metadata: { workItem } });
310696
+ operator_src_logger.debug(`The informer is renaming item '${workItem.item.metadata.name}' of kind '${workItem.item.kind}' due to a change in its name.`);
310526
310697
  if (workItem) {
310527
310698
  const oldName = workItem.item.metadata.labels[catalog_common.types.controller.FirestartrLabelOldName];
310528
310699
  await handleUnsetFinalizer(pluralKind, namespace, item);
@@ -310583,7 +310754,7 @@ function enqueue(pluralKind, workItem, queue, compute, syncCtl, retryCtl) {
310583
310754
  syncCtl.updateItem(informer_itemPath(pluralKind, item));
310584
310755
  }
310585
310756
  else {
310586
- src_logger.debug('INFORMER_NOT_SPEC_OPERATION', { metadata: { operation } });
310757
+ operator_src_logger.debug(`The informer received an item with an operation type of '${operation}', which is not a specific operation.`);
310587
310758
  }
310588
310759
  };
310589
310760
  queue(workItem);
@@ -310626,9 +310797,7 @@ async function inform(pluralKind, item, op, lastWorkItem = null) {
310626
310797
  return workItem;
310627
310798
  case 'onRename':
310628
310799
  if (await needsProvisioningOnCreate(item)) {
310629
- src_logger.debug('INFORMER_ON_RENAME_NEEDS_PROVISION_ON_CREATE', {
310630
- metadata: { item },
310631
- });
310800
+ operator_src_logger.debug(`The informer is triggering a new provisioning process for the renamed item '${item.kind}/${item.metadata.name}'.`);
310632
310801
  workItem = {
310633
310802
  operation: OperationType.RENAMED,
310634
310803
  item,
@@ -310900,15 +311069,7 @@ let INIT = false;
310900
311069
  * @param {WorkItem} workItem - WorkItem to process
310901
311070
  */
310902
311071
  async function processItem(workItem) {
310903
- src_logger.info('PROCESSOR_NEW_WORKITEM', {
310904
- metadata: {
310905
- operation: workItem.operation,
310906
- workStatus: workItem.workStatus,
310907
- kind: workItem.item.kind,
310908
- name: workItem.item.metadata.name,
310909
- namespace: workItem.item.metadata.namespace,
310910
- },
310911
- });
311072
+ operator_src_logger.info(`The processor received a new work item for '${workItem.operation}' operation on '${workItem.item.kind}/${workItem.item.metadata.name}' in namespace '${workItem.item.metadata.namespace}'. Current work status is '${workItem.workStatus}'.`);
310912
311073
  queue.push(workItem);
310913
311074
  if (!INIT) {
310914
311075
  processItem_loop().catch((err) => {
@@ -310929,15 +311090,7 @@ async function processItem_loop() {
310929
311090
  const logMessage = `${new Date().toISOString()} : Processing OPERATION: ${w.operation} ITEM: ${w.item.kind}/${w.item.metadata.name}`;
310930
311091
  catalog_common.io.writeLogFile('process_item', logMessage);
310931
311092
  const timeout = createTimeout(w);
310932
- src_logger.info('PROCESSOR_PROCESSING_WORKITEM', {
310933
- metadata: {
310934
- operation: w.operation,
310935
- workStatus: w.workStatus,
310936
- kind: w.item.kind,
310937
- name: w.item.metadata.name,
310938
- namespace: w.item.metadata.namespace,
310939
- },
310940
- });
311093
+ operator_src_logger.info(`The processor is currently handling a '${w.operation}' operation for item '${w.item.kind}/${w.item.metadata.name}' in namespace '${w.item.metadata.namespace}'. The current work status is '${w.workStatus}'.`);
310941
311094
  await runWorkItem(w);
310942
311095
  clearTimeout(timeout);
310943
311096
  }
@@ -310953,15 +311106,7 @@ function createTimeout(w) {
310953
311106
  return setTimeout(() => {
310954
311107
  //throw new Error('Timeout on workitem ' + w);
310955
311108
  console.error('Timeout on workitem %O', w);
310956
- src_logger.error('PROCESSOR_TIMEOUT_ON_WORKITEM', {
310957
- metadata: {
310958
- operation: w.operation,
310959
- workStatus: w.workStatus,
310960
- kind: w.item.kind,
310961
- name: w.item.metadata.name,
310962
- namespace: w.item.metadata.namespace,
310963
- },
310964
- });
311109
+ operator_src_logger.error(`The processor timed out while handling a '${w.operation}' operation for item '${w.item.kind}/${w.item.metadata.name}' in namespace '${w.item.metadata.namespace}'. The current work status is '${w.workStatus}'.`);
310965
311110
  process.exit(1);
310966
311111
  }, TIMEOUTS[w.operation] * 1000);
310967
311112
  }
@@ -310987,7 +311132,7 @@ function processItem_wait(t = 2000) {
310987
311132
  return new Promise((ok) => setTimeout(ok, t));
310988
311133
  }
310989
311134
  async function runWorkItem(workItem) {
310990
- src_logger.debug('PROCESSOR_RUNNING_WORK_ITEM', { metadata: { workItem } });
311135
+ operator_src_logger.debug(`The processor is now running the '${workItem.operation}' operation for item '${workItem.item.kind}/${workItem.item.metadata.name}' in namespace '${workItem.item.metadata.namespace}'.`);
310991
311136
  if (!workItem.getItem || !workItem.process || !workItem.operation)
310992
311137
  return;
310993
311138
  try {
@@ -310999,33 +311144,17 @@ async function runWorkItem(workItem) {
310999
311144
  await updateTransition(workItem.handler.itemPath(), condition.reason, condition.type, condition.status, condition.message, condition.updateStatusOnly || false);
311000
311145
  }
311001
311146
  workItem.workStatus = WorkStatus.FINISHED;
311002
- src_logger.debug('PROCESSOR_REMAIN_ITEMS_IN_QUEUE', {
311003
- metadata: { remainingItems: queue.length },
311004
- });
311147
+ operator_src_logger.debug(`The processor has '${queue.length}' items remaining in the queue.`);
311005
311148
  }
311006
311149
  catch (e) {
311007
311150
  if (e instanceof Error &&
311008
311151
  e.message.includes('Error on getItemByItemPath')) {
311009
- src_logger.debug('PROCESSOR_ERROR_ITEM_NOT_FOUND', {
311010
- metadata: {
311011
- workItem,
311012
- message: 'item was not found, removing work item from queue',
311013
- },
311014
- });
311152
+ operator_src_logger.debug(`Item '${workItem.item.kind}/${workItem.item.metadata.name}' was not found, so its work item is being removed from the processor queue.`);
311015
311153
  workItem.workStatus = WorkStatus.FINISHED;
311016
311154
  return;
311017
311155
  }
311018
311156
  else {
311019
- src_logger.error('PROCESSOR_ERROR_PROCESSING_WORKITEM', {
311020
- metadata: {
311021
- operation: workItem.operation,
311022
- workStatus: workItem.workStatus,
311023
- kind: workItem.item.kind,
311024
- name: workItem.item.metadata.name,
311025
- namespace: workItem.item.metadata.namespace,
311026
- error: e,
311027
- },
311028
- });
311157
+ operator_src_logger.error(`An error occurred while the processor was handling the '${workItem.operation}' operation for item '${workItem.item.kind}/${workItem.item.metadata.name}' in namespace '${workItem.item.metadata.namespace}'. Current work status is '${workItem.workStatus}'. The error was: '${e}'.`);
311029
311158
  console.error(e);
311030
311159
  }
311031
311160
  return;
@@ -311037,11 +311166,7 @@ async function runWorkItem(workItem) {
311037
311166
  */
311038
311167
  async function workItemGarbageCollector(queue) {
311039
311168
  while (1) {
311040
- src_logger.debug('PROCESS_ITEM_GARBAGE_COLLECTOR_RUN', {
311041
- metadata: {
311042
- workItemsFound: queue.length,
311043
- },
311044
- });
311169
+ operator_src_logger.debug(`The garbage collector processed '${queue.length}' work items.`);
311045
311170
  for (const [index, wi] of queue.entries()) {
311046
311171
  if (wi.workStatus === WorkStatus.FINISHED) {
311047
311172
  // Because the queue is a constant, we cannot reassign it, instead we
@@ -311050,11 +311175,7 @@ async function workItemGarbageCollector(queue) {
311050
311175
  queue.splice(index, 1);
311051
311176
  }
311052
311177
  }
311053
- src_logger.debug('PROCESS_ITEM_GARBAGE_COLLECTOR_FINISHED', {
311054
- metadata: {
311055
- workItemsLeft: queue.length,
311056
- },
311057
- });
311178
+ operator_src_logger.debug(`The garbage collector finished its run, leaving '${queue.length}' work items in the queue.`);
311058
311179
  await processItem_wait(10 * 1000);
311059
311180
  }
311060
311181
  }
@@ -311066,11 +311187,14 @@ if (process.env.GARBAGE_QUEUE_COLLECTOR) {
311066
311187
  var cdktf_lib = __nccwpck_require__(95933);
311067
311188
  // EXTERNAL MODULE: ../../node_modules/@cdktf/provider-github/lib/provider/index.js
311068
311189
  var lib_provider = __nccwpck_require__(95107);
311069
- ;// CONCATENATED MODULE: ../provisioner/src/entities/base/Entity.ts
311190
+ ;// CONCATENATED MODULE: ../provisioner/src/logger.ts
311191
+
311192
+ /* harmony default export */ const provisioner_src_logger = (catalog_common.logger);
311070
311193
 
311194
+ ;// CONCATENATED MODULE: ../provisioner/src/entities/base/Entity.ts
311071
311195
 
311072
- const Entity_log = src_default()('firestartr:provisioner:entity:base');
311073
311196
  const EXTERNAL_NAME_ANNOTATION = 'firestartr.dev/external-name';
311197
+
311074
311198
  class Metadata {
311075
311199
  constructor(metadata) {
311076
311200
  this._metadata = metadata;
@@ -311117,11 +311241,13 @@ class Entity {
311117
311241
  }
311118
311242
  resolveRef(ref, propertyRef) {
311119
311243
  if (!this.deps) {
311120
- throw `resolveRef:
311244
+ const ErrorMessage = `resolveRef:
311121
311245
 
311122
311246
  Entity with kind ${this.kind} ${this.metadata.name}
311123
311247
 
311124
311248
  does not have any dependencies`;
311249
+ provisioner_src_logger.error(ErrorMessage);
311250
+ throw new Error(ErrorMessage);
311125
311251
  }
311126
311252
  const { kind, name, needsSecret } = ref;
311127
311253
  if (!needsSecret) {
@@ -311130,22 +311256,26 @@ class Entity {
311130
311256
  }
311131
311257
  else {
311132
311258
  if (!propertyRef) {
311133
- throw `resolveRef:
311259
+ const ErrorMessage = `resolveRef:
311134
311260
 
311135
311261
  Entity with kind ${this.kind} ${this.metadata.name}
311136
311262
 
311137
311263
  needs a propertyRef to resolve the secret`;
311264
+ provisioner_src_logger.error(ErrorMessage);
311265
+ throw new Error(ErrorMessage);
311138
311266
  }
311139
311267
  return Buffer.from(this.deps[`${kind}-${name}`].secret.data[propertyRef], 'base64').toString('utf8');
311140
311268
  }
311141
311269
  }
311142
311270
  resolveSecretRef(ref) {
311143
311271
  if (!this.deps) {
311144
- throw `resolveSecretRef:
311272
+ const ErrorMessage = `resolveSecretRef:
311145
311273
 
311146
311274
  Entity with kind ${this.kind} ${this.metadata.name}
311147
311275
 
311148
311276
  does not have any dependencies`;
311277
+ provisioner_src_logger.error(ErrorMessage);
311278
+ throw new Error(ErrorMessage);
311149
311279
  }
311150
311280
  const { name, key } = ref;
311151
311281
  return Buffer.from(this.deps[`Secret-${name}`].cr.data[key], 'base64').toString('utf8');
@@ -311153,11 +311283,13 @@ class Entity {
311153
311283
  resolveOutputs(scope) {
311154
311284
  if (this.spec.writeConnectionSecretToRef) {
311155
311285
  if (!this.mainResource) {
311156
- throw `resolveOutputs:
311286
+ const ErrorMessage = `resolveOutputs:
311157
311287
 
311158
311288
  Entity with kind ${this.kind} ${this.metadata.name}
311159
311289
 
311160
311290
  does not have a mainResource`;
311291
+ provisioner_src_logger.error(ErrorMessage);
311292
+ throw new Error(ErrorMessage);
311161
311293
  }
311162
311294
  /**
311163
311295
  * We don't currently support writing outputs to modules
@@ -311167,13 +311299,15 @@ class Entity {
311167
311299
  const keys = this.getKeysFrom(this.mainResource);
311168
311300
  const outputs = this.spec.writeConnectionSecretToRef.outputs;
311169
311301
  for (const o of outputs) {
311170
- Entity_log('OUTPUT %s', o.key);
311302
+ provisioner_src_logger.debug('OUTPUT %s', o.key);
311171
311303
  if (!keys.includes(o.key)) {
311172
- throw `resolveOutputs:
311304
+ const ErrorMessage = `resolveOutputs:
311173
311305
 
311174
311306
  Entity with kind ${this.kind} ${this.metadata.name}
311175
311307
 
311176
311308
  does not have the output ${o.key}`;
311309
+ provisioner_src_logger.error(ErrorMessage);
311310
+ throw new Error(ErrorMessage);
311177
311311
  }
311178
311312
  new cdktf_lib.TerraformOutput(scope, o.key, {
311179
311313
  value: this.mainResource.getAnyMapAttribute(this.camelToSnake(o.key)),
@@ -311206,7 +311340,6 @@ var repository_file = __nccwpck_require__(79507);
311206
311340
  ;// CONCATENATED MODULE: ../provisioner/src/entities/firestartrgithubrepository/helpers/CodeownersHelper.ts
311207
311341
 
311208
311342
 
311209
- const CodeownersHelper_messageLog = src_default()('firestartr:provisioner:entities:component:helpers:codeownerscreator');
311210
311343
  function provisionCodeowners(scope, repo, branchDefault, fsGithubRepository) {
311211
311344
  const config = {
311212
311345
  dependsOn: [repo, branchDefault],
@@ -311217,7 +311350,7 @@ function provisionCodeowners(scope, repo, branchDefault, fsGithubRepository) {
311217
311350
  overwriteOnCreate: true,
311218
311351
  repository: repo.name,
311219
311352
  };
311220
- CodeownersHelper_messageLog(`Content of the codeowners: ${fsGithubRepository.spec.repo.codeowners}`);
311353
+ provisioner_src_logger.debug(`Content of the codeowners: ${fsGithubRepository.spec.repo.codeowners}`);
311221
311354
  const tfStateKey = `_${fsGithubRepository.getTfStateKey()}-codeowners`;
311222
311355
  new repository_file/* RepositoryFile */.h(scope, tfStateKey, config);
311223
311356
  }
@@ -311230,9 +311363,8 @@ var repository_collaborator = __nccwpck_require__(33786);
311230
311363
 
311231
311364
 
311232
311365
 
311233
- const RepositoryTeamsHelper_messageLog = src_default()('firestartr:provisioner:entities:component:helpers:repositoryteamshelper');
311234
311366
  function provisionPermissions(scope, repo, fsGithubRepository) {
311235
- RepositoryTeamsHelper_messageLog(`provisionRepositoryTeams with name ${fsGithubRepository.metadata.name} in org ${fsGithubRepository.spec.org}`);
311367
+ provisioner_src_logger.info(`provisionRepositoryTeams with name ${fsGithubRepository.metadata.name} in org ${fsGithubRepository.spec.org}`);
311236
311368
  for (const permission of fsGithubRepository.spec.permissions) {
311237
311369
  if ('ref' in permission) {
311238
311370
  const tfStateKey = `_${fsGithubRepository.getTfStateKey()}-${permission.ref.kind}-${permission.ref.name}-tr`;
@@ -311278,9 +311410,8 @@ var branch_protection_v3 = __nccwpck_require__(31706);
311278
311410
 
311279
311411
 
311280
311412
 
311281
- const RepositoryHelper_messageLog = src_default()('firestartr:provisioner:entities:component:helpers:repositoryhelper');
311282
311413
  function provisionRepository(scope, fsGithubRepository) {
311283
- RepositoryHelper_messageLog(`provisionRepository with name ${fsGithubRepository.metadata.name} in org ${fsGithubRepository.spec.org}`);
311414
+ provisioner_src_logger.info(`provisionRepository with name ${fsGithubRepository.metadata.name} in org ${fsGithubRepository.spec.org}`);
311284
311415
  const config = {
311285
311416
  name: fsGithubRepository.metadata.name,
311286
311417
  description: fsGithubRepository.spec.repo.description,
@@ -311317,7 +311448,7 @@ function provisionRepository(scope, fsGithubRepository) {
311317
311448
  return repo;
311318
311449
  }
311319
311450
  function provisionBranchProtections(scope, repo, fsGithubRepository) {
311320
- RepositoryHelper_messageLog(`provisionBranchProtections with name ${fsGithubRepository.metadata.name} in org ${fsGithubRepository.spec.org}`);
311451
+ provisioner_src_logger.info(`provisionBranchProtections with name ${fsGithubRepository.metadata.name} in org ${fsGithubRepository.spec.org}`);
311321
311452
  for (const branchProtection of fsGithubRepository.spec.branchProtections) {
311322
311453
  const tfStateKey = `_${fsGithubRepository.getTfStateKey()}-${branchProtection.pattern}-bp`;
311323
311454
  const statusChecks = {
@@ -311342,8 +311473,6 @@ function provisionBranchProtections(scope, repo, fsGithubRepository) {
311342
311473
 
311343
311474
  ;// CONCATENATED MODULE: ../provisioner/src/config/config.ts
311344
311475
 
311345
-
311346
- const config_messageLog = src_default()('firestartr:provisioner:config');
311347
311476
  /**
311348
311477
  * @description Valid plans for the account
311349
311478
  * @type {Set<string>}
@@ -311450,13 +311579,12 @@ class FirestartrGithubRepository_FirestartrGithubRepository extends Entity {
311450
311579
 
311451
311580
 
311452
311581
 
311453
- const provisioner_messageLog = src_default()('firestartr:provisioner:features:provisioner');
311454
311582
  function provisionFeatureFiles(scope, feature) {
311455
- provisioner_messageLog(`Provisioning feature ${feature.spec.type} for ${feature.spec.repositoryTarget.name}`);
311456
- provisioner_messageLog('Feature output json: %O', feature);
311583
+ provisioner_src_logger.info(`Provisioning feature ${feature.spec.type} for ${feature.spec.repositoryTarget.name}`);
311584
+ provisioner_src_logger.debug('Feature output json: %O', feature);
311457
311585
  if (feature.spec.files) {
311458
311586
  for (const file of feature.spec.files) {
311459
- provisioner_messageLog('Provisioning file %O', file);
311587
+ provisioner_src_logger.debug('Provisioning file %O', file);
311460
311588
  const lifecycleArg = file.userManaged
311461
311589
  ? { ignoreChanges: ['content'] }
311462
311590
  : {};
@@ -311491,8 +311619,6 @@ class FirestartrGithubRepositoryFeature_FirestartrGithubRepositoryFeature extend
311491
311619
  var lib_membership = __nccwpck_require__(27501);
311492
311620
  ;// CONCATENATED MODULE: ../provisioner/src/entities/firestartrgithubmembership/helpers/MembershipHelper.ts
311493
311621
 
311494
-
311495
- const MembershipHelper_messageLog = src_default()('firestartr:provisioner:modules:artifacts:userartifact');
311496
311622
  function provisionMembership(scope, fsGithubMembership) {
311497
311623
  const tfStateKey = `_${fsGithubMembership.getTfStateKey()}`;
311498
311624
  const membership = new lib_membership/* Membership */.E(scope, tfStateKey, {
@@ -311507,8 +311633,6 @@ function provisionMembership(scope, fsGithubMembership) {
311507
311633
  var team_membership = __nccwpck_require__(93268);
311508
311634
  ;// CONCATENATED MODULE: ../provisioner/src/entities/firestartrgithubmembership/helpers/MembershipAllGroupHelper.ts
311509
311635
 
311510
-
311511
- const MembershipAllGroupHelper_messageLog = src_default()('firestartr:provisioner:modules:artifacts:membership:all-group-helper');
311512
311636
  function provisionAllGroupMembershipRelation(scope, fsGithubMembership) {
311513
311637
  const tfStateKey = `_${fsGithubMembership.getTfStateKey()}`;
311514
311638
  const config = {
@@ -311539,11 +311663,10 @@ class FirestartrGithubMembership_FirestartrGithubMembership extends Entity {
311539
311663
  var lib_team = __nccwpck_require__(57889);
311540
311664
  ;// CONCATENATED MODULE: ../provisioner/src/entities/firestartrgithubgroup/helpers/TeamsHelper.ts
311541
311665
 
311542
-
311543
311666
  // import { TeamConfigAux } from '../auxiliars/TeamConfigAux';
311544
- const TeamsHelper_messageLog = src_default()('firestartr:provisioner:entities:component:helpers:repositoryhelper');
311667
+
311545
311668
  function provisionGroup(scope, fsGithubGroup) {
311546
- TeamsHelper_messageLog(`provisionGroup with name ${fsGithubGroup.metadata.name} in org ${fsGithubGroup.spec.org}`);
311669
+ provisioner_src_logger.info(`provisionGroup with name ${fsGithubGroup.metadata.name} in org ${fsGithubGroup.spec.org}`);
311547
311670
  const config = {
311548
311671
  name: fsGithubGroup.metadata.name,
311549
311672
  description: fsGithubGroup.spec.description,
@@ -311561,11 +311684,10 @@ function provisionGroup(scope, fsGithubGroup) {
311561
311684
  ;// CONCATENATED MODULE: ../provisioner/src/entities/firestartrgithubgroup/helpers/TeamMembersHelper.ts
311562
311685
 
311563
311686
 
311564
- const TeamMembersHelper_messageLog = src_default()('firestartr:provisioner:entities:component:helpers:teamsmembershiphelper');
311565
311687
  function provisionMembers(scope, team, fsGithubGroup) {
311566
- TeamMembersHelper_messageLog(`provisionMembers of group ${fsGithubGroup.metadata.name} in org ${fsGithubGroup.spec.org}`);
311688
+ provisioner_src_logger.info(`provisionMembers of group ${fsGithubGroup.metadata.name} in org ${fsGithubGroup.spec.org}`);
311567
311689
  for (const member of fsGithubGroup.spec.members) {
311568
- TeamMembersHelper_messageLog(`Provisioning user ${member.ref.name} for group ${fsGithubGroup.metadata.name}`);
311690
+ provisioner_src_logger.info(`Provisioning user ${member.ref.name} for group ${fsGithubGroup.metadata.name}`);
311569
311691
  const tfStateKey = `_${fsGithubGroup.getTfStateKey()}-${member.ref.kind}-${member.ref.name}-tr`;
311570
311692
  if (member.ref.kind === 'FirestartrGithubMembership') {
311571
311693
  const username = fsGithubGroup.resolveRef(member.ref);
@@ -311600,8 +311722,6 @@ class FirestartrGithubGroup_FirestartrGithubGroup extends Entity {
311600
311722
  var organization_webhook = __nccwpck_require__(80516);
311601
311723
  ;// CONCATENATED MODULE: ../provisioner/src/entities/firestartrgithuborgwebhook/helpers/OrgWebhookHelper.ts
311602
311724
 
311603
-
311604
- const OrgWebhookHelper_messageLog = src_default()('firestartr:provisioner:modules:artifacts:orgwebhook');
311605
311725
  function provisionOrgWebhook(scope, fsGithubOrgWebhook) {
311606
311726
  const tfStateKey = `_${fsGithubOrgWebhook.getTfStateKey()}`;
311607
311727
  const webhookConfig = {
@@ -311867,7 +311987,6 @@ var FirestartrTerraformProvider;
311867
311987
 
311868
311988
 
311869
311989
 
311870
- const GithubStack_messageLog = src_default()('firestartr:provisioner:stacks:githubstack');
311871
311990
  class GithubStack extends BaseStack {
311872
311991
  async provisionEntity(isImport, entity, deps, tfStatePath, orgConfig) {
311873
311992
  try {
@@ -311883,7 +312002,7 @@ class GithubStack extends BaseStack {
311883
312002
  }
311884
312003
  }
311885
312004
  catch (err) {
311886
- GithubStack_messageLog('Error: provisionEntity: %s', err);
312005
+ provisioner_src_logger.error('Error: provisionEntity: %s', err);
311887
312006
  throw err;
311888
312007
  }
311889
312008
  }
@@ -311904,7 +312023,6 @@ class GithubStack extends BaseStack {
311904
312023
 
311905
312024
 
311906
312025
 
311907
- const TerraformModuleStack_messageLog = src_default()('firestartr:provisioner:stacks:terraformmodulestack');
311908
312026
  class TerraformModuleStack extends BaseStack {
311909
312027
  async provisionEntity(isImport, entity, deps, tfStatePath, orgConfig) {
311910
312028
  try {
@@ -311917,7 +312035,7 @@ class TerraformModuleStack extends BaseStack {
311917
312035
  }
311918
312036
  }
311919
312037
  catch (err) {
311920
- TerraformModuleStack_messageLog('Error: provisionEntity: %s', err);
312038
+ provisioner_src_logger.error('Error: provisionEntity: %s', err);
311921
312039
  throw err;
311922
312040
  }
311923
312041
  }
@@ -311973,9 +312091,10 @@ function __calculateTFStatePath(entity) {
311973
312091
  ;// CONCATENATED MODULE: ../provisioner/src/cdktf.ts
311974
312092
 
311975
312093
 
311976
- async function runCDKTF(entityPath, action, depsPath) {
312094
+ async function runCDKTF(entityPath, action, depsPath, stream) {
311977
312095
  return new Promise((ok, ko) => {
311978
312096
  const cdktfProcess = (0,external_child_process_.spawn)('cdktf', [action, '--log-level', 'DEBUG', '--auto-approve'], {
312097
+ stdio: ['inherit', 'pipe', 'pipe'],
311979
312098
  cwd: process.env.IS_DEV_LOCAL_ENVIRONMENT
311980
312099
  ? '/library/packages/provisioner'
311981
312100
  : '/library/provisioner',
@@ -312003,10 +312122,14 @@ async function runCDKTF(entityPath, action, depsPath) {
312003
312122
  const logparsed = log.toString();
312004
312123
  if (!logparsed.includes('Synthesizing')) {
312005
312124
  output += catalog_common.io.stripAnsi(logparsed);
312125
+ if (stream)
312126
+ stream.write(catalog_common.io.stripAnsi(logparsed));
312006
312127
  }
312007
312128
  });
312008
312129
  cdktfProcess.stderr.on('data', (log) => {
312009
312130
  output += catalog_common.io.stripAnsi(log.toString());
312131
+ if (stream)
312132
+ stream.write(catalog_common.io.stripAnsi(log.toString()));
312010
312133
  });
312011
312134
  cdktfProcess.on('exit', async (code) => {
312012
312135
  if (code !== 0) {
@@ -312024,7 +312147,6 @@ async function runCDKTF(entityPath, action, depsPath) {
312024
312147
 
312025
312148
 
312026
312149
 
312027
- const installer_messageLog = src_default()('firestartr:provisioner:features:installer');
312028
312150
  async function installer_installFeaturesForComponent(component, store) {
312029
312151
  const componentFeatures = component.spec?.provisioner?.features || '[]';
312030
312152
  const componentFeaturesToInstall = componentFeatures.filter((feature) => {
@@ -312033,7 +312155,7 @@ async function installer_installFeaturesForComponent(component, store) {
312033
312155
  });
312034
312156
  if (componentFeaturesToInstall.length > 0) {
312035
312157
  for (const feature of componentFeaturesToInstall) {
312036
- installer_messageLog('Installing feature %s for component %s', feature.name, component.metadata.name);
312158
+ log.info('Installing feature %s for component %s', feature.name, component.metadata.name);
312037
312159
  // Get feature config
312038
312160
  const featureConfig = common.features.features.getFeatureRenderedConfigForComponent(component, feature.name);
312039
312161
  // prepare files
@@ -312046,7 +312168,7 @@ async function installer_installFeaturesForComponent(component, store) {
312046
312168
  }
312047
312169
  }
312048
312170
  else {
312049
- installer_messageLog(`No features to install for component ${component.metadata.name}`);
312171
+ log.error(`No features to install for component ${component.metadata.name}`);
312050
312172
  }
312051
312173
  return store;
312052
312174
  }
@@ -312073,7 +312195,7 @@ async function getFileContentFromGithubIfExists(path, repositoryName, owner) {
312073
312195
  }
312074
312196
  catch (e) {
312075
312197
  if (e.status === 404) {
312076
- installer_messageLog(`File ${path} not found in ${repositoryName}`);
312198
+ log.debug(`File ${path} not found in ${repositoryName}`);
312077
312199
  return false;
312078
312200
  }
312079
312201
  throw e;
@@ -312098,7 +312220,6 @@ function isFreshInstallation(featureName, component) {
312098
312220
 
312099
312221
 
312100
312222
 
312101
- const preparer_messageLog = src_default()('firestartr:provisioner:features:installer');
312102
312223
  async function preparer_prepareFeaturesForComponent(component, store) {
312103
312224
  // those are the features to maintain
312104
312225
  let componentFeatures = component.spec?.provisioner?.features || [];
@@ -312113,7 +312234,7 @@ async function preparer_prepareFeaturesForComponent(component, store) {
312113
312234
  if (componentFeatures.length > 0) {
312114
312235
  const entityPath = dumpArtifactYaml(component);
312115
312236
  for (const feature of componentFeatures) {
312116
- preparer_messageLog('Installing feature %s for component %s', feature.name, component.metadata.name);
312237
+ log.info('Installing feature %s for component %s', feature.name, component.metadata.name);
312117
312238
  await featuresPreparer.getFeatureConfig(feature.name, feature.version, entityPath);
312118
312239
  // Get feature config
312119
312240
  const featureConfig = common.features.features.getFeatureRenderedConfigForComponent(component, feature.name);
@@ -312137,17 +312258,17 @@ const external_node_readline_namespaceObject = __WEBPACK_EXTERNAL_createRequire(
312137
312258
 
312138
312259
 
312139
312260
 
312140
- const terraform_messageLog = src_default()('firestartr:provisioner:terraform');
312141
- async function runTerraform(entity, command) {
312261
+ async function runTerraform(entity, command, stream) {
312142
312262
  let entityID = `${entity.kind.toLowerCase()}--${entity['spec']['firestartr']['tfStateKey']}`;
312143
312263
  if (entity.kind === 'FirestartrGithubRepositoryFeature')
312144
312264
  entityID = `${entity.kind.toLowerCase()}--${entity.metadata.name}`;
312145
312265
  const workDir = external_path_.join(process.env.IS_DEV_LOCAL_ENVIRONMENT
312146
312266
  ? '/library/packages/provisioner'
312147
312267
  : '/library/provisioner', 'cdktf.out', 'stacks', entityID);
312148
- terraform_messageLog(`Running terraform with command ${command} in ${workDir}`);
312268
+ provisioner_src_logger.info(`Running terraform with command ${command} in ${workDir}`);
312149
312269
  return new Promise((ok, ko) => {
312150
312270
  const terraformProcess = (0,external_child_process_.spawn)('terraform', [...command], {
312271
+ stdio: ['inherit', 'pipe', 'pipe'],
312151
312272
  cwd: workDir,
312152
312273
  env: {
312153
312274
  PATH: process.env.PATH,
@@ -312165,17 +312286,18 @@ async function runTerraform(entity, command) {
312165
312286
  terraformProcess.stdout.on('data', (log) => {
312166
312287
  const line = catalog_common.io.stripAnsi(log.toString());
312167
312288
  output += line;
312168
- console.log(line);
312289
+ if (stream)
312290
+ stream.write(line);
312169
312291
  });
312170
312292
  terraformProcess.stderr.on('data', (log) => {
312171
312293
  const line = catalog_common.io.stripAnsi(log.toString());
312172
312294
  output += line;
312173
- console.log(line);
312295
+ if (stream)
312296
+ stream.write(line);
312174
312297
  });
312175
312298
  terraformProcess.on('exit', async (code) => {
312176
312299
  console.log(`child process exited with code ${code}`);
312177
312300
  if (code !== 0) {
312178
- console.log(output);
312179
312301
  ko(output);
312180
312302
  }
312181
312303
  else {
@@ -312184,13 +312306,13 @@ async function runTerraform(entity, command) {
312184
312306
  });
312185
312307
  });
312186
312308
  }
312187
- function terraformInit(entity) {
312188
- return runTerraform(entity, ['init', '-no-color']);
312309
+ function terraformInit(entity, stream) {
312310
+ return runTerraform(entity, ['init', '-no-color'], stream);
312189
312311
  }
312190
- function terraformPlan(entity) {
312191
- return runTerraform(entity, ['plan', '-no-color']);
312312
+ function terraformPlan(entity, stream) {
312313
+ return runTerraform(entity, ['plan', '-no-color'], stream);
312192
312314
  }
312193
- async function terraformApply(entity, isImport = false, skipPlan = false) {
312315
+ async function terraformApply(entity, isImport = false, skipPlan = false, stream) {
312194
312316
  let line = false;
312195
312317
  if (isImport && !skipPlan) {
312196
312318
  console.log(`
@@ -312209,15 +312331,15 @@ Type 'yes' to continue:`);
312209
312331
  });
312210
312332
  }
312211
312333
  if (line === 'yes' || skipPlan) {
312212
- return runTerraform(entity, ['apply', '-no-color', '-auto-approve']);
312334
+ return runTerraform(entity, ['apply', '-no-color', '-auto-approve'], stream);
312213
312335
  }
312214
312336
  else {
312215
312337
  console.log(`🚀 Skipping apply for entity ${entity.kind} ${entity.metadata.name}`);
312216
312338
  return Promise.resolve('');
312217
312339
  }
312218
312340
  }
312219
- function terraformDestroy(entity) {
312220
- return runTerraform(entity, ['destroy', '-no-color', '-auto-approve']);
312341
+ function terraformDestroy(entity, stream) {
312342
+ return runTerraform(entity, ['destroy', '-no-color', '-auto-approve'], stream);
312221
312343
  }
312222
312344
 
312223
312345
  ;// CONCATENATED MODULE: ../provisioner/src/features/uninstaller.ts
@@ -312226,12 +312348,11 @@ function terraformDestroy(entity) {
312226
312348
 
312227
312349
 
312228
312350
 
312229
- const uninstaller_messageLog = src_default()('firestartr:provisioner:features:uninstaller');
312230
312351
  async function untrackManagedFiles(feature, deps) {
312231
312352
  if (!feature.spec.files || feature.spec.files.length < 1)
312232
312353
  return;
312233
- uninstaller_messageLog('Removing managed files from the Terraform State');
312234
- uninstaller_messageLog('Synthing the project...');
312354
+ provisioner_src_logger.debug('Removing managed files from the Terraform State');
312355
+ provisioner_src_logger.debug('Synthing the project...');
312235
312356
  const randomFilenameFeature = `${catalog_common.generic.randomString(20)}.yaml`;
312236
312357
  const randomFilenameDeps = `${catalog_common.generic.randomString(20)}_deps.yaml`;
312237
312358
  catalog_common.io.writeYamlFile(randomFilenameFeature, feature, '/tmp');
@@ -312239,7 +312360,7 @@ async function untrackManagedFiles(feature, deps) {
312239
312360
  await runCDKTF(external_path_.join('/tmp', randomFilenameFeature), 'synth', external_path_.join('/tmp', randomFilenameDeps));
312240
312361
  await runTerraform(feature, ['init']);
312241
312362
  for (const file of feature.spec.files.filter((file) => file.userManaged === true)) {
312242
- uninstaller_messageLog(`Removing from the state file ${file.path}`);
312363
+ provisioner_src_logger.debug(`Removing from the state file ${file.path}`);
312243
312364
  // Terraform replaces / with -- and . with - in the state file names, so we do the same to get the state file name
312244
312365
  const stateFileName = `${feature.spec.type}-${file.path}`
312245
312366
  .replace(/\//g, '--')
@@ -312312,14 +312433,46 @@ function getNextStatus(status) {
312312
312433
 
312313
312434
 
312314
312435
 
312436
+
312437
+
312315
312438
  class Resource {
312316
312439
  setLogger(fn) {
312317
312440
  this.logFn = fn;
312318
312441
  }
312442
+ setSynthStreamLogs(callbacks) {
312443
+ this.synthStreamCallbacks = callbacks;
312444
+ }
312445
+ setTFStreamLogs(callbacks) {
312446
+ this.tfStreamCallbacks = callbacks;
312447
+ }
312448
+ async onSyncStreaming() {
312449
+ if (!this.logStream) {
312450
+ this.logStream = new external_stream_.PassThrough();
312451
+ }
312452
+ if (this.synthStreamCallbacks) {
312453
+ const callbacks = await this.synthStreamCallbacks.prepare();
312454
+ this.setLogStream(callbacks.fnData, callbacks.fnEnd);
312455
+ }
312456
+ }
312457
+ async onTFStreaming() {
312458
+ if (!this.logStream) {
312459
+ this.logStream = new external_stream_.PassThrough();
312460
+ }
312461
+ if (this.tfStreamCallbacks) {
312462
+ const callbacks = await this.tfStreamCallbacks.prepare();
312463
+ this.setLogStream(callbacks.fnData, callbacks.fnEnd);
312464
+ }
312465
+ }
312466
+ setLogStream(fnData, fnEnd, reopen = true) {
312467
+ if (reopen || !this.logStream)
312468
+ this.logStream = new external_stream_.PassThrough();
312469
+ this.logStream.on('data', (data) => fnData(data.toString()));
312470
+ this.logStream.on('end', () => fnEnd());
312471
+ }
312319
312472
  constructor(mainCR, operation, deps = []) {
312320
312473
  this.data = {};
312321
312474
  this.output = '';
312322
- this.logFn = (msg) => console.log(msg);
312475
+ this.logFn = (msg) => provisioner_src_logger.debug(msg);
312323
312476
  this.set('main_artifact', mainCR);
312324
312477
  this.set('operation', operation);
312325
312478
  this.set('deps', deps);
@@ -312329,36 +312482,46 @@ class Resource {
312329
312482
  await this.synth();
312330
312483
  await this.runTerraform();
312331
312484
  await this.postprocess();
312485
+ if (this.logStream) {
312486
+ this.logStream.end();
312487
+ this.logStream = null;
312488
+ }
312332
312489
  }
312333
312490
  artifact() {
312334
312491
  return this.get('main_artifact');
312335
312492
  }
312336
312493
  async synth() {
312494
+ await this.onSyncStreaming();
312337
312495
  const randomFilenameArtifact = `${catalog_common.generic.randomString(20)}.yaml`;
312338
312496
  const randomFilenameDeps = `${catalog_common.generic.randomString(20)}_deps.yaml`;
312339
312497
  catalog_common.io.writeYamlFile(randomFilenameArtifact, this.get('main_artifact'), '/tmp');
312340
312498
  catalog_common.io.writeYamlFile(randomFilenameDeps, this.get('deps'), '/tmp');
312341
- await runCDKTF(external_path_.join('/tmp', randomFilenameArtifact), 'synth', external_path_.join('/tmp', randomFilenameDeps));
312499
+ await runCDKTF(external_path_.join('/tmp', randomFilenameArtifact), 'synth', external_path_.join('/tmp', randomFilenameDeps), this.logStream);
312500
+ if (this.logStream) {
312501
+ this.logStream.end();
312502
+ this.logStream = null;
312503
+ }
312342
312504
  }
312343
312505
  log(msg) {
312344
312506
  this.logFn(msg);
312345
312507
  }
312346
312508
  async runTerraform() {
312509
+ await this.onTFStreaming();
312347
312510
  let output = '';
312348
- output += await terraformInit(this.get('main_artifact'));
312349
- output += await terraformPlan(this.get('main_artifact'));
312511
+ output += await terraformInit(this.get('main_artifact'), this.logStream);
312512
+ output += await terraformPlan(this.get('main_artifact'), this.logStream);
312350
312513
  if (this.get('operation') === 'CREATE' ||
312351
312514
  this.get('operation') === 'UPDATE') {
312352
- output += await terraformApply(this.get('main_artifact'), false, true);
312515
+ output += await terraformApply(this.get('main_artifact'), false, true, this.logStream);
312353
312516
  }
312354
312517
  else if (this.get('operation') === 'DELETE') {
312355
- output += await terraformDestroy(this.get('main_artifact'));
312518
+ output += await terraformDestroy(this.get('main_artifact'), this.logStream);
312356
312519
  }
312357
312520
  else if (this.get('operation') === 'IMPORT') {
312358
- output += await terraformApply(this.get('main_artifact'), true, false);
312521
+ output += await terraformApply(this.get('main_artifact'), true, false, this.logStream);
312359
312522
  }
312360
312523
  else if (this.get('operation') === 'IMPORT_SKIP_PLAN') {
312361
- output += await terraformApply(this.get('main_artifact'), true, true);
312524
+ output += await terraformApply(this.get('main_artifact'), true, true, this.logStream);
312362
312525
  }
312363
312526
  else {
312364
312527
  throw new Error(`unknown operation: ${this.get('operation')}`);
@@ -312383,7 +312546,6 @@ class Resource {
312383
312546
 
312384
312547
 
312385
312548
 
312386
- const github_feature_log = src_default()('firestartr:provisioner:github_repository_feature');
312387
312549
  class github_feature_FirestartrGithubRepositoryFeature extends Resource {
312388
312550
  static kind() {
312389
312551
  return 'FirestartrGithubRepositoryFeature';
@@ -312391,19 +312553,19 @@ class github_feature_FirestartrGithubRepositoryFeature extends Resource {
312391
312553
  async preprocess() {
312392
312554
  switch (this.get('operation')) {
312393
312555
  case 'CREATE':
312394
- github_feature_log('CREATED');
312556
+ provisioner_src_logger.debug('Creating FirestartrGithubRepositoryFeature');
312395
312557
  await this._updateManagedFiles();
312396
312558
  break;
312397
312559
  case 'UPDATE':
312398
- github_feature_log('UPDATED');
312560
+ provisioner_src_logger.debug('Updating FirestartrGithubRepositoryFeature');
312399
312561
  await this._updateManagedFiles();
312400
312562
  break;
312401
312563
  case 'DELETE':
312402
- github_feature_log('DELETE');
312564
+ provisioner_src_logger.debug('Deleting FirestartrGithubRepositoryFeature');
312403
312565
  await untrackManagedFiles(this.get('main_artifact'), this.get('deps'));
312404
312566
  break;
312405
312567
  default:
312406
- github_feature_log(`UNKNOWN: ${this.get('operation')}`);
312568
+ provisioner_src_logger.debug(`Unknown operation '${this.get('operation')}' for FirestartrGithubRepositoryFeature`);
312407
312569
  }
312408
312570
  }
312409
312571
  async _updateManagedFiles() {
@@ -312417,7 +312579,7 @@ class github_feature_FirestartrGithubRepositoryFeature extends Resource {
312417
312579
  file.content = newContent;
312418
312580
  }
312419
312581
  catch (e) {
312420
- github_feature_log(`File ${file.path} not found in repo ${repoInfo.ref.name} on branch ${repoInfo.branch}. File content not updated`);
312582
+ provisioner_src_logger.error(`File ${file.path} not found in repo ${repoInfo.ref.name} on branch ${repoInfo.branch}. File content not updated`);
312421
312583
  }
312422
312584
  }
312423
312585
  }
@@ -312497,7 +312659,6 @@ async function provisionRegularBranch(repo, branchName, sourceBranch, org) {
312497
312659
 
312498
312660
 
312499
312661
 
312500
- const github_repository_log = src_default()('firestartr:provisioner:github_repository');
312501
312662
  class github_repository_FirestartrGithubRepository extends Resource {
312502
312663
  static kind() {
312503
312664
  return 'FirestartrGithubRepository';
@@ -312505,22 +312666,23 @@ class github_repository_FirestartrGithubRepository extends Resource {
312505
312666
  async preprocess() {
312506
312667
  switch (this.get('operation')) {
312507
312668
  case 'CREATE':
312508
- github_repository_log('CREATE');
312669
+ provisioner_src_logger.debug('Creating FirestartrGithubRepository');
312509
312670
  break;
312510
312671
  case 'UPDATE':
312511
- github_repository_log('UPDATED');
312672
+ provisioner_src_logger.debug('Updating FirestartrGithubRepository');
312512
312673
  break;
312513
312674
  case 'DELETE':
312514
- github_repository_log('DELETED');
312675
+ provisioner_src_logger.debug('Deleted FirestartrGithubRepository');
312515
312676
  break;
312516
312677
  case 'IMPORT':
312517
- github_repository_log('IMPORT');
312678
+ provisioner_src_logger.debug('Importing FirestartrGithubRepository');
312518
312679
  break;
312519
312680
  case 'IMPORT_SKIP_PLAN':
312520
- github_repository_log('IMPORT_SKIP_PLAN');
312681
+ provisioner_src_logger.debug('Import skipping plan for FirestartrGithubRepository');
312521
312682
  break;
312522
312683
  default:
312523
- github_repository_log('UNKNOWN');
312684
+ provisioner_src_logger.debug('Unknown operation for FirestartrGithubRepository ' +
312685
+ this.get('operation'));
312524
312686
  }
312525
312687
  }
312526
312688
  async postprocess() {
@@ -312528,20 +312690,20 @@ class github_repository_FirestartrGithubRepository extends Resource {
312528
312690
  switch (this.get('operation')) {
312529
312691
  case 'CREATE':
312530
312692
  case 'UPDATE':
312531
- github_repository_log('CREATE & UPDATE');
312693
+ provisioner_src_logger.debug(`Created and updated FirestartrGithubRepository ${cr.metadata.name}`);
312532
312694
  await provisionAdditionalBranches(cr);
312533
312695
  break;
312534
312696
  case 'DELETE':
312535
- github_repository_log('DELETED');
312697
+ provisioner_src_logger.debug(`Deleted FirestartrGithubRepository ${cr.metadata.name}`);
312536
312698
  break;
312537
312699
  case 'IMPORT':
312538
- github_repository_log('IMPORT');
312700
+ provisioner_src_logger.debug(`Imported FirestartrGithubRepository ${cr.metadata.name}`);
312539
312701
  break;
312540
312702
  case 'IMPORT_SKIP_PLAN':
312541
- github_repository_log('IMPORT_SKIP_PLAN');
312703
+ provisioner_src_logger.debug(`Imported skipped plan FirestartrGithubRepository ${cr.metadata.name}`);
312542
312704
  break;
312543
312705
  default:
312544
- github_repository_log('UNKNOWN');
312706
+ provisioner_src_logger.debug(`Finished for unknown operation ${this.get('operation')} for FirestartrGithubRepository`);
312545
312707
  }
312546
312708
  }
312547
312709
  }
@@ -312549,7 +312711,6 @@ class github_repository_FirestartrGithubRepository extends Resource {
312549
312711
  ;// CONCATENATED MODULE: ../provisioner/src/resources/github_membership/index.ts
312550
312712
 
312551
312713
 
312552
- const github_membership_log = src_default()('firestartr:provisioner:github_membership');
312553
312714
  class github_membership_FirestartrGithubMembership extends Resource {
312554
312715
  static kind() {
312555
312716
  return 'FirestartrGithubMembership';
@@ -312557,22 +312718,23 @@ class github_membership_FirestartrGithubMembership extends Resource {
312557
312718
  async preprocess() {
312558
312719
  switch (this.get('operation')) {
312559
312720
  case 'CREATE':
312560
- github_membership_log('CREATE');
312721
+ provisioner_src_logger.debug('Creating FirestartrGithubMembership');
312561
312722
  break;
312562
312723
  case 'UPDATE':
312563
- github_membership_log('UPDATED');
312724
+ provisioner_src_logger.debug('Updating FirestartrGithubMembership');
312564
312725
  break;
312565
312726
  case 'DELETE':
312566
- github_membership_log('DELETED');
312727
+ provisioner_src_logger.debug('Deleted FirestartrGithubMembership');
312567
312728
  break;
312568
312729
  case 'IMPORT':
312569
- github_membership_log('IMPORT');
312730
+ provisioner_src_logger.debug('Importing FirestartrGithubMembership');
312570
312731
  break;
312571
312732
  case 'IMPORT_SKIP_PLAN':
312572
- github_membership_log('IMPORT_SKIP_PLAN');
312733
+ provisioner_src_logger.debug('Import skipping plan for FirestartrGithubMembership');
312573
312734
  break;
312574
312735
  default:
312575
- github_membership_log('UNKNOWN');
312736
+ provisioner_src_logger.debug('Unknown operation for FirestartrGithubMembership ' +
312737
+ this.get('operation'));
312576
312738
  }
312577
312739
  }
312578
312740
  }
@@ -312580,7 +312742,6 @@ class github_membership_FirestartrGithubMembership extends Resource {
312580
312742
  ;// CONCATENATED MODULE: ../provisioner/src/resources/github_group/index.ts
312581
312743
 
312582
312744
 
312583
- const github_group_log = src_default()('firestartr:provisioner:github_group');
312584
312745
  class github_group_FirestartrGithubGroup extends Resource {
312585
312746
  static kind() {
312586
312747
  return 'FirestartrGithubGroup';
@@ -312588,22 +312749,23 @@ class github_group_FirestartrGithubGroup extends Resource {
312588
312749
  async preprocess() {
312589
312750
  switch (this.get('operation')) {
312590
312751
  case 'CREATE':
312591
- github_group_log('CREATE');
312752
+ provisioner_src_logger.debug('Creating FirestartrGithubGroup');
312592
312753
  break;
312593
312754
  case 'UPDATE':
312594
- github_group_log('UPDATED');
312755
+ provisioner_src_logger.debug('Updating FirestartrGithubGroup');
312595
312756
  break;
312596
312757
  case 'DELETE':
312597
- github_group_log('DELETED');
312758
+ provisioner_src_logger.debug('Deleted FirestartrGithubGroup');
312598
312759
  break;
312599
312760
  case 'IMPORT':
312600
- github_group_log('IMPORT');
312761
+ provisioner_src_logger.debug('Importing FirestartrGithubGroup');
312601
312762
  break;
312602
312763
  case 'IMPORT_SKIP_PLAN':
312603
- github_group_log('IMPORT_SKIP_PLAN');
312764
+ provisioner_src_logger.debug('Import skipping plan for FirestartrGithubGroup');
312604
312765
  break;
312605
312766
  default:
312606
- github_group_log('UNKNOWN');
312767
+ provisioner_src_logger.debug('Unknown operation for FirestartrGithubGroup ' +
312768
+ this.get('operation'));
312607
312769
  }
312608
312770
  }
312609
312771
  }
@@ -312611,14 +312773,13 @@ class github_group_FirestartrGithubGroup extends Resource {
312611
312773
  ;// CONCATENATED MODULE: ../provisioner/src/resources/terraform_module/index.ts
312612
312774
 
312613
312775
 
312614
- const terraform_module_log = src_default()('firestartr:provisioner:terraform_module');
312615
312776
  class FirestartrTerraformModule extends Resource {
312616
312777
  static kind() {
312617
312778
  return 'FirestartrTerraformModule';
312618
312779
  }
312619
312780
  async preprocess() {
312620
312781
  const operation = this.get('operation');
312621
- terraform_module_log(operation);
312782
+ provisioner_src_logger.debug(`Running operation '${operation}' for FirestartrTerraformModule`);
312622
312783
  switch (operation) {
312623
312784
  case 'CREATE':
312624
312785
  break;
@@ -312639,7 +312800,6 @@ class FirestartrTerraformModule extends Resource {
312639
312800
  ;// CONCATENATED MODULE: ../provisioner/src/resources/github_orgWebhook/index.ts
312640
312801
 
312641
312802
 
312642
- const github_orgWebhook_log = src_default()('firestartr:provisioner:github_orgWebhook');
312643
312803
  class github_orgWebhook_FirestartrGithubOrgWebhook extends Resource {
312644
312804
  static kind() {
312645
312805
  return 'FirestartrGithubOrgWebhook';
@@ -312647,22 +312807,23 @@ class github_orgWebhook_FirestartrGithubOrgWebhook extends Resource {
312647
312807
  async preprocess() {
312648
312808
  switch (this.get('operation')) {
312649
312809
  case 'CREATE':
312650
- github_orgWebhook_log('CREATE');
312810
+ provisioner_src_logger.debug('Creating FirestartrGithubOrgWebhook');
312651
312811
  break;
312652
312812
  case 'UPDATE':
312653
- github_orgWebhook_log('UPDATED');
312813
+ provisioner_src_logger.debug('Updating FirestartrGithubOrgWebhook');
312654
312814
  break;
312655
312815
  case 'DELETE':
312656
- github_orgWebhook_log('DELETED');
312816
+ provisioner_src_logger.debug('Deleted FirestartrGithubOrgWebhook');
312657
312817
  break;
312658
312818
  case 'IMPORT':
312659
- github_orgWebhook_log('IMPORT');
312819
+ provisioner_src_logger.debug('Importing FirestartrGithubOrgWebhook');
312660
312820
  break;
312661
312821
  case 'IMPORT_SKIP_PLAN':
312662
- github_orgWebhook_log('IMPORT_SKIP_PLAN');
312822
+ provisioner_src_logger.debug('Import skipping plan for FirestartrGithubOrgWebhook');
312663
312823
  break;
312664
312824
  default:
312665
- github_orgWebhook_log('UNKNOWN');
312825
+ provisioner_src_logger.debug('Unknown operation for FirestartrGithubOrgWebhook ' +
312826
+ this.get('operation'));
312666
312827
  }
312667
312828
  }
312668
312829
  }
@@ -312699,6 +312860,12 @@ async function runProvisioner(data, opts) {
312699
312860
  ? 'DELETE'
312700
312861
  : 'UNKNOWN';
312701
312862
  const resource = createInstanceOf(mainCr, operation, deps);
312863
+ if ('logStreamCallbacksCDKTF' in opts) {
312864
+ resource.setSynthStreamLogs(opts['logStreamCallbacksCDKTF']);
312865
+ }
312866
+ if ('logStreamCallbacksTF' in opts) {
312867
+ resource.setTFStreamLogs(opts['logStreamCallbacksTF']);
312868
+ }
312702
312869
  await resource.run();
312703
312870
  return resource;
312704
312871
  }
@@ -312717,7 +312884,6 @@ function createInstanceOf(entity, op, deps) {
312717
312884
 
312718
312885
 
312719
312886
 
312720
- const provisioner_messageLog_0 = src_default()('firestartr:provisioner:main');
312721
312887
  async function deploy(app) {
312722
312888
  const entity = catalog_common.io.fromYaml(external_fs_.readFileSync(catalog_common.environment.getFromEnvironment(catalog_common.types.envVars.cdktfEntityPath), 'utf8'));
312723
312889
  const deps = catalog_common.io.fromYaml(external_fs_.readFileSync(catalog_common.environment.getFromEnvironment(catalog_common.types.envVars.cdktfDepsPath), 'utf8'));
@@ -312726,7 +312892,7 @@ async function deploy(app) {
312726
312892
  : false;
312727
312893
  catalog_common.io.removeFile(catalog_common.environment.getFromEnvironment(catalog_common.types.envVars.cdktfEntityPath));
312728
312894
  catalog_common.io.removeFile(catalog_common.environment.getFromEnvironment(catalog_common.types.envVars.cdktfDepsPath));
312729
- provisioner_messageLog_0('Entity to provision: %O', entity);
312895
+ provisioner_src_logger.info(`Entity to provision: ${entity}`);
312730
312896
  const orgConfig = {
312731
312897
  bucket: catalog_common.environment.getFromEnvironment(catalog_common.types.envVars.s3Bucket),
312732
312898
  dynamodbTable: catalog_common.environment.getFromEnvironment(catalog_common.types.envVars.s3Lock),
@@ -312744,7 +312910,7 @@ async function deploy(app) {
312744
312910
  app.synth();
312745
312911
  }
312746
312912
  catch (e) {
312747
- void provisioner_messageLog_0('Error: deploy: %s', e);
312913
+ provisioner_src_logger.error('Error: deploy: %s', e);
312748
312914
  throw e;
312749
312915
  }
312750
312916
  }
@@ -312766,26 +312932,20 @@ if (process.env.RUN_PROVISIONER) {
312766
312932
  async function tryPublishApply(item, planOutput, kind) {
312767
312933
  try {
312768
312934
  if (!('firestartr.dev/last-state-pr' in item.metadata.annotations)) {
312769
- src_logger.debug('USER_FEEDBACK_PUBLISH_APPLY_NO_LAST_STATE', {
312770
- metadata: { name: item.metadata.name, kind },
312771
- });
312935
+ operator_src_logger.debug(`The user feedback for the '${kind}/${item.metadata.name}' apply operation could not be published because the last state was not found.`);
312772
312936
  return;
312773
312937
  }
312774
312938
  await publishApply(item, planOutput, kind);
312775
312939
  }
312776
312940
  catch (e) {
312777
- src_logger.error('USER_FEEDBACK_PUBLISH_APPLY_ERROR', {
312778
- metadata: { name: item.metadata.name, kind, error: e },
312779
- });
312941
+ operator_src_logger.error(`The user feedback for the '${kind}/${item.metadata.name}' apply operation failed to publish due to an error: '${e}'.`);
312780
312942
  }
312781
312943
  }
312782
312944
  async function tryPublishDestroy(item, destroyOutput) {
312783
312945
  let lastPr = null;
312784
312946
  try {
312785
312947
  const { repo, org } = extractPrInfo(item);
312786
- src_logger.debug('USER_FEEDBACK_PUBLISH_DESTROY', {
312787
- metadata: { item, repo, org },
312788
- });
312948
+ operator_src_logger.debug(`The user feedback for the '${item.kind}/${item.metadata.name}' destroy operation is being published for repository '${repo}' in organization '${org}'.`);
312789
312949
  lastPr = await github_0.pulls.filterPrBy({
312790
312950
  title: `hydrate: ${item.metadata.name}`,
312791
312951
  state: 'closed',
@@ -312796,9 +312956,7 @@ async function tryPublishDestroy(item, destroyOutput) {
312796
312956
  maxRetries: 3,
312797
312957
  });
312798
312958
  if (!lastPr) {
312799
- src_logger.debug('USER_FEEDBACK_PUBLISH_DESTROY_NO_LAST_STATE', {
312800
- metadata: { item },
312801
- });
312959
+ operator_src_logger.debug(`The user feedback for the '${item.kind}/${item.metadata.name}' destroy operation could not be published because the last state was not found.`);
312802
312960
  return;
312803
312961
  }
312804
312962
  const dividedOutput = github_0.pulls.divideCommentIntoChunks(destroyOutput, 250);
@@ -312816,20 +312974,14 @@ async function tryPublishDestroy(item, destroyOutput) {
312816
312974
  ${commentContent}
312817
312975
  \`\`\`
312818
312976
  </details>`;
312819
- src_logger.debug('USER_FEEDBACK_PUBLISH_COMMENT', {
312820
- metadata: { lastPr: lastPr.number, repo, org, item },
312821
- });
312977
+ operator_src_logger.debug(`The user feedback for item '${item.kind}/${item.metadata.name}' is being published as a comment on pull request '${lastPr.number}' for repository '${repo}' in organization '${org}'.`);
312822
312978
  await github_0.pulls.commentInPR(comment, lastPr.number, repo, org);
312823
- src_logger.debug('USER_FEEDBACK_PUBLISH_DESTROY_COMMENT', {
312824
- metadata: { lastPr: lastPr.number, item },
312825
- });
312979
+ operator_src_logger.debug(`The user feedback for the '${item.kind}/${item.metadata.name}' destroy operation is being published as a comment on pull request '${lastPr.number}'.`);
312826
312980
  currentCommentNo += 1;
312827
312981
  }
312828
312982
  }
312829
312983
  catch (e) {
312830
- src_logger.error('USER_FEEDBACK_PUBLISH_ERROR', {
312831
- metadata: { lastPr: lastPr.number, item, error: e },
312832
- });
312984
+ operator_src_logger.error(`An error occurred while publishing user feedback for item '${item.kind}/${item.metadata.name}' on pull request '${lastPr.number}': '${e}'.`);
312833
312985
  }
312834
312986
  }
312835
312987
  async function publishApply(item, applyOutput, kind) {
@@ -312866,9 +313018,7 @@ function tryCreateErrorSummary(title, errorMsg) {
312866
313018
  return summaryText;
312867
313019
  }
312868
313020
  catch (e) {
312869
- src_logger.error('USER_FEEDBACK_GETTING_ERROR_SUMMARY', {
312870
- metadata: { error: e, title, errorMsg },
312871
- });
313021
+ operator_src_logger.error(`An error occurred while getting the error summary for '${title}'. The error was '${e}', with the message: '${errorMsg}'.`);
312872
313022
  return `Error when getting error summary: ${e}`;
312873
313023
  }
312874
313024
  }
@@ -312890,9 +313040,7 @@ async function tryPublishError(item, reason, message) {
312890
313040
  await publishError(item, reason, message);
312891
313041
  }
312892
313042
  catch (e) {
312893
- src_logger.error('USER_FEEDBACK_TRY_PUBLISH_ERROR', {
312894
- metadata: { item, error: e, reason },
312895
- });
313043
+ operator_src_logger.error(`The user feedback for item '${item.kind}/${item.metadata.name}' failed to publish due to an error: '${e}'. Reason: '${reason}'.`);
312896
313044
  }
312897
313045
  }
312898
313046
  async function publishError(item, reason, message) {
@@ -312930,6 +313078,53 @@ ${commentContent}
312930
313078
  }
312931
313079
  }
312932
313080
 
313081
+ ;// CONCATENATED MODULE: ../operator/src/user-feedback-ops/gh-checkrun.ts
313082
+
313083
+ async function GHCheckRun(cmd, item) {
313084
+ const prInfo = gh_checkrun_extractPrInfo(item);
313085
+ if (!prInfo.prNumber) {
313086
+ throw new Error('TFCheckRun: prNumber not retrievable');
313087
+ }
313088
+ const checkRun = await github_0.feedback.createCheckRun(prInfo.org, prInfo.repo, helperCreateCheckRunName(cmd, item), {
313089
+ //Number(pr_number),
313090
+ pullNumber: Number(prInfo.prNumber),
313091
+ includeCheckRunComment: true,
313092
+ checkRunComment: `The Github ${item.kind} is being processed (cmd=${cmd}). Details: `,
313093
+ });
313094
+ checkRun.mdOptionsDetails({
313095
+ quotes: 'terraform',
313096
+ });
313097
+ checkRun.update('Initiating', 'queued');
313098
+ return {
313099
+ fnData: (d) => {
313100
+ checkRun.update(d.toString(), 'in_progress');
313101
+ },
313102
+ fnEnd: () => {
313103
+ checkRun.close('OK', true);
313104
+ },
313105
+ fnOnError: (err) => {
313106
+ checkRun.close('KO', false);
313107
+ },
313108
+ };
313109
+ }
313110
+ function helperCreateCheckRunName(cmd, item) {
313111
+ return `Github Provisioner / ${item.kind} - ${cmd}`;
313112
+ }
313113
+ function gh_checkrun_extractPrInfo(item) {
313114
+ const prInfo = item.metadata.annotations['firestartr.dev/last-state-pr'];
313115
+ const prNumber = prInfo.split('#')[1];
313116
+ if (!prNumber)
313117
+ throw new Error('No PR number found in CR');
313118
+ const orgRepo = prInfo.split('#')[0];
313119
+ const org = orgRepo.split('/')[0];
313120
+ if (!org)
313121
+ throw new Error('No org found in CR');
313122
+ const repo = orgRepo.split('/')[1];
313123
+ if (!repo)
313124
+ throw new Error('No repo found in CR');
313125
+ return { prNumber, repo, org };
313126
+ }
313127
+
312933
313128
  ;// CONCATENATED MODULE: ../operator/cdktf.ts
312934
313129
 
312935
313130
 
@@ -312939,8 +313134,8 @@ ${commentContent}
312939
313134
 
312940
313135
 
312941
313136
 
312942
- const cdktf_log = src_default()('firestartr:operator:cdktf');
312943
313137
  function processOperation(item, op, handler) {
313138
+ operator_src_logger.info(`Processing operation ${op} on ${item.kind}/${item.metadata?.name}`);
312944
313139
  try {
312945
313140
  switch (op) {
312946
313141
  case OperationType.UPDATED:
@@ -312962,7 +313157,7 @@ function processOperation(item, op, handler) {
312962
313157
  }
312963
313158
  }
312964
313159
  catch (e) {
312965
- cdktf_log(`Operation ${op} failed: ${e}`);
313160
+ operator_src_logger.error(`Operation ${op} failed: ${e}`);
312966
313161
  throw e;
312967
313162
  }
312968
313163
  }
@@ -313013,6 +313208,9 @@ async function* sync(item, op, handler) {
313013
313208
  };
313014
313209
  }
313015
313210
  async function* markedToDeletion(item, op, handler) {
313211
+ // here we store the current callbacks that
313212
+ // are being used (synth|tf-apply...)
313213
+ let checkRunCtl;
313016
313214
  try {
313017
313215
  void cleanTerraformState();
313018
313216
  const type = 'DELETING';
@@ -313037,15 +313235,38 @@ async function* markedToDeletion(item, op, handler) {
313037
313235
  status: 'True',
313038
313236
  message: 'Destroying process started',
313039
313237
  };
313040
- if (item.metadata.annotations['firestartr.dev/last-state-pr'] || false) {
313041
- await addDestroyCommitStatus(item, 'pending', 'Performing destroy operation...', `Terraform Destroy ${item.metadata.name}`);
313042
- }
313043
313238
  const deps = await handler.resolveReferences();
313239
+ const annotation = 'firestartr.dev/last-state-pr';
313240
+ const statePr = item?.metadata?.annotations?.[annotation];
313241
+ const hasStatePr = typeof statePr === 'string' && statePr.trim().length > 0;
313242
+ if (!hasStatePr) {
313243
+ operator_src_logger.warn(`CR ${item?.kind ?? 'UnknownKind'}/${item?.metadata?.name ?? 'unknown'} ` +
313244
+ `has no "${annotation}" annotation; skipping GitHub Check Runs (synth, terraform apply).`);
313245
+ }
313246
+ else {
313247
+ operator_src_logger.debug(`CR ${item.kind}/${item.metadata.name} uses "${annotation}" = ${statePr}`);
313248
+ }
313044
313249
  const destroyOutput = await provisioner.runProvisioner({
313045
313250
  mainCr: item,
313046
313251
  deps,
313047
313252
  }, {
313048
313253
  delete: true,
313254
+ ...(hasStatePr
313255
+ ? {
313256
+ logStreamCallbacksCDKTF: {
313257
+ prepare: async () => {
313258
+ checkRunCtl = await GHCheckRun('synth', item);
313259
+ return checkRunCtl;
313260
+ },
313261
+ },
313262
+ logStreamCallbacksTF: {
313263
+ prepare: async () => {
313264
+ checkRunCtl = await GHCheckRun('terraform destroy', item);
313265
+ return checkRunCtl;
313266
+ },
313267
+ },
313268
+ }
313269
+ : {}),
313049
313270
  });
313050
313271
  const output = destroyOutput.output;
313051
313272
  await tryPublishDestroy(item, output);
@@ -313071,10 +313292,11 @@ async function* markedToDeletion(item, op, handler) {
313071
313292
  status: 'True',
313072
313293
  message: e.toString(),
313073
313294
  };
313295
+ // if there is a current checkRun working
313296
+ // we close it with an error
313297
+ if (checkRunCtl)
313298
+ checkRunCtl.fnOnError(e);
313074
313299
  await handler.writeTerraformOutputInTfResult(item, e);
313075
- if (item.metadata.annotations['firestartr.dev/last-state-pr'] || false) {
313076
- await addDestroyCommitStatus(item, 'failure', 'Destroy operation failed', `Terraform Destroy ${item.metadata.name}`);
313077
- }
313078
313300
  void handler.error();
313079
313301
  }
313080
313302
  }
@@ -313093,6 +313315,9 @@ async function* nothing(item, op, handler) {
313093
313315
  * @param handler -
313094
313316
  */
313095
313317
  async function* doApply(item, op, handler) {
313318
+ // here we store the current callbacks that
313319
+ // are being used (synth|tf-apply...)
313320
+ let checkRunCtl;
313096
313321
  try {
313097
313322
  cleanTerraformState();
313098
313323
  yield {
@@ -313134,16 +313359,41 @@ async function* doApply(item, op, handler) {
313134
313359
  opts['create'] = true;
313135
313360
  }
313136
313361
  const deps = await handler.resolveReferences();
313137
- cdktf_log('Item %s has the following dependencies: %O', item.metadata.name, deps);
313138
- if (item.metadata.annotations['firestartr.dev/last-state-pr'] || false) {
313139
- await addApplyCommitStatus(item, 'pending', catalog_common.generic.getPrLinkFromAnnotationValue(item.metadata.annotations['firestartr.dev/last-state-pr']), 'Performing apply operation...', `Terraform Apply ${item.metadata.name}`);
313362
+ operator_src_logger.info(`Item ${item.metadata.name} has the following dependencies: ${deps}`);
313363
+ const annotation = 'firestartr.dev/last-state-pr';
313364
+ const statePr = item?.metadata?.annotations?.[annotation];
313365
+ const hasStatePr = typeof statePr === 'string' && statePr.trim().length > 0;
313366
+ if (!hasStatePr) {
313367
+ operator_src_logger.warn(`CR ${item?.kind ?? 'UnknownKind'}/${item?.metadata?.name ?? 'unknown'} ` +
313368
+ `has no "${annotation}" annotation; skipping GitHub Check Runs (synth, terraform apply).`);
313369
+ }
313370
+ else {
313371
+ operator_src_logger.debug(`CR ${item.kind}/${item.metadata.name} uses "${annotation}" = ${statePr}`);
313140
313372
  }
313141
313373
  const applyOutput = await provisioner.runProvisioner({
313142
313374
  mainCr: item,
313143
313375
  deps,
313144
- }, opts);
313376
+ }, {
313377
+ ...opts,
313378
+ ...(hasStatePr
313379
+ ? {
313380
+ logStreamCallbacksCDKTF: {
313381
+ prepare: async () => {
313382
+ checkRunCtl = await GHCheckRun('synth', item);
313383
+ return checkRunCtl;
313384
+ },
313385
+ },
313386
+ logStreamCallbacksTF: {
313387
+ prepare: async () => {
313388
+ checkRunCtl = await GHCheckRun('terraform apply', item);
313389
+ return checkRunCtl;
313390
+ },
313391
+ },
313392
+ }
313393
+ : {}),
313394
+ });
313145
313395
  await tryPublishApply(item, applyOutput?.data?.output, item.kind);
313146
- const terraformOutputJson = await provisioner.runTerraform(item, ['output', '-json']);
313396
+ const terraformOutputJson = await provisioner.runTerraform(item, ['output', '-json'], null);
313147
313397
  if (!terraformOutputJson) {
313148
313398
  throw new Error(`Terraform output is empty for ${item.kind}/${item.metadata.name}`);
313149
313399
  }
@@ -313171,9 +313421,6 @@ async function* doApply(item, op, handler) {
313171
313421
  message: 'doApply',
313172
313422
  };
313173
313423
  await handler.writeTerraformOutputInTfResult(item, output);
313174
- if (item.metadata.annotations['firestartr.dev/last-state-pr'] || false) {
313175
- await addApplyCommitStatus(item, 'success', catalog_common.generic.getPrLinkFromAnnotationValue(item.metadata.annotations['firestartr.dev/last-state-pr']), 'Apply operation completed', `Terraform Apply ${item.metadata.name}`);
313176
- }
313177
313424
  handler.success();
313178
313425
  }
313179
313426
  catch (e) {
@@ -313185,7 +313432,11 @@ async function* doApply(item, op, handler) {
313185
313432
  error = e;
313186
313433
  }
313187
313434
  await tryPublishApply(item, error, item.kind);
313188
- cdktf_log('Error applying item %s: %O', item.metadata.name, error);
313435
+ // if there is a current checkRun working
313436
+ // we close it with an error
313437
+ if (checkRunCtl)
313438
+ checkRunCtl.fnOnError(error);
313439
+ operator_src_logger.error(`Error applying item ${item.metadata.name}: ${error}`);
313189
313440
  yield {
313190
313441
  item,
313191
313442
  reason: op,
@@ -313207,9 +313458,6 @@ async function* doApply(item, op, handler) {
313207
313458
  status: 'False',
313208
313459
  message: error.toString(),
313209
313460
  };
313210
- if (item.metadata.annotations['firestartr.dev/last-state-pr'] || false) {
313211
- await addApplyCommitStatus(item, 'failure', catalog_common.generic.getPrLinkFromAnnotationValue(item.metadata.annotations['firestartr.dev/last-state-pr']), 'Apply operation failed', `Terraform Apply ${item.metadata.name}`);
313212
- }
313213
313461
  handler.error();
313214
313462
  if (error) {
313215
313463
  await handler.writeTerraformOutputInTfResult(item, error);
@@ -313326,57 +313574,72 @@ class TFPlanItemVersion extends TFPlanItem {
313326
313574
  }
313327
313575
  }
313328
313576
 
313577
+ ;// CONCATENATED MODULE: ../terraform_provisioner/src/logger.ts
313578
+
313579
+ /* harmony default export */ const terraform_provisioner_src_logger = (catalog_common.logger);
313580
+
313329
313581
  ;// CONCATENATED MODULE: ../terraform_provisioner/src/utils.ts
313330
313582
 
313331
313583
 
313332
- //import Debug from "debug"
313333
313584
 
313334
- //const infolog: Debug.Debugger = Debug('firestartr:operator:cmd:terraform')
313585
+
313335
313586
  async function utils_validate(path, secrets) {
313336
313587
  return await tfExec(path, ['validate'], secrets);
313337
313588
  }
313338
- async function init(path, secrets) {
313339
- return await tfExec(path, ['init'], secrets);
313589
+ async function init(path, secrets, stream) {
313590
+ return await tfExec(path, ['init'], secrets, ['-input=false'], stream);
313340
313591
  }
313341
- async function initFromModule(path, source, secrets) {
313342
- return tfExec(path, ['init', `-from-module=${source}`], secrets, []);
313592
+ async function initFromModule(path, source, secrets, stream) {
313593
+ return tfExec(path, ['init', `-from-module=${source}`], secrets, [], stream);
313343
313594
  }
313344
- async function plan(path, secrets, format, args = ['plan']) {
313345
- const plan = await tfExec(path, args.concat(format === 'json' ? ['-json'] : []), secrets);
313595
+ async function plan(path, secrets, format, args = ['plan'], stream) {
313596
+ terraform_provisioner_src_logger.info(`Running terraform plan with ${format} in path ${path}`);
313597
+ const plan = await tfExec(path, args.concat(format === 'json' ? ['-json'] : []), secrets, ['-input=false'], stream);
313346
313598
  if (format === 'json') {
313347
313599
  const tfPlan = planGet(plan);
313348
313600
  return tfPlan;
313349
313601
  }
313350
313602
  return plan;
313351
313603
  }
313352
- async function apply(path, secrets) {
313353
- return await tfExec(path, ['apply', '-auto-approve'], secrets);
313604
+ async function apply(path, secrets, stream) {
313605
+ terraform_provisioner_src_logger.debug(`Running terraform apply in path ${path}`);
313606
+ return await tfExec(path, ['apply', '-auto-approve'], secrets, ['-input=false'], stream);
313354
313607
  }
313355
- async function destroy(path, secrets) {
313356
- return await tfExec(path, ['destroy', '-auto-approve'], secrets);
313608
+ async function destroy(path, secrets, stream) {
313609
+ terraform_provisioner_src_logger.debug(`Running terraform destroy in path ${path}`);
313610
+ return await tfExec(path, ['destroy', '-auto-approve'], secrets, ['-input=false'], stream);
313357
313611
  }
313358
313612
  async function output(path, secrets) {
313613
+ terraform_provisioner_src_logger.debug(`Running terraform output in path ${path}`);
313359
313614
  return await tfExec(path, ['output', '-json'], secrets, []);
313360
313615
  }
313361
- async function tfExec(path, args, secrets, extraArgs = ['-input=false']) {
313616
+ async function tfExec(path, args, secrets, extraArgs = ['-input=false'], stream) {
313362
313617
  // Format to TF_VAR variables -> https://developer.hashicorp.com/terraform/cli/config/environment-variables#tf_var_name
313363
313618
  for (const secret of secrets) {
313364
313619
  process.env[`${secret.key}`] = secret.value;
313365
313620
  }
313621
+ terraform_provisioner_src_logger.info(`Spawning terraform process ['terraform ${args.concat(extraArgs).join(' ')}'] in path '${path}'`);
313366
313622
  process.env['TF_PLUGIN_CACHE_DIR'] = '/home/terraform-plugins-cache';
313367
313623
  return new Promise((ok, ko) => {
313368
- const tfProcess = (0,external_child_process_.spawn)('terraform', args.concat(extraArgs), { cwd: path });
313369
- tfProcess.stdout.pipe(process.stdout);
313370
- tfProcess.stderr.pipe(process.stderr);
313624
+ const tfProcess = (0,external_child_process_.spawn)('terraform', args.concat(extraArgs), {
313625
+ cwd: path,
313626
+ stdio: ['inherit', 'pipe', 'pipe'],
313627
+ });
313371
313628
  let output = '';
313372
313629
  let flagStdoutEnd = false;
313373
313630
  let flagStderrEnd = false;
313374
313631
  let outputErrors = '';
313375
313632
  tfProcess.stdout.on('data', (log) => {
313376
- output += catalog_common.io.stripAnsi(log.toString());
313633
+ const line = catalog_common.io.stripAnsi(log.toString());
313634
+ output += line;
313635
+ if (stream)
313636
+ stream.write(line);
313377
313637
  });
313378
313638
  tfProcess.stderr.on('data', (log) => {
313379
- outputErrors += catalog_common.io.stripAnsi(log.toString());
313639
+ const line = catalog_common.io.stripAnsi(log.toString());
313640
+ outputErrors += line;
313641
+ if (stream)
313642
+ stream.write(line);
313380
313643
  });
313381
313644
  tfProcess.stdout.on('end', () => {
313382
313645
  flagStdoutEnd = true;
@@ -313391,9 +313654,12 @@ async function tfExec(path, args, secrets, extraArgs = ['-input=false']) {
313391
313654
  await catalog_common.generic.sleep(500);
313392
313655
  }
313393
313656
  if (code !== 0) {
313394
- ko(output + outputErrors);
313657
+ terraform_provisioner_src_logger.error(`Terraform output ${path}: ${output + outputErrors}`);
313658
+ terraform_provisioner_src_logger.error(`Terraform output ${path}: ${[output, outputErrors].join('')}`);
313659
+ ko([output, outputErrors].join(''));
313395
313660
  }
313396
313661
  else {
313662
+ terraform_provisioner_src_logger.info(`Terraform output ${path}: ${output}`);
313397
313663
  ok(output);
313398
313664
  }
313399
313665
  });
@@ -313407,7 +313673,9 @@ async function configureGit(ghToken) {
313407
313673
  'url."https://' + ghToken + '@github.com".insteadOf',
313408
313674
  'https://github.com',
313409
313675
  ];
313410
- const gitProcess = spawn('git', options);
313676
+ const gitProcess = spawn('git', options, {
313677
+ stdio: ['inherit', 'pipe', 'pipe'],
313678
+ });
313411
313679
  let output = '';
313412
313680
  gitProcess.on('data', (log) => {
313413
313681
  output += common.io.stripAnsi(log.toString());
@@ -313762,6 +314030,7 @@ function fCheckString(keys, refs) {
313762
314030
 
313763
314031
 
313764
314032
 
314033
+
313765
314034
  class project_tf_TFProjectManager {
313766
314035
  constructor(ctx) {
313767
314036
  this.tfOutput = '';
@@ -313770,6 +314039,14 @@ class project_tf_TFProjectManager {
313770
314039
  this.tfVarsJsonWriter = new WriterTfVarsJson(ctx.values, ctx.references);
313771
314040
  this.secrets = ctx.secrets;
313772
314041
  }
314042
+ setStreamCallbacks(fnData, fnEnd, reopen = true) {
314043
+ if (reopen || !this.stream)
314044
+ this.stream = new external_stream_.PassThrough();
314045
+ this.stream.on('data', (data) => {
314046
+ fnData(data.toString());
314047
+ });
314048
+ this.stream.on('end', fnEnd);
314049
+ }
313773
314050
  getOutput() {
313774
314051
  return this.tfOutput;
313775
314052
  }
@@ -313780,10 +314057,10 @@ class project_tf_TFProjectManager {
313780
314057
  this.tfVarsJsonWriter.writeToTerraformProject(external_path_.join(this.projectPath, 'terraform.tfvars.json'));
313781
314058
  }
313782
314059
  async __init() {
313783
- this.tfOutput += await init(this.projectPath, this.secrets);
314060
+ this.tfOutput += await init(this.projectPath, this.secrets, this.stream);
313784
314061
  }
313785
314062
  async __initFromModule() {
313786
- this.tfOutput += await init(this.projectPath, this.secrets);
314063
+ this.tfOutput += await init(this.projectPath, this.secrets, this.stream);
313787
314064
  }
313788
314065
  async validate() {
313789
314066
  await this.__init();
@@ -313793,24 +314070,27 @@ class project_tf_TFProjectManager {
313793
314070
  await this.__init();
313794
314071
  if (format === 'json')
313795
314072
  this.tfOutput = null;
313796
- this.tfOutput = await plan(this.projectPath, this.secrets, format);
314073
+ this.tfOutput = await plan(this.projectPath, this.secrets, format, ['plan'], this.stream);
314074
+ if (this.stream)
314075
+ this.stream.end();
313797
314076
  }
313798
314077
  async planDestroy(format) {
313799
314078
  await this.__init();
313800
314079
  if (format === 'json')
313801
314080
  this.tfOutput = null;
313802
- this.tfOutput = await plan(this.projectPath, this.secrets, format, [
313803
- 'plan',
313804
- '-destroy',
313805
- ]);
314081
+ this.tfOutput = await plan(this.projectPath, this.secrets, format, ['plan', '-destroy'], this.stream);
313806
314082
  }
313807
314083
  async apply() {
313808
314084
  await this.__init();
313809
- this.tfOutput += await apply(this.projectPath, this.secrets);
314085
+ this.tfOutput += await apply(this.projectPath, this.secrets, this.stream);
314086
+ if (this.stream)
314087
+ this.stream.end();
313810
314088
  }
313811
314089
  async destroy() {
313812
314090
  await this.__init();
313813
- this.tfOutput += await destroy(this.projectPath, this.secrets);
314091
+ this.tfOutput += await destroy(this.projectPath, this.secrets, this.stream);
314092
+ if (this.stream)
314093
+ this.stream.end();
313814
314094
  }
313815
314095
  async output() {
313816
314096
  await this.__init();
@@ -313904,6 +314184,7 @@ var lib_ajv_default = /*#__PURE__*/__nccwpck_require__.n(lib_ajv);
313904
314184
 
313905
314185
 
313906
314186
 
314187
+
313907
314188
  class TFProjectManagerRemote {
313908
314189
  constructor(ctx) {
313909
314190
  this.tfOutput = '';
@@ -313916,6 +314197,14 @@ class TFProjectManagerRemote {
313916
314197
  getOutput() {
313917
314198
  return this.tfOutput;
313918
314199
  }
314200
+ setStreamCallbacks(fnData, fnEnd, reopen = true) {
314201
+ if (reopen || !this.stream)
314202
+ this.stream = new external_stream_.PassThrough();
314203
+ this.stream.on('data', (data) => {
314204
+ fnData(data.toString());
314205
+ });
314206
+ this.stream.on('end', fnEnd);
314207
+ }
313919
314208
  async build() {
313920
314209
  external_fs_.rmSync(this.projectPath, { recursive: true, force: true });
313921
314210
  await this.__configGit();
@@ -313947,19 +314236,25 @@ insteadOf = https://github.com`);
313947
314236
  async plan(format) {
313948
314237
  await this.__init();
313949
314238
  if (format === 'json') {
313950
- this.tfOutput = await plan(this.projectPath, this.secrets, format);
314239
+ this.tfOutput = await plan(this.projectPath, this.secrets, format, ['plan'], this.stream);
313951
314240
  }
313952
314241
  else {
313953
- this.tfOutput += await plan(this.projectPath, this.secrets, format);
314242
+ this.tfOutput += await plan(this.projectPath, this.secrets, format, ['plan'], this.stream);
313954
314243
  }
314244
+ if (this.stream)
314245
+ this.stream.end();
313955
314246
  }
313956
314247
  async apply() {
313957
314248
  await this.__init();
313958
- this.tfOutput += await apply(this.projectPath, this.secrets);
314249
+ this.tfOutput += await apply(this.projectPath, this.secrets, this.stream);
314250
+ if (this.stream)
314251
+ this.stream.end();
313959
314252
  }
313960
314253
  async destroy() {
313961
314254
  await this.__init();
313962
- this.tfOutput += await destroy(this.projectPath, this.secrets);
314255
+ this.tfOutput += await destroy(this.projectPath, this.secrets, this.stream);
314256
+ if (this.stream)
314257
+ this.stream.end();
313963
314258
  }
313964
314259
  async planDestroy(format) {
313965
314260
  await this.__init();
@@ -313988,6 +314283,7 @@ insteadOf = https://github.com`);
313988
314283
 
313989
314284
 
313990
314285
 
314286
+
313991
314287
  const terraform_provisioner_ajv = new (lib_ajv_default())();
313992
314288
  const terraform_provisioner_validate = terraform_provisioner_ajv.compile(terraform_provisioner_src_schema);
313993
314289
  function validateContext(context) {
@@ -314005,7 +314301,8 @@ async function run() {
314005
314301
  await execCommand(command, tfProject);
314006
314302
  }
314007
314303
  // Programatic API
314008
- async function runTerraformProvisioner(context, command = 'init') {
314304
+ async function runTerraformProvisioner(context, command = 'init', streaming) {
314305
+ terraform_provisioner_src_logger.info(`Running command ${command} on a ${context.type} project`);
314009
314306
  validateContext(context);
314010
314307
  let tfProject = {};
314011
314308
  if (context.type === 'Inline') {
@@ -314014,10 +314311,14 @@ async function runTerraformProvisioner(context, command = 'init') {
314014
314311
  else if (context.type === 'Remote') {
314015
314312
  tfProject = new TFProjectManagerRemote(context);
314016
314313
  }
314314
+ if (streaming) {
314315
+ tfProject.setStreamCallbacks(streaming.fnData, streaming.fnEnd);
314316
+ }
314017
314317
  const output = await execCommand(command, tfProject);
314018
314318
  return output;
314019
314319
  }
314020
314320
  async function execCommand(command, tfProject) {
314321
+ terraform_provisioner_src_logger.info(`Executing command ${command} on ${tfProject.projectPath}`);
314021
314322
  await tfProject.build();
314022
314323
  switch (command) {
314023
314324
  case 'init':
@@ -314126,6 +314427,52 @@ async function* errorPolicyCompatibility(syncPolicy, generalPolicy, item, op) {
314126
314427
  await tryPublishError(item, op, message);
314127
314428
  }
314128
314429
 
314430
+ ;// CONCATENATED MODULE: ../operator/src/user-feedback-ops/tf-checkrun.ts
314431
+
314432
+ async function TFCheckRun(cmd, item) {
314433
+ const prInfo = tf_checkrun_extractPrInfo(item);
314434
+ if (!prInfo.prNumber) {
314435
+ throw new Error('TFCheckRun: prNumber not retrievable');
314436
+ }
314437
+ const checkRun = await github_0.feedback.createCheckRun(prInfo.org, prInfo.repo, tf_checkrun_helperCreateCheckRunName(cmd), {
314438
+ //Number(pr_number),
314439
+ pullNumber: Number(prInfo.prNumber),
314440
+ includeCheckRunComment: true,
314441
+ checkRunComment: `The TFWorkspace is being processed (cmd=${cmd}). Details: `,
314442
+ });
314443
+ checkRun.mdOptionsDetails({
314444
+ quotes: 'terraform',
314445
+ });
314446
+ checkRun.update('Initiating', 'queued');
314447
+ return {
314448
+ fnData: (d) => {
314449
+ checkRun.update(d.toString(), 'in_progress');
314450
+ },
314451
+ fnEnd: () => {
314452
+ checkRun.close('OK', true);
314453
+ },
314454
+ fnOnError: (err) => {
314455
+ checkRun.close('KO', false);
314456
+ },
314457
+ };
314458
+ }
314459
+ function tf_checkrun_helperCreateCheckRunName(cmd) {
314460
+ return `TFWorkspace - ${cmd}`;
314461
+ }
314462
+ function tf_checkrun_extractPrInfo(item) {
314463
+ const prInfo = item.metadata.annotations['firestartr.dev/last-state-pr'];
314464
+ const prNumber = prInfo.split('#')[1];
314465
+ if (!prNumber)
314466
+ throw new Error('No PR number found in CR');
314467
+ const org = prInfo.split('#')[0].split('/')[0];
314468
+ if (!org)
314469
+ throw new Error('No org found in CR');
314470
+ const repo = prInfo.split('#')[0].split('/')[1];
314471
+ if (!repo)
314472
+ throw new Error('No repo found in CR');
314473
+ return { prNumber, repo, org };
314474
+ }
314475
+
314129
314476
  ;// CONCATENATED MODULE: ../operator/src/tfworkspaces/process-operation.ts
314130
314477
 
314131
314478
 
@@ -314136,6 +314483,7 @@ async function* errorPolicyCompatibility(syncPolicy, generalPolicy, item, op) {
314136
314483
 
314137
314484
 
314138
314485
 
314486
+
314139
314487
  const TF_PROJECTS_PATH = '/tmp/tfworkspaces';
314140
314488
  function process_operation_processOperation(item, op, handler) {
314141
314489
  try {
@@ -314169,7 +314517,7 @@ function process_operation_processOperation(item, op, handler) {
314169
314517
  }
314170
314518
  }
314171
314519
  catch (e) {
314172
- src_logger.error('TERRAFORM_PROCESSOR_OP_ERROR', { metadata: { op, error: e } });
314520
+ operator_src_logger.error(`The Terraform processor encountered an error during operation '${op}': '${e}'.`);
314173
314521
  throw e;
314174
314522
  }
314175
314523
  }
@@ -314202,9 +314550,7 @@ async function* doPlanJSONFormat(item, op, handler) {
314202
314550
  message: 'Planning process started',
314203
314551
  };
314204
314552
  const deps = await handler.resolveReferences();
314205
- src_logger.info('TERRAFORM_PROCESSOR_PLAN_ASSESS_DEPS', {
314206
- metadata: { item, deps },
314207
- });
314553
+ operator_src_logger.info(`The Terraform processor is planning to assess dependencies for item '${item.kind}/${item.metadata.name}' with dependencies: '${deps}'.`);
314208
314554
  const context = buildProvisionerContext(item, deps);
314209
314555
  let planType = 'plan-json';
314210
314556
  if ('deletionTimestamp' in item.metadata) {
@@ -314267,9 +314613,7 @@ async function* doPlanJSONFormat(item, op, handler) {
314267
314613
  }
314268
314614
  catch (e) {
314269
314615
  console.error(e);
314270
- src_logger.error('TERRAFORM_PROCESSOR_PLAN_OBSERVE_ERROR', {
314271
- metadata: { item, error: e },
314272
- });
314616
+ operator_src_logger.error(`The Terraform processor encountered an error while observing the plan for item '${item.kind}/${item.metadata.name}': '${e}'.`);
314273
314617
  yield {
314274
314618
  item,
314275
314619
  reason: op,
@@ -314355,9 +314699,7 @@ async function* process_operation_sync(item, op, handler, syncPolicy, generalPol
314355
314699
  message: 'Sync process started',
314356
314700
  };
314357
314701
  if (!syncPolicy) {
314358
- src_logger.debug('TERRAFORM_PROCESSOR_NO_SYNC_POLICY_ONLY_OBSERVE', {
314359
- metadata: { op, item },
314360
- });
314702
+ operator_src_logger.debug(`The Terraform processor is only observing item '${item.kind}/${item.metadata.name}' because no sync policy was found for operation '${op}'.`);
314361
314703
  yield* doPlanJSONFormat(item, op, handler);
314362
314704
  return;
314363
314705
  }
@@ -314376,9 +314718,7 @@ async function* process_operation_sync(item, op, handler, syncPolicy, generalPol
314376
314718
  break;
314377
314719
  }
314378
314720
  default: {
314379
- src_logger.debug('TERRAFORM_PROCESSOR_POLICY_NOT_SUPPORTED', {
314380
- metadata: { syncPolicy, item },
314381
- });
314721
+ operator_src_logger.debug(`The Terraform processor detected a sync policy '${syncPolicy}' for item '${item.kind}/${item.metadata.name}' that is not supported.`);
314382
314722
  yield* doPlanJSONFormat(item, op, handler);
314383
314723
  break;
314384
314724
  }
@@ -314503,6 +314843,7 @@ async function* process_operation_nothing(item, op, handler) {
314503
314843
  * @param handler -
314504
314844
  */
314505
314845
  async function* process_operation_doApply(item, op, handler) {
314846
+ const checkRunCtl = await TFCheckRun('apply', item);
314506
314847
  try {
314507
314848
  yield {
314508
314849
  item,
@@ -314549,14 +314890,9 @@ async function* process_operation_doApply(item, op, handler) {
314549
314890
  message: 'Provisioning process started',
314550
314891
  };
314551
314892
  const deps = await handler.resolveReferences();
314552
- src_logger.info('TERRAFORM_PROCESSOR_APPLY_ASSESS_DEPS', {
314553
- metadata: { item, deps },
314554
- });
314893
+ operator_src_logger.info(`The Terraform processor is applying and assessing dependencies for item '${item.kind}/${item.metadata.name}' with dependencies: '${deps}'.`);
314555
314894
  const context = buildProvisionerContext(item, deps);
314556
- if (item.metadata.annotations['firestartr.dev/last-state-pr'] || false) {
314557
- await addApplyCommitStatus(item, 'pending', catalog_common.generic.getPrLinkFromAnnotationValue(item.metadata.annotations['firestartr.dev/last-state-pr']), 'Performing apply operation...', `Terraform Apply ${item.metadata.name}`);
314558
- }
314559
- const applyOutput = await runTerraformProvisioner(context, 'apply');
314895
+ const applyOutput = await runTerraformProvisioner(context, 'apply', checkRunCtl);
314560
314896
  await tryPublishApply(item, applyOutput, 'TFWorkspace');
314561
314897
  const terraformOutputJson = await runTerraformProvisioner(context, 'output');
314562
314898
  if (!terraformOutputJson) {
@@ -314588,17 +314924,13 @@ async function* process_operation_doApply(item, op, handler) {
314588
314924
  message: 'doApply',
314589
314925
  };
314590
314926
  await handler.writeTerraformOutputInTfResult(item, output);
314591
- if (item.metadata.annotations['firestartr.dev/last-state-pr'] || false) {
314592
- await addApplyCommitStatus(item, 'success', catalog_common.generic.getPrLinkFromAnnotationValue(item.metadata.annotations['firestartr.dev/last-state-pr']), 'Apply operation completed', `Terraform Apply ${item.metadata.name}`);
314593
- }
314594
314927
  handler.success();
314595
314928
  }
314596
314929
  catch (e) {
314930
+ checkRunCtl.fnOnError(e);
314597
314931
  console.error(e);
314598
314932
  await tryPublishApply(item, e, 'TFWorkspace');
314599
- src_logger.error('TERRAFORM_PROCESSOR_APPLY_ERROR', {
314600
- metadata: { item, op, error: e },
314601
- });
314933
+ operator_src_logger.error(`The Terraform processor encountered an error during operation '${op}' for item '${item.kind}/${item.metadata.name}': '${e}'.`);
314602
314934
  yield {
314603
314935
  item,
314604
314936
  reason: op,
@@ -314620,9 +314952,6 @@ async function* process_operation_doApply(item, op, handler) {
314620
314952
  status: 'False',
314621
314953
  message: JSON.stringify(e),
314622
314954
  };
314623
- if (item.metadata.annotations['firestartr.dev/last-state-pr'] || false) {
314624
- await addApplyCommitStatus(item, 'failure', catalog_common.generic.getPrLinkFromAnnotationValue(item.metadata.annotations['firestartr.dev/last-state-pr']), 'Apply operation failed', `Terraform Apply ${item.metadata.name}`);
314625
- }
314626
314955
  handler.error();
314627
314956
  if (e) {
314628
314957
  await handler.writeTerraformOutputInTfResult(item, e);
@@ -314927,30 +315256,22 @@ async function tryAcquireOrRenewLease(namespace, leaseDurationSeconds) {
314927
315256
  const name = 'firestartr-lease';
314928
315257
  const currentPod = await getCurrentPod(namespace);
314929
315258
  try {
314930
- src_logger.debug('LEADER_ELECTION_TRYING_ACQUIRE_LEASE', {
314931
- metadata: { name, namespace },
314932
- });
315259
+ operator_src_logger.debug(`Attempting to acquire the leader election lease for '${name}' in namespace '${namespace}'.`);
314933
315260
  const lease = await k8sApi.readNamespacedLease(name, namespace);
314934
315261
  const weAreTheLeader = lease.body.metadata.ownerReferences[0].uid === currentPod.metadata.uid;
314935
315262
  if (!weAreTheLeader) {
314936
- src_logger.debug('LEADER_ELECTION_LEASE_ACQUIRED_BY_ANOTHER_POD', {
314937
- metadata: { name, namespace },
314938
- });
315263
+ operator_src_logger.debug(`Another pod has acquired the leader election lease for '${name}' in namespace '${namespace}'.`);
314939
315264
  throw new LeaseAcquisitionError('Lease already acquired by another pod');
314940
315265
  }
314941
315266
  lease.body.spec.acquireTime = new client_node_dist.V1MicroTime();
314942
315267
  lease.body.spec.renewTime = new client_node_dist.V1MicroTime();
314943
315268
  lease.body.spec.leaseDurationSeconds = 30;
314944
- src_logger.debug('LEADER_ELECTION_LEASE_RENEWING', {
314945
- metadata: { name, namespace },
314946
- });
315269
+ operator_src_logger.debug(`Renewing the leader election lease for '${name}' in namespace '${namespace}'.`);
314947
315270
  await k8sApi.replaceNamespacedLease(name, namespace, lease.body);
314948
315271
  }
314949
315272
  catch (err) {
314950
315273
  if (err.response && err.response.statusCode === 404) {
314951
- src_logger.debug('LEADER_ELECTION_LEASE_NOT_FOUND_CREATING', {
314952
- metadata: { name, namespace },
314953
- });
315274
+ operator_src_logger.debug(`The leader election lease for '${name}' in namespace '${namespace}' was not found. Creating a new one.`);
314954
315275
  const lease = {
314955
315276
  apiVersion: 'coordination.k8s.io/v1',
314956
315277
  kind: 'Lease',
@@ -314973,16 +315294,12 @@ async function tryAcquireOrRenewLease(namespace, leaseDurationSeconds) {
314973
315294
  },
314974
315295
  };
314975
315296
  await k8sApi.createNamespacedLease(namespace, lease);
314976
- src_logger.debug('LEADER_ELECTION_LEASE_CREATED', {
314977
- metadata: { name, namespace },
314978
- });
315297
+ operator_src_logger.debug(`A new leader election lease has been created for '${name}' in namespace '${namespace}'.`);
314979
315298
  }
314980
315299
  else {
314981
315300
  if (err.response)
314982
315301
  console.log(err.response);
314983
- src_logger.debug('LEADER_ELECTION_LEASE_RENEWAL_ERROR', {
314984
- metadata: { name, namespace, error: err },
314985
- });
315302
+ operator_src_logger.debug(`An error occurred while renewing the leader election lease for '${name}' in namespace '${namespace}': '${err}'.`);
314986
315303
  throw err;
314987
315304
  }
314988
315305
  }
@@ -314993,9 +315310,7 @@ async function tryAcquireOrRenewLease(namespace, leaseDurationSeconds) {
314993
315310
  async function acquireLease(namespace, cb, interval = 10000) {
314994
315311
  try {
314995
315312
  await tryAcquireOrRenewLease(namespace, interval / 1000);
314996
- src_logger.debug('LEADER_ELECTION_LEASE_ACQUIRED_EXEC_CALLBACK', {
314997
- metadata: { namespace },
314998
- });
315313
+ operator_src_logger.debug(`Successfully acquired the leader election lease in namespace '${namespace}'. Executing the callback.`);
314999
315314
  cb();
315000
315315
  }
315001
315316
  catch (err) {
@@ -315003,9 +315318,7 @@ async function acquireLease(namespace, cb, interval = 10000) {
315003
315318
  if (err instanceof LeaseAcquisitionError) {
315004
315319
  console.error(`Failed to acquire Lease, retrying in ${interval / 1000} seconds`);
315005
315320
  }
315006
- src_logger.silly('LEADER_ELECTION_LEASE_ACQUIRED_FAILED_RETRY', {
315007
- metadata: { retryIn: interval / 1000 },
315008
- });
315321
+ operator_src_logger.silly(`Failed to acquire the leader election lease; will retry in '${interval / 1000}' seconds.`);
315009
315322
  await setTimeout(() => acquireLease(namespace, cb), interval);
315010
315323
  }
315011
315324
  }
@@ -315034,7 +315347,7 @@ function processOperationPlan(item, op, handler) {
315034
315347
  }
315035
315348
  }
315036
315349
  catch (e) {
315037
- src_logger.error('TFWORKSPACE_PROCESSOR_PLAN_ERROR', {
315350
+ operator_src_logger.error('TFWORKSPACE_PROCESSOR_PLAN_ERROR', {
315038
315351
  metadata: { item, error: e, op },
315039
315352
  });
315040
315353
  throw e;
@@ -315085,7 +315398,7 @@ async function* doPlanPlainTextFormat(item, op, handler, action) {
315085
315398
  message: 'Planning process started',
315086
315399
  };
315087
315400
  const deps = await handler.resolveReferences();
315088
- src_logger.info('TFWORKSPACE_PROCESSOR_PLAN_ASSESS_DEPS', {
315401
+ operator_src_logger.info('TFWORKSPACE_PROCESSOR_PLAN_ASSESS_DEPS', {
315089
315402
  metadata: { item, deps },
315090
315403
  });
315091
315404
  const context = processOperationPlan_buildProvisionerContext(item, deps);
@@ -315121,7 +315434,7 @@ async function* doPlanPlainTextFormat(item, op, handler, action) {
315121
315434
  }
315122
315435
  catch (e) {
315123
315436
  await processOperationPlan_publishPlan(item, JSON.stringify(e));
315124
- src_logger.error('TFWORKSPACE_PROCESSOR_PLAN_OBSERVING_ERROR', {
315437
+ operator_src_logger.error('TFWORKSPACE_PROCESSOR_PLAN_OBSERVING_ERROR', {
315125
315438
  metadata: { item, error: e },
315126
315439
  });
315127
315440
  yield {
@@ -315179,7 +315492,7 @@ async function* processOperationPlan_doPlanJSONFormat(item, op, handler, action)
315179
315492
  message: 'Planning process started',
315180
315493
  };
315181
315494
  const deps = await handler.resolveReferences();
315182
- src_logger.info('TFWORKSPACE_PROCESSOR_PLAN_DO_PLAN_ASSESS_DEPS', {
315495
+ operator_src_logger.info('TFWORKSPACE_PROCESSOR_PLAN_DO_PLAN_ASSESS_DEPS', {
315183
315496
  metadata: { item, deps },
315184
315497
  });
315185
315498
  const context = processOperationPlan_buildProvisionerContext(item, deps);
@@ -315239,7 +315552,7 @@ async function* processOperationPlan_doPlanJSONFormat(item, op, handler, action)
315239
315552
  }
315240
315553
  catch (e) {
315241
315554
  console.error(e);
315242
- src_logger.error('TFWORKSPACE_PROCESSOR_PLAN_DO_PLAN_ERROR', {
315555
+ operator_src_logger.error('TFWORKSPACE_PROCESSOR_PLAN_DO_PLAN_ERROR', {
315243
315556
  metadata: { item, error: e },
315244
315557
  });
315245
315558
  yield {
@@ -315606,42 +315919,30 @@ async function ctx_buildContext(claim, namespace, command) {
315606
315919
  let cr = null;
315607
315920
  let deps = null;
315608
315921
  compute['resolveDeps'] = async () => {
315609
- src_logger.debug('TFWORKSPACE_RESOLVE_DEPS_FOR_CLAIM', {
315610
- metadata: { name: claim.name },
315611
- });
315922
+ operator_src_logger.debug(`The Terraform workspace is resolving dependencies for the claim '${claim.name}'.`);
315612
315923
  // First, we bring the previous CR, if any, to get the tfStateKey
315613
- src_logger.debug('TFWORKSPACE_RESOLVE_GET_PREVIOUS_CR', {
315614
- metadata: { name: claim.name },
315615
- });
315924
+ operator_src_logger.debug(`The Terraform workspace is resolving and getting the previous custom resource for claim '${claim.name}'.`);
315616
315925
  previousCR = await getCRfromClaimRef(claim.kind, claim.name, namespace);
315617
315926
  let tfStateKey = null;
315618
315927
  if (previousCR) {
315619
- src_logger.debug('TFWORKSPACE_RESOLVE_PREVIOUS_CR_FOUND', {
315620
- metadata: { name: claim.name },
315621
- });
315928
+ operator_src_logger.debug(`The Terraform workspace found a previous custom resource for claim '${claim.name}'.`);
315622
315929
  tfStateKey = previousCR.spec.firestartr.tfStateKey;
315623
315930
  }
315624
315931
  else
315625
- src_logger.debug('TFWORKSPACE_RESOLVE_PREVIOUS_CR_NOT_FOUND', {
315626
- metadata: { name: claim.name },
315627
- });
315932
+ operator_src_logger.debug(`The Terraform workspace did not find a previous custom resource for claim '${claim.name}'.`);
315628
315933
  // Then we render the claim passing a function to resolve the refs in the k8s API
315629
- src_logger.debug('TFWORKSPACE_RESOLVE_START_RENDERING', {
315630
- metadata: { name: claim.name },
315631
- });
315934
+ operator_src_logger.debug(`The Terraform workspace is starting the rendering process for claim '${claim.name}'.`);
315632
315935
  cr = await cdk8s_renderer.renderTfWorkspace(claim, tfStateKey, getTFWorkspaceRefs, namespace);
315633
315936
  cr['metadata']['namespace'] = namespace;
315634
- src_logger.debug('TFWORKSPACE_RESOLVE_CR_RENDERED', { metadata: { cr } });
315937
+ operator_src_logger.debug(`The Terraform workspace has finished rendering the custom resource '${cr.kind}/${cr.metadata.name}' in namespace '${cr.metadata.namespace}'.`);
315635
315938
  // Finally, we resolve the deps in the rendered CR
315636
315939
  deps = await resolve(cr, getItemByItemPath, getSecret, namespace);
315637
- src_logger.debug('TFWORKSPACE_RESOLVE_DEPS_RESOLVED', {
315638
- metadata: { name: claim.name },
315639
- });
315940
+ operator_src_logger.debug(`The Terraform workspace has finished resolving all dependencies for claim '${claim.name}'.`);
315640
315941
  };
315641
315942
  compute['dryRunExec'] = async () => {
315642
315943
  // We assume that if there is no previous CR, we are creating a new one
315643
315944
  // This will be preceeded by the resolveDeps function
315644
- src_logger.debug('TFWORKSPACE_DRY_RUN_VALIDATING_CR', { metadata: { cr } });
315945
+ operator_src_logger.debug(`The Terraform workspace is dry-running the validation for custom resource '${cr.kind}/${cr.metadata.name}' in namespace '${cr.metadata.namespace}'.`);
315645
315946
  if (!previousCR) {
315646
315947
  await createDryRun(cr, namespace);
315647
315948
  }
@@ -315649,17 +315950,15 @@ async function ctx_buildContext(claim, namespace, command) {
315649
315950
  cr.metadata.resourceVersion = previousCR.metadata.resourceVersion;
315650
315951
  await updateDryRun(cr, namespace);
315651
315952
  }
315652
- src_logger.debug('TFWORKSPACE_DRY_RUN_VALIDATED_CR', { metadata: { cr } });
315953
+ operator_src_logger.debug(`The Terraform workspace has finished validating the custom resource '${cr.kind}/${cr.metadata.name}' in namespace '${cr.metadata.namespace}'.`);
315653
315954
  };
315654
315955
  compute['runProvision'] = async () => {
315655
- src_logger.debug('TFWORKSPACE_RUN_PROVISION_INIT_TERRAFORM', {
315956
+ operator_src_logger.debug('TFWORKSPACE_RUN_PROVISION_INIT_TERRAFORM', {
315656
315957
  metadata: { cr, command },
315657
315958
  });
315658
315959
  const data = await buildProvisionerContext(cr, deps);
315659
315960
  const result = await runTerraformProvisioner(data, command);
315660
- src_logger.debug('TFWORKSPACE_RUN_PROVISION_FINISHED_TERRAFORM', {
315661
- metadata: { cr, command },
315662
- });
315961
+ operator_src_logger.debug(`The Terraform workspace has finished the '${command}' command for provisioning custom resource '${cr.kind}/${cr.metadata.name}' in namespace '${cr.metadata.namespace}'.`);
315663
315962
  return result;
315664
315963
  };
315665
315964
  return new Ctx({}, compute);
@@ -315916,7 +316215,7 @@ class CRStateMetrics {
315916
316215
  catch (err) {
315917
316216
  console.log(`CRStateMetrics: update ${err}`);
315918
316217
  this.onUpdate = false;
315919
- src_logger.error('CR_METRICS_UPDATE', { error: err });
316218
+ operator_src_logger.error('CR_METRICS_UPDATE', { error: err });
315920
316219
  }
315921
316220
  this.onUpdate = false;
315922
316221
  }
@@ -316010,7 +316309,7 @@ async function startCRStates(meter, kindList, namespace) {
316010
316309
 
316011
316310
  const deploymentName = catalog_common.environment.getFromEnvironment(catalog_common.types.envVars.operatorDeploymentName) || 'firestartr-firestartr-controller';
316012
316311
  const DEFAULT_OPERATOR_DEPLOY = (/* unused pure expression or super */ null && (deploymentName));
316013
- async function tfPlanner(claimFilePath, claim, namespace, debug, jobTtl, cmd = 'plan') {
316312
+ async function tfPlanner(claimFilePath, claim, namespace, debug, jobTtl = 300, cmd = 'plan') {
316014
316313
  const { kc } = await getConnection();
316015
316314
  const k8sApi = kc.makeApiClient(client.AppsV1Api);
316016
316315
  const batchV1Api = kc.makeApiClient(client.BatchV1Api);
@@ -316030,10 +316329,12 @@ async function tfPlanner(claimFilePath, claim, namespace, debug, jobTtl, cmd = '
316030
316329
  ? '/library/scripts/run.sh'
316031
316330
  : '/library/run.sh';
316032
316331
  job.spec = new client.V1JobSpec();
316033
- if (jobTtl)
316034
- job.spec.ttlSecondsAfterFinished = jobTtl;
316332
+ job.spec.ttlSecondsAfterFinished = jobTtl;
316035
316333
  job.spec.template = controllerDeploy.body.spec
316036
316334
  .template;
316335
+ // set activeDeadlineSeconds to force terminate jobs that exceed this time
316336
+ // see https://kubernetes.io/docs/concepts/workloads/controllers/job/#job-termination-and-cleanup
316337
+ job.spec.activeDeadlineSeconds = 3600;
316037
316338
  job.spec.template.spec.containers[0].command = [
316038
316339
  'sh',
316039
316340
  '-c',
@@ -316045,9 +316346,12 @@ async function tfPlanner(claimFilePath, claim, namespace, debug, jobTtl, cmd = '
316045
316346
  }
316046
316347
  job.spec.template.spec.restartPolicy = 'Never';
316047
316348
  job.metadata = metadata;
316349
+ // we exclude logs to be sent to datadog
316350
+ job.spec.template.metadata.annotations = {
316351
+ 'ad.datadoghq.com/logs_exclude': 'true',
316352
+ };
316048
316353
  await batchV1Api.createNamespacedJob(namespace, job);
316049
316354
  await copyClaimAndGetLogs(namespace, job.metadata.name, claimFilePath);
316050
- await batchV1Api.deleteNamespacedJob(job.metadata.name, namespace);
316051
316355
  }
316052
316356
  async function copyClaimAndGetLogs(namespace, jobName, sourcePath) {
316053
316357
  const { kc } = await getConnection();
@@ -316201,7 +316505,7 @@ function runOperator(opts) {
316201
316505
  importModeActive = importMode;
316202
316506
  if (importModeSkipPlan)
316203
316507
  importModeSkipPlanActive = importModeSkipPlan;
316204
- src_logger.info('START_OPERATOR', { ...opts });
316508
+ operator_src_logger.info(`started the operator with options ${JSON.stringify(opts)}`);
316205
316509
  const run = ignoreLease
316206
316510
  ? (_namespace, cb) => cb()
316207
316511
  : acquireLease;
@@ -316221,7 +316525,7 @@ function runOperator(opts) {
316221
316525
  .catch((e) => {
316222
316526
  console.log('exit catch kind', kind);
316223
316527
  console.error(e);
316224
- src_logger.error('CRASHED', { kind, error: e });
316528
+ operator_src_logger.error('CRASHED', { kind, error: e });
316225
316529
  })
316226
316530
  .finally(() => {
316227
316531
  console.log('kind', kind);
@@ -316253,7 +316557,7 @@ function getProvisionImplementation(plural) {
316253
316557
  }
316254
316558
  if (!implementation)
316255
316559
  throw new Error(`No implementation found for ${plural}`);
316256
- src_logger.info('GOT_PROVISION_IMPL', { kind: plural });
316560
+ operator_src_logger.info(`Retrieved the provision implementation for the kind '${plural}'`);
316257
316561
  return implementation;
316258
316562
  }
316259
316563