@firestartr/cli 1.48.0-snapshot-0 → 1.48.1-snapshot-0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (27) hide show
  1. package/build/index.js +1075 -756
  2. package/build/packages/catalog_common/index.d.ts +8 -0
  3. package/build/packages/catalog_common/src/io/write.d.ts +2 -2
  4. package/build/packages/catalog_common/src/logger/index.d.ts +2 -0
  5. package/build/packages/catalog_common/src/logger/logger.d.ts +9 -0
  6. package/build/packages/catalog_common/src/logger/utils.d.ts +1 -0
  7. package/build/packages/cdk8s_renderer/src/validations/crSize.d.ts +1 -0
  8. package/build/packages/features_preparer/src/logger.d.ts +9 -0
  9. package/build/packages/features_renderer/index.d.ts +4 -2
  10. package/build/packages/features_renderer/src/auxiliar.d.ts +1 -2
  11. package/build/packages/features_renderer/src/render.d.ts +2 -0
  12. package/build/packages/github/index.d.ts +5 -0
  13. package/build/packages/github/src/check_run.d.ts +83 -0
  14. package/build/packages/github/src/logger.d.ts +9 -0
  15. package/build/packages/operator/src/logger.d.ts +2 -2
  16. package/build/packages/operator/src/user-feedback-ops/gh-checkrun.d.ts +5 -0
  17. package/build/packages/operator/src/user-feedback-ops/tf-checkrun.d.ts +5 -0
  18. package/build/packages/provisioner/src/cdktf.d.ts +3 -1
  19. package/build/packages/provisioner/src/logger.d.ts +9 -0
  20. package/build/packages/provisioner/src/resources/resource.d.ts +10 -0
  21. package/build/packages/provisioner/src/terraform.d.ts +7 -5
  22. package/build/packages/terraform_provisioner/index.d.ts +1 -1
  23. package/build/packages/terraform_provisioner/src/logger.d.ts +9 -0
  24. package/build/packages/terraform_provisioner/src/project_tf.d.ts +4 -0
  25. package/build/packages/terraform_provisioner/src/project_tf_remote.d.ts +4 -0
  26. package/build/packages/terraform_provisioner/src/utils.d.ts +8 -6
  27. package/package.json +1 -1
package/build/index.js CHANGED
@@ -288941,14 +288941,131 @@ var external_path_ = __nccwpck_require__(71017);
288941
288941
  var external_path_default = /*#__PURE__*/__nccwpck_require__.n(external_path_);
288942
288942
  // EXTERNAL MODULE: ../../node_modules/yaml/dist/index.js
288943
288943
  var yaml_dist = __nccwpck_require__(8447);
288944
- // EXTERNAL MODULE: ../../node_modules/debug/src/index.js
288945
- var src = __nccwpck_require__(67984);
288946
- var src_default = /*#__PURE__*/__nccwpck_require__.n(src);
288944
+ // EXTERNAL MODULE: ../../node_modules/winston/lib/winston.js
288945
+ var winston = __nccwpck_require__(66752);
288946
+ var winston_default = /*#__PURE__*/__nccwpck_require__.n(winston);
288947
+ ;// CONCATENATED MODULE: ../catalog_common/src/logger/utils.ts
288948
+ // https://siderite.dev/blog/jsonstringify-with-circular-references.html/#at2011170946
288949
+ function fixCircularReferences(o) {
288950
+ const weirdTypes = [
288951
+ Int8Array,
288952
+ Uint8Array,
288953
+ Uint8ClampedArray,
288954
+ Int16Array,
288955
+ Uint16Array,
288956
+ Int32Array,
288957
+ Uint32Array,
288958
+ BigInt64Array,
288959
+ BigUint64Array,
288960
+ Float32Array,
288961
+ Float64Array,
288962
+ ArrayBuffer,
288963
+ SharedArrayBuffer,
288964
+ DataView,
288965
+ ];
288966
+ const defs = new Map();
288967
+ return (k, v) => {
288968
+ if (k && v === o) {
288969
+ return `[${String(k)} is the same as original object]`;
288970
+ }
288971
+ if (v === undefined || v === null) {
288972
+ return v;
288973
+ }
288974
+ // Check for the Timeout constructor. This will also catch TimersList indirectly
288975
+ // since TimersList is part of the circular structure *of* a Timeout object.
288976
+ if (v && v.constructor && v.constructor.name === 'Timeout') {
288977
+ return '[Node.js internal timer object]';
288978
+ }
288979
+ // An alternative check could be `v instanceof Timeout` but the constructor name
288980
+ // check is more reliable for these internal types.
288981
+ const weirdType = weirdTypes.find((t) => v instanceof t);
288982
+ if (weirdType) {
288983
+ return weirdType.toString();
288984
+ }
288985
+ if (typeof v === 'function') {
288986
+ return v.toString();
288987
+ }
288988
+ if (v && typeof v === 'object') {
288989
+ const def = defs.get(v);
288990
+ if (def) {
288991
+ return `[${String(k)} is the same as ${def}]`;
288992
+ }
288993
+ defs.set(v, String(k));
288994
+ }
288995
+ return v;
288996
+ };
288997
+ }
288998
+
288999
+ ;// CONCATENATED MODULE: ../catalog_common/src/logger/logger.ts
289000
+
289001
+
289002
+ const validLogLevels = [
289003
+ 'error',
289004
+ 'warn',
289005
+ 'info',
289006
+ 'debug',
289007
+ 'verbose',
289008
+ 'silly',
289009
+ ];
289010
+ let initiated = false;
289011
+ let logger = null;
289012
+ // Type guard to check if a value is a valid LogLevel
289013
+ function isValidLogLevel(level) {
289014
+ return (typeof level === 'string' && validLogLevels.includes(level));
289015
+ }
289016
+ function initLogger() {
289017
+ if (initiated)
289018
+ return;
289019
+ const logLevel = process.env.LOG_LEVEL && isValidLogLevel(process.env.LOG_LEVEL)
289020
+ ? process.env.LOG_LEVEL
289021
+ : 'info';
289022
+ logger = winston_default().createLogger({
289023
+ level: logLevel,
289024
+ exitOnError: false,
289025
+ format: winston.format.combine(winston.format.timestamp({ format: 'YYYY-MM-DD HH:mm:ss' }), winston.format.json()),
289026
+ transports: [
289027
+ new winston.transports.Console({
289028
+ level: logLevel,
289029
+ }),
289030
+ ],
289031
+ });
289032
+ initiated = true;
289033
+ }
289034
+ function doLog(level, args) {
289035
+ initLogger();
289036
+ const [message, data] = args;
289037
+ let finalMessage = message;
289038
+ if (data) {
289039
+ const fx = fixCircularReferences(data.metadata);
289040
+ try {
289041
+ finalMessage =
289042
+ finalMessage + ' | ' + JSON.stringify(data?.metadata, fx, 2);
289043
+ }
289044
+ catch (err) {
289045
+ console.error(`Serializing ${message}: ${err}`);
289046
+ return;
289047
+ }
289048
+ }
289049
+ logger[level].apply(logger, [finalMessage]);
289050
+ }
289051
+ const logger_log = {
289052
+ error: (...args) => doLog('error', args),
289053
+ warn: (...args) => doLog('warn', args),
289054
+ info: (...args) => doLog('info', args),
289055
+ debug: (...args) => doLog('debug', args),
289056
+ verbose: (...args) => doLog('verbose', args),
289057
+ silly: (...args) => doLog('silly', args),
289058
+ };
289059
+ /* harmony default export */ const logger_logger = (logger_log);
289060
+
289061
+ ;// CONCATENATED MODULE: ../catalog_common/src/logger/index.ts
289062
+
289063
+ /* harmony default export */ const src_logger = (logger_logger);
289064
+
288947
289065
  ;// CONCATENATED MODULE: ../catalog_common/src/io/common.ts
288948
289066
 
288949
289067
 
288950
289068
 
288951
- const messageLog = src_default()('firestartr:catalog_common:io:common');
288952
289069
  const ComponentPaths = (/* unused pure expression or super */ null && ([
288953
289070
  'apiVersion',
288954
289071
  'kind',
@@ -289033,25 +289150,25 @@ function transformKind(kind) {
289033
289150
  }
289034
289151
  }
289035
289152
  function getPath(kind, name, catalogPath) {
289036
- messageLog(`Getting path for kind ${kind} and name ${name} in catalog path ${catalogPath}`);
289153
+ src_logger.debug(`Getting path for kind ${kind} and name ${name} in catalog path ${catalogPath}`);
289037
289154
  return external_path_.join(catalogPath, transformKind(kind), name + '.yaml');
289038
289155
  }
289039
289156
  function getKindPath(kind, catalogPath) {
289040
- messageLog(`Getting path for kind ${kind} in catalog path ${catalogPath}`);
289157
+ src_logger.debug(`Getting path for kind ${kind} in catalog path ${catalogPath}`);
289041
289158
  return external_path_.join(catalogPath, transformKind(kind));
289042
289159
  }
289043
289160
  function fromYaml(data) {
289044
289161
  const result = yaml_dist.parse(data);
289045
- messageLog('Loading YAML data: %O', result);
289162
+ src_logger.debug('Loading YAML data: %O', result);
289046
289163
  return result;
289047
289164
  }
289048
289165
  function toYaml(data, opts = {}) {
289049
- messageLog('opts', opts);
289166
+ src_logger.debug('opts', opts);
289050
289167
  const result = yaml_dist.stringify(data);
289051
289168
  return result;
289052
289169
  }
289053
289170
  function dumpYaml(data) {
289054
- messageLog('Dumping object data to YAML %O', data);
289171
+ src_logger.debug('Dumping object data to YAML %O', data);
289055
289172
  return yaml_dist.stringify(data);
289056
289173
  }
289057
289174
 
@@ -289059,7 +289176,6 @@ function dumpYaml(data) {
289059
289176
  var external_child_process_ = __nccwpck_require__(32081);
289060
289177
  ;// CONCATENATED MODULE: ../catalog_common/src/generic/random.ts
289061
289178
 
289062
- const random_messageLog = src_default()('firestartr:catalog_common:generic:random');
289063
289179
  function randomString(length = 10) {
289064
289180
  let result = '';
289065
289181
  const characters = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789';
@@ -289069,7 +289185,7 @@ function randomString(length = 10) {
289069
289185
  result += characters.charAt(Math.floor(Math.random() * charactersLength));
289070
289186
  counter += 1;
289071
289187
  }
289072
- random_messageLog('Generated random string %s', result);
289188
+ src_logger.debug(`Generated random string ${result}`);
289073
289189
  return result;
289074
289190
  }
289075
289191
  function shuffleArray(array) {
@@ -289104,17 +289220,16 @@ function shuffleObject(obj, shuffleArrays = false) {
289104
289220
 
289105
289221
 
289106
289222
 
289107
- const clone_catalog_messageLog = src_default()('firestartr:catalog_common:io:clone_catalog');
289108
289223
  function cloneCatalog(catalogPath, dest = _calculateRandomDestination()) {
289109
- clone_catalog_messageLog(`Cloning catalog from ${catalogPath} to ${dest}`);
289224
+ src_logger.info(`Cloning catalog from ${catalogPath} to ${dest}`);
289110
289225
  return new Promise((ok, ko) => {
289111
289226
  (0,external_child_process_.exec)(`cp -a ${catalogPath} ${dest}`, (error, _stdout, _stderr) => {
289112
289227
  if (error) {
289113
- clone_catalog_messageLog(`Error cloning catalog: ${error.message}`);
289228
+ src_logger.error(`Error cloning catalog: ${error.message}`);
289114
289229
  return ko(error.message);
289115
289230
  }
289116
289231
  else {
289117
- clone_catalog_messageLog(`Catalog cloned to successfully to ${dest}`);
289232
+ src_logger.info(`Catalog cloned to successfully to ${dest}`);
289118
289233
  return ok(dest);
289119
289234
  }
289120
289235
  });
@@ -289133,29 +289248,28 @@ var external_fs_default = /*#__PURE__*/__nccwpck_require__.n(external_fs_);
289133
289248
 
289134
289249
 
289135
289250
 
289136
- const write_messageLog = src_default()('firestartr:catalog_common:io:write');
289137
289251
  function writeEntity(entity, path) {
289138
289252
  try {
289139
289253
  entity['metadata']['annotations']['fire-starter.dev/timestamp'] =
289140
289254
  Math.floor(Date.now() / 1000).toString();
289141
289255
  //If we have an status, we remove it
289142
- write_messageLog(`Writing to catalog ${path} entity %O`, entity);
289256
+ src_logger.debug(`Writing to catalog ${path} entity ${entity}`);
289143
289257
  external_fs_.writeFileSync(getPath(entity['kind'], entity['metadata']['name'], path), dumpYaml(entity));
289144
289258
  }
289145
289259
  catch (err) {
289146
- write_messageLog('Error writing entity, error %O', err);
289260
+ src_logger.error(`Error writing entity '${entity.kind}', error ${err}`);
289147
289261
  throw `writeEntity: ${entity.kind} ${err}`;
289148
289262
  }
289149
289263
  }
289150
289264
  function writeClaim(claim, claimsPath) {
289151
289265
  try {
289152
289266
  const kindFolder = `${claim['kind']}s`.toLowerCase().replace('claim', '');
289153
- write_messageLog(`Writing to gitops ${claimsPath}/${kindFolder} claim %O`, claim);
289267
+ src_logger.debug(`Writing to gitops ${claimsPath}/${kindFolder} claim ${claim}`);
289154
289268
  external_fs_.mkdirSync(external_path_.join(claimsPath, kindFolder), { recursive: true });
289155
289269
  external_fs_.writeFileSync(getPathClaim(claim['kind'], claim['name'], claimsPath), dumpYaml(claim));
289156
289270
  }
289157
289271
  catch (err) {
289158
- write_messageLog('Error writing claim, error %O', err);
289272
+ src_logger.error(`Error writing claim, error ${err}`);
289159
289273
  throw `writeClaim: ${claim.kind} ${err}`;
289160
289274
  }
289161
289275
  }
@@ -289172,7 +289286,7 @@ function writeYamlFile(fileName, data, pathFile = '/tmp') {
289172
289286
  external_fs_.writeFileSync(external_path_.join(pathFile, fileName), dumpYaml(data));
289173
289287
  }
289174
289288
  catch (err) {
289175
- write_messageLog('Error writing yaml file, error %O', err);
289289
+ src_logger.error(`Error writing yaml file, error ${err}`);
289176
289290
  throw `writeYamlFile: ${fileName} ${err}`;
289177
289291
  }
289178
289292
  }
@@ -289181,23 +289295,23 @@ function getPathClaim(kind, name, claimsPath) {
289181
289295
  }
289182
289296
  function renameEntity(entity, catalogPath, oldname) {
289183
289297
  try {
289184
- write_messageLog('Renaming oldname %s in %O', oldname, entity);
289298
+ src_logger.debug(`Renaming oldname ${oldname} in ${entity}`);
289185
289299
  const oldPath = getPath(entity.kind, oldname, catalogPath);
289186
289300
  const newPath = getPath(entity.kind, entity.metadata.name, catalogPath);
289187
289301
  external_fs_.renameSync(oldPath, newPath);
289188
289302
  }
289189
289303
  catch (err) {
289190
- write_messageLog('Error writing entity, error %O', err);
289304
+ src_logger.error(`Error writing entity, error ${err}`);
289191
289305
  throw `renameEntity: ${entity.kind} ${err}`;
289192
289306
  }
289193
289307
  }
289194
289308
  function removeEntity(entity, catalogPath) {
289195
289309
  try {
289196
- write_messageLog(`Removing entity ${entity.kind}/${entity.metadata.name} in catalog ${catalogPath}`);
289310
+ src_logger.debug(`Removing entity ${entity.kind}/${entity.metadata.name} in catalog ${catalogPath}`);
289197
289311
  external_fs_.rmSync(getPath(entity.kind, entity.metadata.name, catalogPath));
289198
289312
  }
289199
289313
  catch (err) {
289200
- write_messageLog(`Error removing entity ${entity.kind}/${entity.metadata.name} in catalog ${catalogPath}: ${err}`);
289314
+ src_logger.error(`Error removing entity ${entity.kind}/${entity.metadata.name} in catalog ${catalogPath}: ${err}`);
289201
289315
  throw `removeEntity: ${entity.kind} ${err}`;
289202
289316
  }
289203
289317
  }
@@ -289213,21 +289327,21 @@ function moveFile(oldPath, newPath) {
289213
289327
  external_fs_.cpSync(oldPath, newPath);
289214
289328
  external_fs_.rmSync(oldPath);
289215
289329
  }
289216
- function writeFunctionLog(functionName, log) {
289330
+ function writeFunctionLog(functionName, logStream) {
289217
289331
  try {
289218
- external_fs_.writeFileSync(external_path_.join('/tmp', `${functionName}.${randomString(5)}.log`), log + '\n');
289332
+ external_fs_.writeFileSync(external_path_.join('/tmp', `${functionName}.${randomString(5)}.log`), logStream + '\n');
289219
289333
  }
289220
289334
  catch (err) {
289221
- write_messageLog('Error writing log, error %O', err);
289335
+ src_logger.error(`Error writing log, error ${err}`);
289222
289336
  throw `writeLog: ${functionName} ${err}`;
289223
289337
  }
289224
289338
  }
289225
- function writeLogFile(fileName, log) {
289339
+ function writeLogFile(fileName, logStream) {
289226
289340
  try {
289227
- external_fs_.appendFileSync(external_path_.join('/tmp', fileName + '.log'), log + '\n');
289341
+ external_fs_.appendFileSync(external_path_.join('/tmp', fileName + '.log'), logStream + '\n');
289228
289342
  }
289229
289343
  catch (err) {
289230
- write_messageLog('Error writing log, error %O', err);
289344
+ src_logger.error(`Error writing log, error ${err}`);
289231
289345
  throw `writeLog: ${fileName} ${err}`;
289232
289346
  }
289233
289347
  }
@@ -289237,7 +289351,6 @@ function writeLogFile(fileName, log) {
289237
289351
 
289238
289352
 
289239
289353
 
289240
- const read_messageLog = src_default()('firestartr:catalog_common:io:read');
289241
289354
  function readEntity(kind, name, catalogPaths) {
289242
289355
  try {
289243
289356
  if (typeof catalogPaths === 'string') {
@@ -289246,7 +289359,7 @@ function readEntity(kind, name, catalogPaths) {
289246
289359
  let data = false;
289247
289360
  for (const catalogPath of catalogPaths) {
289248
289361
  try {
289249
- read_messageLog(`Reading entity ${kind}/${name} from catalog ${catalogPath}`);
289362
+ src_logger.debug(`Reading entity ${kind}/${name} from catalog ${catalogPath}`);
289250
289363
  const entityPath = getPath(kind, name, catalogPath);
289251
289364
  if (external_fs_.existsSync(entityPath)) {
289252
289365
  if (data) {
@@ -289256,7 +289369,7 @@ function readEntity(kind, name, catalogPaths) {
289256
289369
  }
289257
289370
  }
289258
289371
  catch (err) {
289259
- read_messageLog('readEntity: cached error %s', err);
289372
+ src_logger.debug('readEntity: cached error %s', err);
289260
289373
  if (err === 'DUPLICATED') {
289261
289374
  throw `Error reading entity: Duplicated ${kind}/${name} in ${catalogPaths.join(', ')}`;
289262
289375
  }
@@ -289268,7 +289381,7 @@ function readEntity(kind, name, catalogPaths) {
289268
289381
  return fromYaml(data);
289269
289382
  }
289270
289383
  catch (err) {
289271
- read_messageLog(err);
289384
+ src_logger.error(err);
289272
289385
  throw `readEntity->: ${kind}/${name}: ${err}`;
289273
289386
  }
289274
289387
  }
@@ -289276,13 +289389,13 @@ function listByKind(kind, catalogPaths, callback, exclude = []) {
289276
289389
  if (typeof catalogPaths === 'string') {
289277
289390
  catalogPaths = [catalogPaths];
289278
289391
  }
289279
- read_messageLog('CATALOGS_PATHS_ %O', catalogPaths);
289392
+ src_logger.debug(`CATALOGS_PATHS_ ${catalogPaths}`);
289280
289393
  const list = [];
289281
289394
  catalogPaths.forEach((catalogPath) => {
289282
289395
  list.push(...external_fs_.readdirSync(getKindPath(kind, catalogPath)));
289283
289396
  });
289284
- read_messageLog('LIST_ %O', list);
289285
- read_messageLog(`Listing entities of kind ${kind} from catalogs`);
289397
+ src_logger.debug(`LIST_ ${list}`);
289398
+ src_logger.debug(`Listing entities of kind ${kind} from catalogs`);
289286
289399
  return list
289287
289400
  .filter((file) => file.match(/\.yaml$/))
289288
289401
  .filter((file) => exclude.indexOf(file.replace(/\.yaml/, '')) === -1)
@@ -290213,6 +290326,9 @@ class CsvWriter {
290213
290326
  }
290214
290327
  /* harmony default export */ const csv_generator = (CsvWriter);
290215
290328
 
290329
+ // EXTERNAL MODULE: ../../node_modules/debug/src/index.js
290330
+ var src = __nccwpck_require__(67984);
290331
+ var src_default = /*#__PURE__*/__nccwpck_require__.n(src);
290216
290332
  ;// CONCATENATED MODULE: ../catalog_common/src/generic/logger.ts
290217
290333
 
290218
290334
 
@@ -290275,9 +290391,8 @@ var lodash_default = /*#__PURE__*/__nccwpck_require__.n(lodash);
290275
290391
 
290276
290392
 
290277
290393
  const { camelCase } = (lodash_default());
290278
- const name_log = src_default()('firestartr:catalog_common:generic:name');
290279
290394
  function normalizeName(name) {
290280
- name_log('Normalizing name %s', name);
290395
+ src_logger.debug(`Normalizing name ${name}`);
290281
290396
  return name.replace(/[^a-z0-9]/gi, '-').toLowerCase();
290282
290397
  }
290283
290398
  function transformKeysToCamelCase(obj) {
@@ -290618,7 +290733,6 @@ const ExternalSecretsApiGroup = 'external-secrets.io';
290618
290733
 
290619
290734
  ;// CONCATENATED MODULE: ../catalog_common/src/environment/index.ts
290620
290735
 
290621
- const environment_messageLog = src_default()('firestartr:catalog_common:environment');
290622
290736
  function getFromEnvironment(envVar) {
290623
290737
  return process.env[envVar];
290624
290738
  }
@@ -290636,7 +290750,7 @@ function getFromEnvironmentAsBoolean(envVar) {
290636
290750
  }
290637
290751
  function checkExistOnEnvironment(envVar) {
290638
290752
  const environmentValue = getFromEnvironment(envVar);
290639
- environment_messageLog(`Checking if environment variable ${envVar} exists: ${environmentValue}`);
290753
+ src_logger.debug(`Checking if environment variable ${envVar} exists: ${environmentValue}`);
290640
290754
  if (!environmentValue || environmentValue === '') {
290641
290755
  return false;
290642
290756
  }
@@ -290682,30 +290796,29 @@ const fullMembersTeam = getFromEnvironmentWithDefault(envVars.fullOrgGroup, `${o
290682
290796
  ;// CONCATENATED MODULE: ../catalog_common/src/features/tarballs.ts
290683
290797
 
290684
290798
 
290685
- const tarballs_messageLog = src_default()('firestartr:catalog_common:features:tarballs');
290686
290799
  function getFeatureZipDownloadPath(featureName, version, owner, repo) {
290687
290800
  const featureDownloadPath = `/tmp/${basicFeaturePath(featureName, version, owner, repo)}-zipball.zip`;
290688
- tarballs_messageLog('Feature tarball download path %s', featureDownloadPath);
290801
+ src_logger.debug(`Feature tarball download path ${featureDownloadPath}`);
290689
290802
  return featureDownloadPath;
290690
290803
  }
290691
290804
  function removeFeatureTarball(featureName, version, owner, repo) {
290692
290805
  const featurePath = getFeatureZipDownloadPath(featureName, version, owner, repo);
290693
- tarballs_messageLog('Removing feature tarball %s', featurePath);
290806
+ src_logger.debug(`Removing feature tarball ${featurePath}`);
290694
290807
  external_fs_.unlinkSync(featurePath);
290695
- tarballs_messageLog(`Removed tarball for feature ${featureName} and version ${version}: ${featurePath}`);
290808
+ src_logger.debug(`Removed tarball for feature ${featureName} and version ${version}: ${featurePath}`);
290696
290809
  }
290697
290810
  function featureTarballExists(featureName, version, owner, repo) {
290698
290811
  const featurePath = getFeatureZipDownloadPath(featureName, version, owner, repo);
290699
290812
  const exists = external_fs_.existsSync(featurePath);
290700
- tarballs_messageLog(`Tarball ${featurePath} exists? ${exists}`);
290813
+ src_logger.debug(`Tarball ${featurePath} exists? ${exists}`);
290701
290814
  return exists;
290702
290815
  }
290703
290816
  function getFeaturesExtractPath(featureName, version, owner, repo, options = {}) {
290704
290817
  const { createIfNotExists } = options;
290705
290818
  const extractPath = `/tmp/${basicFeaturePath(featureName, version, owner, repo)}-extract`;
290706
- tarballs_messageLog('Extract path %s', extractPath);
290819
+ src_logger.debug(`Extract path ${extractPath}`);
290707
290820
  if (createIfNotExists && !external_fs_.existsSync(extractPath)) {
290708
- tarballs_messageLog('Extract path %s does not exist, creating', extractPath);
290821
+ src_logger.debug(`Extract path ${extractPath} does not exist, creating`);
290709
290822
  external_fs_.mkdirSync(extractPath, { recursive: true });
290710
290823
  }
290711
290824
  return extractPath;
@@ -290722,17 +290835,16 @@ function trasformLeg(leg) {
290722
290835
 
290723
290836
 
290724
290837
 
290725
- const features_io_messageLog = src_default()('firestartr:catalog_common:features:features_io');
290726
290838
  function getFeatureRenderedPathForEntity(entity, featureName, basePath = '/tmp') {
290727
290839
  const entityFolderName = `${entity.metadata.name}`.toLowerCase();
290728
290840
  return external_path_default().join(basePath, entityFolderName, featureName);
290729
290841
  }
290730
290842
  function getFeatureRenderedConfigForComponent(entity, featureName, basePath = '/tmp/features') {
290731
- features_io_messageLog('Getting rendered config for component %s and feature %s', entity.name, featureName);
290843
+ src_logger.info(`Getting rendered config for component ${entity.name}and feature ${featureName}`);
290732
290844
  const workdir = getFeatureRenderedPathForEntity(entity, featureName, basePath);
290733
290845
  const config = JSON.parse(external_fs_.readFileSync(`${workdir}/output.json`, { encoding: 'utf8' }));
290734
- features_io_messageLog('Feature output: %O', config);
290735
- features_io_messageLog(`Rendered feature ${featureName} for component ${entity.name}. Result: ${(JSON.stringify, config)}`);
290846
+ src_logger.debug(`Feature output: ${config}`);
290847
+ src_logger.debug(`Rendered feature ${featureName} for component ${entity.name}. Result: ${(JSON.stringify, config)}`);
290736
290848
  return config;
290737
290849
  }
290738
290850
 
@@ -290746,7 +290858,6 @@ function getFeatureRenderedConfigForComponent(entity, featureName, basePath = '/
290746
290858
 
290747
290859
  ;// CONCATENATED MODULE: ../catalog_common/src/policies/policies.ts
290748
290860
 
290749
- const policies_log = src_default()('firestartr:catalog_common:policies');
290750
290861
  const FIRESTARTR_POLICIES = [
290751
290862
  {
290752
290863
  name: 'full-control',
@@ -290785,17 +290896,17 @@ function getPolicyByName(policyName) {
290785
290896
  return FIRESTARTR_POLICIES.find((p) => p.name === policyName || p.aliases.includes(policyName));
290786
290897
  }
290787
290898
  function policiesAreCompatible(syncPolicy, generalPolicy) {
290788
- policies_log('Validating policy compatibility: %s %s', syncPolicy, generalPolicy);
290899
+ src_logger.debug('Validating policy compatibility: %s %s', syncPolicy, generalPolicy);
290789
290900
  const syncPolicyWeight = getPolicyByName(syncPolicy)?.weight;
290790
290901
  const generalPolicyWeight = getPolicyByName(generalPolicy)?.weight;
290791
290902
  if (!syncPolicyWeight || !generalPolicyWeight) {
290792
290903
  throw new Error(`Policy ${syncPolicy} or ${generalPolicy} not found`);
290793
290904
  }
290794
290905
  if (generalPolicyWeight >= syncPolicyWeight) {
290795
- policies_log('Policies %s %s are compatible', syncPolicy, generalPolicy);
290906
+ src_logger.debug('Policies %s %s are compatible', syncPolicy, generalPolicy);
290796
290907
  return true;
290797
290908
  }
290798
- policies_log('Policies %s %s are not compatible', syncPolicy, generalPolicy);
290909
+ src_logger.debug('Policies %s %s are not compatible', syncPolicy, generalPolicy);
290799
290910
  return false;
290800
290911
  }
290801
290912
 
@@ -290815,6 +290926,7 @@ function policiesAreCompatible(syncPolicy, generalPolicy) {
290815
290926
 
290816
290927
 
290817
290928
 
290929
+
290818
290930
  /* harmony default export */ const catalog_common = ({
290819
290931
  io: io,
290820
290932
  generic: generic,
@@ -290823,6 +290935,7 @@ function policiesAreCompatible(syncPolicy, generalPolicy) {
290823
290935
  defaults: defaults,
290824
290936
  features: features,
290825
290937
  policies: policies,
290938
+ logger: logger_logger,
290826
290939
  });
290827
290940
 
290828
290941
  ;// CONCATENATED MODULE: ../../node_modules/universal-user-agent/index.js
@@ -297369,13 +297482,16 @@ async function getOctokitFromPat(envVar) {
297369
297482
  }
297370
297483
  /* harmony default export */ const src_auth = ({ getOctokitForOrg });
297371
297484
 
297485
+ ;// CONCATENATED MODULE: ../github/src/logger.ts
297486
+
297487
+ /* harmony default export */ const github_src_logger = (catalog_common.logger);
297488
+
297372
297489
  ;// CONCATENATED MODULE: ../github/src/organization.ts
297373
297490
 
297374
297491
 
297375
- const organization_messageLog = src_default()('firestartr:github:organization');
297376
297492
  const defaultPerPage = 100;
297377
297493
  async function getRepositoryList(org, perPageEntries = defaultPerPage) {
297378
- organization_messageLog(`Getting repository list for ${org} with ${perPageEntries} entries per page`);
297494
+ github_src_logger.info(`Getting repository list for ${org} with ${perPageEntries} entries per page`);
297379
297495
  const octokit = await getOctokitForOrg(org);
297380
297496
  const options = octokit.repos.listForOrg.endpoint.merge({
297381
297497
  org: org,
@@ -297385,7 +297501,7 @@ async function getRepositoryList(org, perPageEntries = defaultPerPage) {
297385
297501
  return await doPaginatedRequest(options);
297386
297502
  }
297387
297503
  async function getTeamList(org, perPageEntries = defaultPerPage) {
297388
- organization_messageLog(`Getting team list for ${org} with ${perPageEntries} entries per page`);
297504
+ github_src_logger.info(`Getting team list for ${org} with ${perPageEntries} entries per page`);
297389
297505
  const octokit = await getOctokitForOrg(org);
297390
297506
  const options = octokit.rest.teams.list.endpoint.merge({
297391
297507
  org: org,
@@ -297394,7 +297510,7 @@ async function getTeamList(org, perPageEntries = defaultPerPage) {
297394
297510
  return await doPaginatedRequest(options);
297395
297511
  }
297396
297512
  async function getUserList(org, perPageEntries = defaultPerPage) {
297397
- organization_messageLog(`Getting user list for ${org} with ${perPageEntries} entries per page`);
297513
+ github_src_logger.info(`Getting user list for ${org} with ${perPageEntries} entries per page`);
297398
297514
  const octokit = await getOctokitForOrg(org);
297399
297515
  const options = await octokit.rest.orgs.listMembers.endpoint.merge({
297400
297516
  org: org,
@@ -297403,7 +297519,7 @@ async function getUserList(org, perPageEntries = defaultPerPage) {
297403
297519
  return await doPaginatedRequest(options);
297404
297520
  }
297405
297521
  async function validateMember(username, org) {
297406
- organization_messageLog(`Validating ${username} is a member of ${org}`);
297522
+ github_src_logger.debug(`Validating ${username} is a member of ${org}`);
297407
297523
  const octokit = await getOctokitForOrg(org);
297408
297524
  const result = await octokit.orgs.checkMembershipForUser({
297409
297525
  org: org,
@@ -297412,7 +297528,7 @@ async function validateMember(username, org) {
297412
297528
  return result;
297413
297529
  }
297414
297530
  async function getUserRoleInOrg(username, org) {
297415
- organization_messageLog(`Getting user ${username} role in ${org}`);
297531
+ github_src_logger.info(`Getting user ${username} role in ${org}`);
297416
297532
  const octokit = await getOctokitForOrg(org);
297417
297533
  const membership = await octokit.orgs.getMembershipForUser({
297418
297534
  org: org,
@@ -297421,13 +297537,13 @@ async function getUserRoleInOrg(username, org) {
297421
297537
  return membership.data.role;
297422
297538
  }
297423
297539
  async function getOrgInfo(org) {
297424
- organization_messageLog(`Getting info for org ${org}`);
297540
+ github_src_logger.info(`Getting info for org ${org}`);
297425
297541
  const octokit = await getOctokitForOrg(org);
297426
297542
  const orgInfo = await octokit.orgs.get({ org });
297427
297543
  return orgInfo.data;
297428
297544
  }
297429
297545
  async function getOrgPlanName(org) {
297430
- organization_messageLog(`Getting plan for org ${org}`);
297546
+ github_src_logger.info(`Getting plan for org ${org}`);
297431
297547
  const orgInfo = await getOrgInfo(org);
297432
297548
  return orgInfo.plan.name;
297433
297549
  }
@@ -297451,9 +297567,8 @@ async function doPaginatedRequest(options) {
297451
297567
 
297452
297568
 
297453
297569
 
297454
- const repository_messageLog = src_default()('firestartr:github:repository');
297455
297570
  async function listReleases(repo, owner = 'prefapp') {
297456
- repository_messageLog(`Getting releases for ${owner}/${repo}`);
297571
+ github_src_logger.info(`Getting releases for ${owner}/${repo}`);
297457
297572
  const octokit = await getOctokitForOrg(owner);
297458
297573
  const response = await octokit.rest.repos.listReleases({
297459
297574
  owner,
@@ -297464,7 +297579,7 @@ async function listReleases(repo, owner = 'prefapp') {
297464
297579
  return response.data;
297465
297580
  }
297466
297581
  async function getReleaseByTag(releaseTag, repo, owner = 'prefapp') {
297467
- repository_messageLog(`Getting release ${releaseTag} for ${owner}/${repo}`);
297582
+ github_src_logger.info(`Getting release ${releaseTag} for ${owner}/${repo}`);
297468
297583
  const octokit = await getOctokitForOrg(owner);
297469
297584
  const response = await octokit.rest.repos.getReleaseByTag({
297470
297585
  owner,
@@ -297479,7 +297594,7 @@ async function getFileFromGithub(path, repo, owner = 'prefapp') {
297479
297594
  return await octokit.rest.repos.getContent({ owner, repo, path });
297480
297595
  }
297481
297596
  async function getContent(path, repo, owner = 'prefapp', ref = '') {
297482
- repository_messageLog(`Getting content for ${owner}/${repo}/${path}`);
297597
+ github_src_logger.info(`Getting content for ${owner}/${repo}/${path}`);
297483
297598
  const octokit = await getOctokitForOrg(owner);
297484
297599
  const opts = {
297485
297600
  owner,
@@ -297493,19 +297608,19 @@ async function getContent(path, repo, owner = 'prefapp', ref = '') {
297493
297608
  return Buffer.from(content.data.content, 'base64').toString('utf8');
297494
297609
  }
297495
297610
  async function getRepoInfo(owner, name) {
297496
- repository_messageLog(`Getting repo info for ${owner}/${name}`);
297611
+ github_src_logger.info(`Getting repo info for ${owner}/${name}`);
297497
297612
  const octokit = await getOctokitForOrg(owner);
297498
297613
  const res = await octokit.repos.get({ owner: owner, repo: name });
297499
297614
  return res['data'];
297500
297615
  }
297501
297616
  async function getPages(owner, name) {
297502
- repository_messageLog(`Getting pages for ${owner}/${name}`);
297617
+ github_src_logger.info(`Getting pages for ${owner}/${name}`);
297503
297618
  const octokit = await getOctokitForOrg(owner);
297504
297619
  const res = await octokit.repos.getPages({ owner: owner, repo: name });
297505
297620
  return res['data'];
297506
297621
  }
297507
297622
  async function getOIDCRepo(owner, name) {
297508
- repository_messageLog(`Getting repo info for ${owner}/${name}`);
297623
+ github_src_logger.info(`Getting repo info for ${owner}/${name}`);
297509
297624
  const octokit = await getOctokitForOrg(owner);
297510
297625
  return await octokit.request(`GET /repos/${owner}/${name}/actions/oidc/customization/sub`, {
297511
297626
  owner: owner,
@@ -297516,7 +297631,7 @@ async function getOIDCRepo(owner, name) {
297516
297631
  });
297517
297632
  }
297518
297633
  async function getBranchProtection(owner, repo, branch = 'main') {
297519
- repository_messageLog(`Getting branch protection for ${owner}/${repo}/${branch}`);
297634
+ github_src_logger.info(`Getting branch protection for ${owner}/${repo}/${branch}`);
297520
297635
  const octokit = await getOctokitForOrg(owner);
297521
297636
  const res = await octokit.repos.getBranchProtection({
297522
297637
  owner: owner,
@@ -297526,13 +297641,13 @@ async function getBranchProtection(owner, repo, branch = 'main') {
297526
297641
  return res['data'];
297527
297642
  }
297528
297643
  async function getTeams(owner, repo) {
297529
- repository_messageLog(`Getting teams for ${owner}/${repo}`);
297644
+ github_src_logger.info(`Getting teams for ${owner}/${repo}`);
297530
297645
  const octokit = await getOctokitForOrg(owner);
297531
297646
  const res = await octokit.repos.listTeams({ owner: owner, repo: repo });
297532
297647
  return res['data'];
297533
297648
  }
297534
297649
  async function getCollaborators(owner, repo, affiliation = 'direct') {
297535
- repository_messageLog(`Getting collaborators for ${owner}/${repo}`);
297650
+ github_src_logger.info(`Getting collaborators for ${owner}/${repo}`);
297536
297651
  const octokit = await getOctokitForOrg(owner);
297537
297652
  const res = await octokit.repos.listCollaborators({
297538
297653
  owner: owner,
@@ -297543,7 +297658,7 @@ async function getCollaborators(owner, repo, affiliation = 'direct') {
297543
297658
  }
297544
297659
  async function setContent(path, fileContent, repo, owner = 'prefapp', branch = 'main', message = '') {
297545
297660
  const base64Content = Buffer.from(fileContent, 'utf8').toString('base64');
297546
- repository_messageLog(`Setting content for ${owner}/${repo}/${path}`);
297661
+ github_src_logger.info(`Setting content for ${owner}/${repo}/${path}`);
297547
297662
  if (message === '') {
297548
297663
  message = `Update ${path}`;
297549
297664
  }
@@ -297551,10 +297666,10 @@ async function setContent(path, fileContent, repo, owner = 'prefapp', branch = '
297551
297666
  try {
297552
297667
  const currentContent = await getFileFromGithub(path, repo, owner);
297553
297668
  sha = currentContent.data.sha;
297554
- repository_messageLog('File already exists, updating it');
297669
+ github_src_logger.debug('File already exists, updating it');
297555
297670
  }
297556
297671
  catch {
297557
- repository_messageLog('File does not exists, creating it');
297672
+ github_src_logger.debug('File does not exist, creating it');
297558
297673
  }
297559
297674
  const octokit = await getOctokitForOrg(owner);
297560
297675
  await octokit.rest.repos.createOrUpdateFileContents({
@@ -297569,7 +297684,7 @@ async function setContent(path, fileContent, repo, owner = 'prefapp', branch = '
297569
297684
  }
297570
297685
  async function uploadFile(destinationPath, filePath, repo, owner = 'prefapp', branch = 'main', message = '') {
297571
297686
  if (!external_fs_.existsSync(filePath)) {
297572
- repository_messageLog(`File ${filePath} does not exists or is not readable`);
297687
+ github_src_logger.error(`File ${filePath} does not exists or is not readable`);
297573
297688
  throw `${filePath} does not exists or is not readable`;
297574
297689
  }
297575
297690
  // Read file contents and call setContent
@@ -297578,16 +297693,16 @@ async function uploadFile(destinationPath, filePath, repo, owner = 'prefapp', br
297578
297693
  }
297579
297694
  async function deleteFile(path, repo, owner = 'prefapp', branch = 'main', message = '') {
297580
297695
  let sha = undefined;
297581
- repository_messageLog(`Deleting file ${owner}/${repo}/${path}`);
297696
+ github_src_logger.info(`Deleting file ${owner}/${repo}/${path}`);
297582
297697
  try {
297583
297698
  const currentContent = await getFileFromGithub(path, repo, owner);
297584
297699
  sha = currentContent.data.sha;
297585
297700
  }
297586
297701
  catch {
297587
- repository_messageLog(`File ${path} does not exist in ${repo}`);
297702
+ github_src_logger.error(`File ${path} does not exist in ${repo}`);
297588
297703
  }
297589
297704
  if (!sha) {
297590
- repository_messageLog(`File ${path} does not exist in ${repo}`);
297705
+ github_src_logger.error(`File ${path} does not exist in ${repo}`);
297591
297706
  throw `File ${path} does not exist in ${repo}`;
297592
297707
  }
297593
297708
  if (message === '') {
@@ -297604,7 +297719,7 @@ async function deleteFile(path, repo, owner = 'prefapp', branch = 'main', messag
297604
297719
  });
297605
297720
  }
297606
297721
  async function addStatusCheck(output, is_failure, head_sha, name, status, repo, owner = 'prefapp') {
297607
- repository_messageLog(`Adding status checks to commit ${head_sha} in ${owner}/${repo}`);
297722
+ github_src_logger.info(`Adding status checks to commit ${head_sha} in ${owner}/${repo}`);
297608
297723
  const octokit = await getOctokitForOrg(owner);
297609
297724
  const payload = { output, head_sha, name, owner, repo, status };
297610
297725
  if (status === 'completed') {
@@ -297613,7 +297728,7 @@ async function addStatusCheck(output, is_failure, head_sha, name, status, repo,
297613
297728
  await octokit.rest.checks.create(payload);
297614
297729
  }
297615
297730
  async function addCommitStatus(state, sha, repo, owner = 'prefapp', target_url = '', description = '', context = '') {
297616
- repository_messageLog(`Adding commit status with state ${state} to SHA ${sha} in ${owner}/${repo}`);
297731
+ github_src_logger.info(`Adding commit status with state ${state} to SHA ${sha} in ${owner}/${repo}`);
297617
297732
  const octokit = await getOctokitForOrg(owner);
297618
297733
  await octokit.rest.repos.createCommitStatus({
297619
297734
  owner,
@@ -297645,9 +297760,8 @@ async function addCommitStatus(state, sha, repo, owner = 'prefapp', target_url =
297645
297760
  ;// CONCATENATED MODULE: ../github/src/team.ts
297646
297761
 
297647
297762
 
297648
- const team_messageLog = src_default()('firestartr:github:team');
297649
297763
  async function getTeamMembers(team, org) {
297650
- team_messageLog(`Getting members for ${org}/${team}`);
297764
+ github_src_logger.info(`Getting members for ${org}/${team}`);
297651
297765
  const octokit = await getOctokitForOrg(org);
297652
297766
  const res = await octokit.rest.teams.listMembersInOrg({
297653
297767
  org: org,
@@ -297656,13 +297770,13 @@ async function getTeamMembers(team, org) {
297656
297770
  return res['data'];
297657
297771
  }
297658
297772
  async function getTeamInfo(team, org) {
297659
- team_messageLog(`Getting info for ${org}/${team}`);
297773
+ github_src_logger.info(`Getting info for ${org}/${team}`);
297660
297774
  const octokit = await getOctokitForOrg(org);
297661
297775
  const res = await octokit.rest.teams.getByName({ org: org, team_slug: team });
297662
297776
  return res['data'];
297663
297777
  }
297664
297778
  async function getTeamRoleUser(org, team, username) {
297665
- team_messageLog(`Getting role for ${username} in ${org}/${team}`);
297779
+ github_src_logger.info(`Getting role for ${username} in ${org}/${team}`);
297666
297780
  const octokit = await getOctokitForOrg(org);
297667
297781
  const res = await octokit.rest.teams.getMembershipForUserInOrg({
297668
297782
  org: org,
@@ -297672,7 +297786,7 @@ async function getTeamRoleUser(org, team, username) {
297672
297786
  return res['data'];
297673
297787
  }
297674
297788
  async function create(org, team, privacy = 'closed') {
297675
- team_messageLog(`Creating team ${org}/${team}`);
297789
+ github_src_logger.info(`Creating team ${org}/${team}`);
297676
297790
  const octokit = await getOctokitForOrg(org);
297677
297791
  return await octokit.rest.teams.create({
297678
297792
  org: org,
@@ -297681,7 +297795,7 @@ async function create(org, team, privacy = 'closed') {
297681
297795
  });
297682
297796
  }
297683
297797
  async function addOrUpdateMember(org, team, username, role = 'member') {
297684
- team_messageLog(`Adding or updating ${username} in ${org}/${team} with role ${role}`);
297798
+ github_src_logger.info(`Adding or updating ${username} in ${org}/${team} with role ${role}`);
297685
297799
  const octokit = await getOctokitForOrg(org);
297686
297800
  return await octokit.rest.teams.addOrUpdateMembershipForUserInOrg({
297687
297801
  org: org,
@@ -297691,7 +297805,7 @@ async function addOrUpdateMember(org, team, username, role = 'member') {
297691
297805
  });
297692
297806
  }
297693
297807
  async function removeMember(org, team, username) {
297694
- team_messageLog(`Removing ${username} from ${org}/${team}`);
297808
+ github_src_logger.info(`Removing ${username} from ${org}/${team}`);
297695
297809
  const octokit = await getOctokitForOrg(org);
297696
297810
  return await octokit.teams.removeMembershipForUserInOrg({
297697
297811
  org: org,
@@ -297711,9 +297825,8 @@ async function removeMember(org, team, username) {
297711
297825
  ;// CONCATENATED MODULE: ../github/src/user.ts
297712
297826
 
297713
297827
 
297714
- const user_messageLog = src_default()('firestartr:github:user');
297715
297828
  async function getUserInfo(name) {
297716
- user_messageLog(`Getting user ${name} info`);
297829
+ github_src_logger.info(`Getting user ${name} info`);
297717
297830
  const octokit = await getOctokitForOrg(name);
297718
297831
  return await octokit.users.getByUsername({ username: name });
297719
297832
  }
@@ -297724,11 +297837,10 @@ async function getUserInfo(name) {
297724
297837
  ;// CONCATENATED MODULE: ../github/src/pull_request.ts
297725
297838
 
297726
297839
 
297727
- const pull_request_messageLog = src_default()('firestartr:github:pull_request');
297728
297840
  const commentMaxSize = 65535;
297729
297841
  async function commentInPR(comment, pr_number, repo, owner = 'prefapp') {
297730
297842
  try {
297731
- pull_request_messageLog(`Commenting ${comment} in PR ${pr_number} of ${owner}/${repo}`);
297843
+ github_src_logger.info(`Commenting ${comment} in PR ${pr_number} of ${owner}/${repo}`);
297732
297844
  const octokit = await getOctokitForOrg(owner);
297733
297845
  await octokit.rest.issues.createComment({
297734
297846
  owner,
@@ -297747,12 +297859,12 @@ async function getPrData(pull_number, repo, owner) {
297747
297859
  return await octokit.rest.pulls.get({ owner, repo, pull_number });
297748
297860
  }
297749
297861
  async function getPrLastCommitSHA(pull_number, repo, owner = 'prefapp') {
297750
- pull_request_messageLog(`Getting last commit SHA for PR ${pull_number} of ${owner}/${repo}`);
297862
+ github_src_logger.info(`Getting last commit SHA for PR ${pull_number} of ${owner}/${repo}`);
297751
297863
  const prData = await getPrData(pull_number, repo, owner);
297752
297864
  return prData.data.head.sha;
297753
297865
  }
297754
297866
  async function getPrMergeCommitSHA(pull_number, repo, owner = 'prefapp') {
297755
- pull_request_messageLog(`Getting merge commit SHA for PR ${pull_number} of ${owner}/${repo}`);
297867
+ github_src_logger.info(`Getting merge commit SHA for PR ${pull_number} of ${owner}/${repo}`);
297756
297868
  const prData = await getPrData(pull_number, repo, owner);
297757
297869
  if (prData.data.merge_commit_sha !== null) {
297758
297870
  return prData.data.merge_commit_sha;
@@ -297790,7 +297902,7 @@ function divideCommentIntoChunks(comment, sizeReduction = 0) {
297790
297902
  return result;
297791
297903
  }
297792
297904
  async function getPrFiles(pr_number, repo, owner = 'prefapp') {
297793
- pull_request_messageLog(`Getting PR details of PR ${pr_number} of ${owner}/${repo}`);
297905
+ github_src_logger.info(`Getting PR details of PR ${pr_number} of ${owner}/${repo}`);
297794
297906
  const octokit = await getOctokitForOrg(owner);
297795
297907
  return await octokit.rest.pulls.listFiles({
297796
297908
  owner,
@@ -297832,9 +297944,8 @@ async function filterPrBy(filter, opts) {
297832
297944
  ;// CONCATENATED MODULE: ../github/src/issues.ts
297833
297945
 
297834
297946
 
297835
- const issues_log = src_default()('firestartr:github:issues');
297836
297947
  async function issues_create(owner, repo, title, body, labels = []) {
297837
- issues_log(`Creating issue in ${owner}/${repo}`);
297948
+ github_src_logger.info(`Creating issue in ${owner}/${repo}`);
297838
297949
  const octokit = await getOctokitForOrg(owner);
297839
297950
  return await octokit.rest.issues.create({
297840
297951
  owner,
@@ -297845,7 +297956,7 @@ async function issues_create(owner, repo, title, body, labels = []) {
297845
297956
  });
297846
297957
  }
297847
297958
  async function update(owner, repo, issue_number, title, body, labels = []) {
297848
- issues_log(`Updating issue ${issue_number} in ${owner}/${repo}`);
297959
+ github_src_logger.info(`Updating issue ${issue_number} in ${owner}/${repo}`);
297849
297960
  const octokit = await getOctokitForOrg(owner);
297850
297961
  return await octokit.rest.issues.update({
297851
297962
  owner,
@@ -297857,7 +297968,7 @@ async function update(owner, repo, issue_number, title, body, labels = []) {
297857
297968
  });
297858
297969
  }
297859
297970
  async function filterBy(owner, repo, title, labels, state = 'open', creator = undefined, assignee = undefined) {
297860
- issues_log(`Filtering issues by title in ${owner}/${repo}`);
297971
+ github_src_logger.info(`Filtering issues by title in ${owner}/${repo}`);
297861
297972
  const octokit = await getOctokitForOrg(owner);
297862
297973
  const resp = await octokit.rest.issues.listForRepo({
297863
297974
  owner,
@@ -297872,7 +297983,7 @@ async function filterBy(owner, repo, title, labels, state = 'open', creator = un
297872
297983
  return resp.data.filter((issue) => issue.title.includes(title));
297873
297984
  }
297874
297985
  async function upsertByTitle(owner, repo, title, body, labels = []) {
297875
- issues_log(`Upserting issue by title in ${owner}/${repo}`);
297986
+ github_src_logger.info(`Upserting issue by title in ${owner}/${repo}`);
297876
297987
  const foundIssues = await filterBy(owner, repo, title, labels.join(','));
297877
297988
  if (foundIssues.length > 0) {
297878
297989
  return update(owner, repo, foundIssues[0].number, title, body, labels);
@@ -297882,7 +297993,7 @@ async function upsertByTitle(owner, repo, title, body, labels = []) {
297882
297993
  }
297883
297994
  }
297884
297995
  async function issues_close(owner, repo, issue_number) {
297885
- issues_log(`Closing issue ${issue_number} in ${owner}/${repo}`);
297996
+ github_src_logger.info(`Closing issue ${issue_number} in ${owner}/${repo}`);
297886
297997
  const octokit = await getOctokitForOrg(owner);
297887
297998
  return await octokit.rest.issues.update({
297888
297999
  owner,
@@ -297902,10 +298013,9 @@ async function issues_close(owner, repo, issue_number) {
297902
298013
  ;// CONCATENATED MODULE: ../github/src/branches.ts
297903
298014
 
297904
298015
 
297905
- const branches_messageLog = src_default()('firestartr:github:branches');
297906
298016
  const SHA1_EMPTY_TREE = '4b825dc642cb6eb9a060e54bf8d69288fbee4904';
297907
298017
  async function listBranches(repo, owner = 'prefapp') {
297908
- branches_messageLog(`Getting branches for ${owner}/${repo}`);
298018
+ github_src_logger.info(`Getting branches for ${owner}/${repo}`);
297909
298019
  const octokit = await getOctokitForOrg(owner);
297910
298020
  const response = await octokit.rest.repos.listBranches({
297911
298021
  owner,
@@ -297916,7 +298026,7 @@ async function listBranches(repo, owner = 'prefapp') {
297916
298026
  return response.data;
297917
298027
  }
297918
298028
  async function getBranch(repo, branch, owner = 'prefapp') {
297919
- branches_messageLog(`Getting branch ${branch} for ${owner}/${repo}`);
298029
+ github_src_logger.info(`Getting branch ${branch} for ${owner}/${repo}`);
297920
298030
  const octokit = await getOctokitForOrg(owner);
297921
298031
  const response = await octokit.rest.repos.getBranch({
297922
298032
  owner,
@@ -297926,7 +298036,7 @@ async function getBranch(repo, branch, owner = 'prefapp') {
297926
298036
  return response.data;
297927
298037
  }
297928
298038
  async function createBranch(repo, branch, sha, owner = 'prefapp') {
297929
- branches_messageLog(`Creating branch ${branch} for ${owner}/${repo}`);
298039
+ github_src_logger.info(`Creating branch ${branch} for ${owner}/${repo}`);
297930
298040
  const octokit = await getOctokitForOrg(owner);
297931
298041
  const response = await octokit.rest.git.createRef({
297932
298042
  owner,
@@ -297937,7 +298047,7 @@ async function createBranch(repo, branch, sha, owner = 'prefapp') {
297937
298047
  return response.data;
297938
298048
  }
297939
298049
  async function createOrphanBranch(repo, branch, owner = 'prefapp') {
297940
- branches_messageLog(`Creating orphan branch ${branch} for ${owner}/${repo}`);
298050
+ github_src_logger.info(`Creating orphan branch ${branch} for ${owner}/${repo}`);
297941
298051
  const octokit = await getOctokitForOrg(owner);
297942
298052
  // Create a commit with an empty tree
297943
298053
  const { data: commit } = await octokit.request('POST /repos/{owner}/{repo}/git/commits', {
@@ -297963,6 +298073,268 @@ async function createOrphanBranch(repo, branch, owner = 'prefapp') {
297963
298073
  createOrphanBranch,
297964
298074
  });
297965
298075
 
298076
+ ;// CONCATENATED MODULE: ../github/src/check_run.ts
298077
+
298078
+
298079
+ const FLUSH_TIMEOUT = 4; // seconds
298080
+ const GITHUB_OUTPUT_TEXT_LIMIT = 65000; // ~65k hard limit for output.text
298081
+ /**
298082
+ * Streams text updates to a callback on a fixed cadence, with a size-triggered early flush.
298083
+ * Does NOT clear content on flush (so the consumer can send the full, current log each time).
298084
+ */
298085
+ class CheckRunBuffer {
298086
+ constructor(initial = '', onFlush, opts) {
298087
+ this.content = initial;
298088
+ this.updated = Boolean(initial);
298089
+ this.onFlush = onFlush;
298090
+ this.flushIntervalMs = (opts?.intervalSec ?? FLUSH_TIMEOUT) * 1000;
298091
+ this.timer = setInterval(() => {
298092
+ this.flush();
298093
+ }, this.flushIntervalMs);
298094
+ if (initial)
298095
+ this.flush();
298096
+ }
298097
+ stop() {
298098
+ if (this.timer !== null) {
298099
+ clearInterval(this.timer);
298100
+ this.timer = null;
298101
+ }
298102
+ }
298103
+ update(data = '') {
298104
+ if (!data)
298105
+ return;
298106
+ this.content += data;
298107
+ this.updated = true;
298108
+ }
298109
+ flush() {
298110
+ if (!this.updated)
298111
+ return;
298112
+ try {
298113
+ this.onFlush(this.content);
298114
+ }
298115
+ finally {
298116
+ this.updated = false;
298117
+ }
298118
+ }
298119
+ snapshot() {
298120
+ return this.content;
298121
+ }
298122
+ }
298123
+ class GithubCheckRun {
298124
+ constructor(octokit, params) {
298125
+ this.hasCommented = false;
298126
+ this.closing = false;
298127
+ this.closed = false;
298128
+ this.lastStatus = 'in_progress';
298129
+ this.detailsFormatter = (s) => s;
298130
+ this.octokit = octokit;
298131
+ this.owner = params.owner;
298132
+ this.repo = params.repo;
298133
+ this.headSHA = params.headSHA;
298134
+ this.name = params.name;
298135
+ this.detailsUrl = params.detailsUrl;
298136
+ this.title = params.title ?? params.name;
298137
+ if (params.summary)
298138
+ this._summaryOverride = params.summary;
298139
+ this.pullNumber = params.pullNumber;
298140
+ this.includeCheckRunComment = Boolean(params.includeCheckRunComment);
298141
+ this.checkRunComment = params.checkRunComment;
298142
+ this.buffer = new CheckRunBuffer('', (data) => this.__updateCheckRun(data).catch(() => { }), { intervalSec: FLUSH_TIMEOUT });
298143
+ }
298144
+ /**
298145
+ * Configure markdown formatting for the details (output.text).
298146
+ * Example: ch.mdOptionsDetails({ quotes: 'terraform' })
298147
+ * Result:
298148
+ * ```terraform
298149
+ * <log>
298150
+ * ```
298151
+ */
298152
+ mdOptionsDetails(opts) {
298153
+ const lang = (opts?.quotes ?? '').trim();
298154
+ if (!lang) {
298155
+ this.detailsFormatter = (s) => s;
298156
+ return;
298157
+ }
298158
+ const fenceOpen = '```' + lang + '\n';
298159
+ const fenceClose = '\n```';
298160
+ const overhead = fenceOpen.length + fenceClose.length;
298161
+ this.detailsFormatter = (body) => {
298162
+ const maxBody = Math.max(0, GITHUB_OUTPUT_TEXT_LIMIT - overhead);
298163
+ const safeBody = body.length > maxBody ? truncateRight(body, maxBody) : body;
298164
+ return fenceOpen + safeBody + fenceClose;
298165
+ };
298166
+ }
298167
+ set summary(data) {
298168
+ this._summaryOverride = data;
298169
+ // Push an immediate update if already created and not closed.
298170
+ if (!this.closed && this.checkRunId) {
298171
+ // do not mutate buffer flags; just send current snapshot using new summary
298172
+ this.__updateCheckRun(this.buffer.snapshot()).catch(() => { });
298173
+ }
298174
+ }
298175
+ get summary() {
298176
+ return this._summaryOverride;
298177
+ }
298178
+ /**
298179
+ * Append log text and optionally set status ('queued' | 'in_progress').
298180
+ */
298181
+ update(text, status) {
298182
+ if (this.closed)
298183
+ return;
298184
+ if (status)
298185
+ this.lastStatus = status;
298186
+ if (text)
298187
+ this.buffer.update(text);
298188
+ }
298189
+ /**
298190
+ * Finalize the check with a conclusion. Flushes buffered text, marks completed.
298191
+ */
298192
+ async close(finalText, ok) {
298193
+ if (this.closed || this.closing)
298194
+ return;
298195
+ this.closing = true;
298196
+ this.buffer.stop();
298197
+ const finalContent = this.buffer.snapshot() + (finalText || '');
298198
+ try {
298199
+ await this.__ensureCreated();
298200
+ const { text, summary } = this.buildOutputTextAndSummary(finalContent);
298201
+ await this.octokit.rest.checks.update({
298202
+ owner: this.owner,
298203
+ repo: this.repo,
298204
+ check_run_id: this.checkRunId,
298205
+ conclusion: ok ? 'success' : 'failure',
298206
+ completed_at: new Date().toISOString(),
298207
+ output: {
298208
+ title: this.title,
298209
+ summary,
298210
+ text,
298211
+ },
298212
+ });
298213
+ this.closed = true;
298214
+ }
298215
+ finally {
298216
+ this.closing = false;
298217
+ }
298218
+ }
298219
+ // -------------------- Internals --------------------
298220
+ async __ensureCreated() {
298221
+ if (this.checkRunId)
298222
+ return;
298223
+ const startedAt = new Date().toISOString();
298224
+ const res = await this.octokit.rest.checks.create({
298225
+ owner: this.owner,
298226
+ repo: this.repo,
298227
+ name: this.name,
298228
+ head_sha: this.headSHA,
298229
+ status: 'in_progress',
298230
+ started_at: startedAt,
298231
+ details_url: this.detailsUrl,
298232
+ output: {
298233
+ title: this.title,
298234
+ summary: this._summaryOverride ?? '',
298235
+ text: undefined,
298236
+ },
298237
+ });
298238
+ this.checkRunId = res.data.id;
298239
+ if (this.includeCheckRunComment &&
298240
+ this.pullNumber !== undefined &&
298241
+ !this.hasCommented) {
298242
+ const link = this.__buildCheckRunUrl();
298243
+ const formattedLink = `[here](${link})`;
298244
+ const base = this.checkRunComment ?? '';
298245
+ const body = base ? `${base}${formattedLink}` : formattedLink;
298246
+ await this.octokit.rest.issues.createComment({
298247
+ owner: this.owner,
298248
+ repo: this.repo,
298249
+ issue_number: this.pullNumber,
298250
+ body,
298251
+ });
298252
+ this.hasCommented = true;
298253
+ }
298254
+ }
298255
+ async __updateCheckRun(allContent) {
298256
+ if (this.closed || this.closing)
298257
+ return;
298258
+ await this.__ensureCreated();
298259
+ const { text, summary } = this.buildOutputTextAndSummary(allContent);
298260
+ await this.octokit.rest.checks.update({
298261
+ owner: this.owner,
298262
+ repo: this.repo,
298263
+ check_run_id: this.checkRunId,
298264
+ status: this.lastStatus,
298265
+ output: {
298266
+ title: this.title,
298267
+ summary,
298268
+ text,
298269
+ },
298270
+ });
298271
+ }
298272
+ __buildCheckRunUrl() {
298273
+ if (this.checkRunId) {
298274
+ return `https://github.com/${this.owner}/${this.repo}/runs/${this.checkRunId}?check_suite_focus=true`;
298275
+ }
298276
+ return `https://github.com/${this.owner}/${this.repo}/commit/${this.headSHA}/checks?check_suite_focus=true`;
298277
+ }
298278
+ buildOutputTextAndSummary(full) {
298279
+ if (!full) {
298280
+ return {
298281
+ text: undefined,
298282
+ summary: this._summaryOverride ?? '',
298283
+ };
298284
+ }
298285
+ let text = this.detailsFormatter(full);
298286
+ let truncated = false;
298287
+ if (text.length > GITHUB_OUTPUT_TEXT_LIMIT) {
298288
+ text = truncateRight(text, GITHUB_OUTPUT_TEXT_LIMIT);
298289
+ truncated = true;
298290
+ }
298291
+ else {
298292
+ truncated = text.length < full.length;
298293
+ }
298294
+ let summary = this._summaryOverride ?? '';
298295
+ if (this._summaryOverride && truncated) {
298296
+ summary = `${summary}\n\n... (log truncated to ~${GITHUB_OUTPUT_TEXT_LIMIT.toLocaleString()} chars)`;
298297
+ }
298298
+ return { text, summary };
298299
+ }
298300
+ }
298301
+ // -------------------- Helpers --------------------
298302
+ function truncateRight(s, max) {
298303
+ if (s.length <= max)
298304
+ return s;
298305
+ const HARD = Math.max(0, max - 3);
298306
+ return s.slice(0, HARD) + '...';
298307
+ }
298308
+ /**
298309
+ * Factory: build a GithubCheckRun using an installation token for the given org.
298310
+ */
298311
+ async function createCheckRunForOrg(org, owner, repo, name, opts) {
298312
+ const octokit = await getOctokitForOrg(org);
298313
+ let headSHA = opts?.headSHA;
298314
+ if (!headSHA && typeof opts?.pullNumber === 'number') {
298315
+ headSHA = await getPrMergeCommitSHA(opts.pullNumber, repo, owner);
298316
+ }
298317
+ if (!headSHA) {
298318
+ throw new Error('createCheckRunForOrg: either opts.headSHA or opts.pullNumber must be provided');
298319
+ }
298320
+ return new GithubCheckRun(octokit, {
298321
+ owner,
298322
+ repo,
298323
+ headSHA,
298324
+ name,
298325
+ detailsUrl: opts?.detailsUrl,
298326
+ title: opts?.title,
298327
+ summary: opts?.summary,
298328
+ pullNumber: opts?.pullNumber,
298329
+ includeCheckRunComment: Boolean(opts?.includeCheckRunComment),
298330
+ checkRunComment: opts?.checkRunComment,
298331
+ });
298332
+ }
298333
+ async function createCheckRun(owner, repo, name, opts) {
298334
+ return createCheckRunForOrg(owner, owner, repo, name, opts);
298335
+ }
298336
+ const CheckRun = GithubCheckRun;
298337
+
297966
298338
  ;// CONCATENATED MODULE: ../github/index.ts
297967
298339
 
297968
298340
 
@@ -297973,6 +298345,7 @@ async function createOrphanBranch(repo, branch, owner = 'prefapp') {
297973
298345
 
297974
298346
 
297975
298347
 
298348
+
297976
298349
  /* harmony default export */ const github_0 = ({
297977
298350
  org: organization,
297978
298351
  repo: repository,
@@ -297986,6 +298359,10 @@ async function createOrphanBranch(repo, branch, owner = 'prefapp') {
297986
298359
  pulls: pull_request,
297987
298360
  issues: issues,
297988
298361
  branches: branches,
298362
+ feedback: {
298363
+ createCheckRun: createCheckRun,
298364
+ CheckRun: CheckRun,
298365
+ },
297989
298366
  });
297990
298367
 
297991
298368
  ;// CONCATENATED MODULE: ../cdk8s_renderer/src/patches/base.ts
@@ -301506,14 +301883,14 @@ async function loadSchema(schemaURI) {
301506
301883
 
301507
301884
 
301508
301885
  const ajv = new (_2020_default())({ useDefaults: true });
301509
- let initiated = false;
301886
+ let validation_initiated = false;
301510
301887
  const validations = {};
301511
301888
  function prepareValidation(schemaId) {
301512
- if (!initiated)
301889
+ if (!validation_initiated)
301513
301890
  ajv.addSchema(base_schemas.schemas);
301514
301891
  if (!validations[schemaId])
301515
301892
  validations[schemaId] = ajv.getSchema(schemaId);
301516
- initiated = true;
301893
+ validation_initiated = true;
301517
301894
  return validations[schemaId];
301518
301895
  }
301519
301896
  function validateClaim(data, schemaId = 'firestartr.dev://common/ClaimEnvelope') {
@@ -302087,6 +302464,19 @@ function isTerraformWorkspace(cr) {
302087
302464
  return cr.kind === 'FirestartrTerraformWorkspace';
302088
302465
  }
302089
302466
 
302467
+ ;// CONCATENATED MODULE: ../cdk8s_renderer/src/validations/crSize.ts
302468
+
302469
+ const K8S_OBJECT_SIZE_LIMIT = 1572864; // 1.5 MiB in bytes
302470
+ function validateCrSizes(crs) {
302471
+ for (const [key, cr] of Object.entries(crs)) {
302472
+ const serialized = catalog_common.io.toYaml(cr);
302473
+ const size = Buffer.byteLength(serialized, 'utf8');
302474
+ if (size > K8S_OBJECT_SIZE_LIMIT) {
302475
+ throw new Error(`CR "${key}" exceeds the Kubernetes object size limit by ${size - K8S_OBJECT_SIZE_LIMIT} bytes`);
302476
+ }
302477
+ }
302478
+ }
302479
+
302090
302480
  ;// CONCATENATED MODULE: ../cdk8s_renderer/src/refsSorter/refsSorter.ts
302091
302481
 
302092
302482
  /**
@@ -303147,7 +303537,7 @@ function addTraceabilityStamp(context, content) {
303147
303537
 
303148
303538
 
303149
303539
 
303150
- const render_messageLog = src_default()('firestartr:features_renderer');
303540
+ const messageLog = src_default()('firestartr:features_renderer');
303151
303541
  function render(featurePath, featureRenderPath, entity, firestartrConfig = {}, featureArgs = {}) {
303152
303542
  const configData = validate_validate(featurePath);
303153
303543
  const context = buildContext(entity, configData.args, firestartrConfig, featureArgs);
@@ -303158,7 +303548,7 @@ function render(featurePath, featureRenderPath, entity, firestartrConfig = {}, f
303158
303548
  // For now let's keep upgradeable flag for backward compatibility
303159
303549
  // by default it's false
303160
303550
  const userManaged = file.user_managed ?? file.upgradeable ?? false;
303161
- render_messageLog(`Rendering ${src} to ${dest}`);
303551
+ messageLog(`Rendering ${src} to ${dest}`);
303162
303552
  // render the content of the file
303163
303553
  const content = addTraceability(context, src, renderContent(external_fs_default().readFileSync(external_path_default().join(featurePath, 'templates', src)).toString(), context));
303164
303554
  const destFilePath = external_path_default().join(`${featureRenderPath}`, dest);
@@ -303321,7 +303711,7 @@ function loadYaml(file) {
303321
303711
  throw new Error(`Failed to parse YAML "${file}": ${msg}`);
303322
303712
  }
303323
303713
  }
303324
- function ensureSafeTmpName(name) {
303714
+ function ensureSafeTmpNames(name) {
303325
303715
  if (typeof name !== 'string' || !name.trim()) {
303326
303716
  throw new Error('Test "name" must be a non-empty string');
303327
303717
  }
@@ -303334,16 +303724,15 @@ function ensureSafeTmpName(name) {
303334
303724
  if (name.includes('..')) {
303335
303725
  throw new Error('Test "name" must not contain ".."');
303336
303726
  }
303337
- if (/[/\\]/.test(name)) {
303338
- throw new Error('Test "name" must not contain path separators (/ or \\)');
303339
- }
303340
303727
  if (!/^[A-Za-z0-9._-]+$/.test(name)) {
303341
303728
  throw new Error('Test "name" may only contain letters, numbers, ".", "_", or "-"');
303342
303729
  }
303343
303730
  }
303344
- async function mkNamedTmp(name) {
303345
- ensureSafeTmpName(name);
303346
- const dir = external_node_path_.join(external_node_os_namespaceObject.tmpdir(), name);
303731
+ async function mkNamedTmp(...names) {
303732
+ for (const name of names) {
303733
+ ensureSafeTmpNames(name);
303734
+ }
303735
+ const dir = external_node_path_.join(external_node_os_namespaceObject.tmpdir(), ...names);
303347
303736
  await promises_namespaceObject.rm(dir, { recursive: true, force: true });
303348
303737
  await promises_namespaceObject.mkdir(dir, { recursive: true });
303349
303738
  return dir;
@@ -303354,7 +303743,7 @@ async function mkTmp(prefix = 'feature-render-') {
303354
303743
  function buildExpectedOutput(config, renderDir) {
303355
303744
  const files = (config.files || []).map((f) => ({
303356
303745
  localPath: external_node_path_.join(renderDir, f.dest),
303357
- repoPath: f.src,
303746
+ repoPath: f.dest,
303358
303747
  userManaged: f.user_managed,
303359
303748
  }));
303360
303749
  return {
@@ -303432,18 +303821,23 @@ function updateFileContent(featureRenderPath, filePath, content) {
303432
303821
  render: render,
303433
303822
  updateFileContent: updateFileContent,
303434
303823
  auxiliar: auxiliar,
303824
+ buildContext: buildContext,
303825
+ renderContent: renderContent,
303435
303826
  });
303436
303827
 
303828
+ ;// CONCATENATED MODULE: ../features_preparer/src/logger.ts
303829
+
303830
+ /* harmony default export */ const features_preparer_src_logger = (catalog_common.logger);
303831
+
303437
303832
  ;// CONCATENATED MODULE: ../features_preparer/src/renderer.ts
303438
303833
 
303439
303834
 
303440
303835
 
303441
303836
 
303442
- const renderer_messageLog = src_default()('firestartr:features_preparer:renderer');
303443
303837
/**
 * Renders a downloaded feature package for a given owning entity.
 * Resolves the extracted-tarball source path and the per-entity render target,
 * logs the operation, then delegates to the features renderer.
 */
function renderFeature(featureName, version, owner, repo, featureOwner, renderPath = '/tmp', featureArgs = {}) {
    const tarballRoot = catalog_common.features.tarballs.getFeaturesExtractPath(featureName, version, owner, repo);
    const extractPath = external_path_default().join(tarballRoot, 'packages', featureName);
    const renderedPath = catalog_common.features.features.getFeatureRenderedPathForEntity(featureOwner, featureName, renderPath);
    features_preparer_src_logger.info(`Rendering feature ${featureName} to ${renderedPath} with component ${JSON.stringify(featureOwner)}`);
    return features_renderer.render(extractPath, renderedPath, featureOwner, {}, featureArgs);
}
303449
303843
 
@@ -303482,7 +303876,6 @@ async function downloadZipBall(url, filePath) {
303482
303876
 
303483
303877
 
303484
303878
 
303485
- const installer_log = src_default()('firestartr:features_preparer:installer');
303486
303879
  async function getFeatureConfigFromRef(featureName, featureRef, featureOwner, // -> cr
303487
303880
  featureArgs = {}, repo = 'features', owner = 'prefapp') {
303488
303881
  // reference is the featureRef directly
@@ -303513,12 +303906,12 @@ async function prepareFeature(featureName, version, repo = 'features', owner = '
303513
303906
  async function downloadFeatureZip(repo, featureName, reference, owner = 'prefapp') {
303514
303907
  try {
303515
303908
  const zipballExtractPath = catalog_common.features.tarballs.getFeaturesExtractPath(featureName, reference, owner, repo, { createIfNotExists: false });
303516
- console.log(`Zipball extract path: ${zipballExtractPath}`);
303909
+ features_preparer_src_logger.debug(`Zipball extract path: ${zipballExtractPath}`);
303517
303910
  if (external_fs_.existsSync(zipballExtractPath)) {
303518
- console.log(`Zipball extract path ${zipballExtractPath} already exists, reusing it.`);
303911
+ features_preparer_src_logger.debug(`Zipball extract path ${zipballExtractPath} already exists, reusing it.`);
303519
303912
  return zipballExtractPath;
303520
303913
  }
303521
- installer_log(`Feature ${[featureName, reference, owner, repo].join('-')} has not been downloaded yet, downloading`);
303914
+ features_preparer_src_logger.info(`Feature ${[featureName, reference, owner, repo].join('-')} has not been downloaded yet, downloading`);
303522
303915
  const octokit = await github_0.getOctokitForOrg(owner);
303523
303916
  const response = await octokit.request('GET /repos/{owner}/{repo}/zipball/{reference}', {
303524
303917
  request: {
@@ -303529,28 +303922,28 @@ async function downloadFeatureZip(repo, featureName, reference, owner = 'prefapp
303529
303922
  reference,
303530
303923
  });
303531
303924
  const randomZipTmpPath = `/tmp/${catalog_common.generic.randomString(20)}.zip`;
303532
- console.log(`Downloading feature ${featureName} version ${reference} to ${randomZipTmpPath}`);
303925
+ features_preparer_src_logger.info(`Downloading feature ${featureName} version ${reference} to ${randomZipTmpPath}`);
303533
303926
  if (external_fs_.existsSync(randomZipTmpPath)) {
303534
- console.log(`Temporary zip file ${randomZipTmpPath} already exists, removing it.`);
303927
+ features_preparer_src_logger.debug(`Temporary zip file ${randomZipTmpPath} already exists, removing it.`);
303535
303928
  external_fs_.unlinkSync(randomZipTmpPath);
303536
303929
  }
303537
303930
  const randomExtractPath = `/tmp/${catalog_common.generic.randomString(20)}`;
303538
- console.log(`Extracting feature ${featureName} version ${reference} to ${randomExtractPath}`);
303931
+ features_preparer_src_logger.debug(`Extracting feature ${featureName} version ${reference} to ${randomExtractPath}`);
303539
303932
  external_fs_.rmSync(randomExtractPath, { recursive: true, force: true });
303540
303933
  await downloadZipBall(response.url, randomZipTmpPath);
303541
303934
  const zip = new (adm_zip_default())(randomZipTmpPath);
303542
303935
  const mainEntry = zip.getEntries()[0].entryName;
303543
- console.log(`Main entry in zip: ${mainEntry}`);
303544
- console.log(`Extracting zip to ${randomExtractPath}`);
303936
+ features_preparer_src_logger.debug(`Main entry in zip: ${mainEntry}`);
303937
+ features_preparer_src_logger.debug(`Extracting zip to ${randomExtractPath}`);
303545
303938
  zip.extractAllTo(randomExtractPath, true);
303546
- console.log(`Renaming main entry ${mainEntry} to ${zipballExtractPath}`);
303939
+ features_preparer_src_logger.debug(`Renaming main entry ${mainEntry} to ${zipballExtractPath}`);
303547
303940
  external_fs_.renameSync(`${randomExtractPath}/${mainEntry}`, zipballExtractPath);
303548
- console.log(`Removing temporary zip file ${randomZipTmpPath}`);
303941
+ features_preparer_src_logger.debug(`Removing temporary zip file ${randomZipTmpPath}`);
303549
303942
  external_fs_.unlinkSync(randomZipTmpPath);
303550
303943
  return zipballExtractPath;
303551
303944
  }
303552
303945
  catch (error) {
303553
- console.error(error);
303946
+ features_preparer_src_logger.error(`Error on prepare feature with tag ${reference}: ${error}`);
303554
303947
  throw new Error(`Error for feature with tag ${reference}: ${error}. GitHub response: ${error}`);
303555
303948
  }
303556
303949
  }
@@ -307436,6 +307829,7 @@ async function renderClaim(catalogScope, firestartrScope, claim, patches, previo
307436
307829
 
307437
307830
 
307438
307831
 
307832
+
307439
307833
  /*
307440
307834
  * Function called when rendering but not importing
307441
307835
  *
@@ -307451,6 +307845,7 @@ async function renderer_render(catalogScope, firestartrScope, activateReferentia
307451
307845
  const data = await loadClaimsList(claimList);
307452
307846
  const result = await renderClaims(catalogScope, firestartrScope, data);
307453
307847
  validateTfStateKeyUniqueness(result);
307848
+ validateCrSizes(result);
307454
307849
  return result;
307455
307850
  }
307456
307851
 
@@ -309139,60 +309534,9 @@ const scaffoldSubcommand = {
309139
309534
 
309140
309535
  // EXTERNAL MODULE: ../../node_modules/@kubernetes/client-node/dist/index.js
309141
309536
  var client_node_dist = __nccwpck_require__(54851);
309142
- // EXTERNAL MODULE: ../../node_modules/winston/lib/winston.js
309143
- var winston = __nccwpck_require__(66752);
309144
- var winston_default = /*#__PURE__*/__nccwpck_require__.n(winston);
309145
309537
  ;// CONCATENATED MODULE: ../operator/src/logger.ts
309146
309538
 
309147
- const validLogLevels = [
309148
- 'error',
309149
- 'warn',
309150
- 'info',
309151
- 'debug',
309152
- 'verbose',
309153
- 'silly',
309154
- ];
309155
- let logger_initiated = false;
309156
- let logger = null;
309157
- // Type guard to check if a value is a valid LogLevel
309158
- function isValidLogLevel(level) {
309159
- return (typeof level === 'string' && validLogLevels.includes(level));
309160
- }
309161
- function initLogger() {
309162
- if (logger_initiated)
309163
- return;
309164
- const logLevel = process.env.LOG_LEVEL && isValidLogLevel(process.env.LOG_LEVEL)
309165
- ? process.env.LOG_LEVEL
309166
- : 'info';
309167
- logger = winston_default().createLogger({
309168
- level: logLevel,
309169
- exitOnError: false,
309170
- format: winston.format.combine(winston.format.timestamp({ format: 'YYYY-MM-DD HH:mm:ss' }), winston.format.json()),
309171
- transports: [
309172
- new winston.transports.Console({
309173
- level: logLevel,
309174
- }),
309175
- ],
309176
- });
309177
- logger_initiated = true;
309178
- }
309179
- function doLog(level, args) {
309180
- initLogger();
309181
- const [message, metadata] = args;
309182
- // eslint-disable-next-line prefer-spread
309183
- logger[level].apply(logger, [
309184
- message + ' | ' + JSON.stringify({ ...metadata }),
309185
- ]);
309186
- }
309187
- const logger_log = {
309188
- error: (...args) => doLog('error', args),
309189
- warn: (...args) => doLog('warn', args),
309190
- info: (...args) => doLog('info', args),
309191
- debug: (...args) => doLog('debug', args),
309192
- verbose: (...args) => doLog('verbose', args),
309193
- silly: (...args) => doLog('silly', args),
309194
- };
309195
- /* harmony default export */ const src_logger = (logger_log);
309539
+ /* harmony default export */ const operator_src_logger = (catalog_common.logger);
309196
309540
 
309197
309541
  ;// CONCATENATED MODULE: ../operator/src/store.ts
309198
309542
 
@@ -309203,13 +309547,7 @@ class Store {
309203
309547
  this.kind = kind;
309204
309548
  }
309205
309549
  add(item) {
309206
- src_logger.debug('STORE_ADD_ITEM', {
309207
- metadata: {
309208
- name: item.metadata.name,
309209
- kind: this.kind,
309210
- namespace: item.metadata.namespace,
309211
- },
309212
- });
309550
+ operator_src_logger.debug(`Added item '${item.metadata.name}' of kind '${this.kind}' to the store in namespace '${item.metadata.namespace}'`);
309213
309551
  this.store[itemPath(this.kind, item)] = {
309214
309552
  item,
309215
309553
  };
@@ -309218,13 +309556,7 @@ class Store {
309218
309556
  return 'deletionTimestamp' in item.metadata;
309219
309557
  }
309220
309558
  markToDelete(item) {
309221
- src_logger.debug('STORE_MARKED_ITEM_TO_DELETE', {
309222
- metadata: {
309223
- name: item.metadata.name,
309224
- kind: this.kind,
309225
- namespace: item.metadata.namespace,
309226
- },
309227
- });
309559
+ operator_src_logger.debug(`Marked item '${item.metadata.name}' of kind '${this.kind}' for deletion in namespace '${item.metadata.namespace}'`);
309228
309560
  this.store[itemPath(this.kind, item)] = {
309229
309561
  item,
309230
309562
  markedToDelete: true,
@@ -309257,24 +309589,11 @@ class Store {
309257
309589
  item,
309258
309590
  };
309259
309591
  if (updated)
309260
- src_logger.debug('STORE_ITEM_MODIFIED', {
309261
- metadata: {
309262
- name: item.metadata.name,
309263
- kind: this.kind,
309264
- namespace: item.metadata.namespace,
309265
- patches,
309266
- },
309267
- });
309592
+ operator_src_logger.debug(`Modified item '${item.metadata.name}' of kind '${this.kind}' in namespace '${item.metadata.namespace}' with patches ${JSON.stringify(patches)}`);
309268
309593
  return updated;
309269
309594
  }
309270
309595
  remove(item) {
309271
- src_logger.debug('STORE_ITEM_REMOVED', {
309272
- metadata: {
309273
- name: item.metadata.name,
309274
- kind: this.kind,
309275
- namespace: item.metadata.namespace,
309276
- },
309277
- });
309596
+ operator_src_logger.debug(`Removed item '${item.metadata.name}' of kind '${this.kind}' from namespace '${item.metadata.namespace}'`);
309278
309597
  delete this.store[itemPath(this.kind, item)];
309279
309598
  }
309280
309599
  getItem(item) {
@@ -309321,7 +309640,7 @@ async function getItem(kind, namespace, item) {
309321
309640
  }
309322
309641
  async function getItemByItemPath(itemPath, apiGroup = catalog_common.types.controller.FirestartrApiGroup, apiVersion = 'v1') {
309323
309642
  try {
309324
- src_logger.debug('CTL_GET_ITEM', { metadata: { itemPath } });
309643
+ operator_src_logger.debug(`The ctl is getting the item at '${itemPath}'.`);
309325
309644
  const { kc, opts } = await ctl_getConnection();
309326
309645
  opts.headers['Content-Type'] = 'application/json';
309327
309646
  opts.headers['Accept'] = 'application/json';
@@ -309331,14 +309650,14 @@ async function getItemByItemPath(itemPath, apiGroup = catalog_common.types.contr
309331
309650
  const r = await fetch(url, { method: 'get', headers: opts.headers });
309332
309651
  if (!r.ok) {
309333
309652
  const err = new Error(`Error on getItemByItemPath: ${itemPath}: ${r.statusText}`);
309334
- console.log(err.stack);
309653
+ operator_src_logger.error(`Error on getItemByItemPath: ${itemPath}: ${r.statusText}`);
309335
309654
  throw err;
309336
309655
  }
309337
309656
  const jsonResponse = await r.json();
309338
309657
  return jsonResponse;
309339
309658
  }
309340
309659
  catch (e) {
309341
- console.dir(e, { depth: null });
309660
+ operator_src_logger.error(`Error on getItemByItemPath: ${e}`);
309342
309661
  throw e;
309343
309662
  }
309344
309663
  }
@@ -309358,17 +309677,15 @@ async function writeManifest(kind, namespace, item, apiSlug) {
309358
309677
  return jsonResponse;
309359
309678
  }
309360
309679
/**
 * Persists a Secret manifest through the core v1 API.
 * Emits a debug log entry before delegating to writeManifest.
 */
function writeSecret(secret, namespace) {
    log.debug(`The ctl is writing the secret '${secret.metadata.name}' in namespace '${namespace}'.`);
    const apiSlug = `api/v1/namespaces/${namespace}/secrets/${secret.metadata.name}`;
    return writeManifest('secrets', namespace, secret, apiSlug);
}
309366
309683
/**
 * Writes the /status subresource of a firestartr.dev/v1 custom resource.
 */
async function writeStatus(kind, namespace, item) {
    operator_src_logger.debug(`The ctl is writing the status for item '${item.kind}/${item.metadata.name}' in namespace '${item.metadata.namespace}'.`);
    const apiSlug = `apis/firestartr.dev/v1/namespaces/${namespace}/${kind}/${item.metadata.name}/status`;
    return await writeManifest(kind, namespace, item, apiSlug);
}
309370
309687
/**
 * Writes the metadata/finalizers subresource of a firestartr.dev/v1
 * custom resource.
 */
function writeFinalizer(kind, namespace, item) {
    // Fixed log message: it previously said "writing the status" — a
    // copy-paste from writeStatus — although this function writes finalizers.
    log.debug(`The ctl is writing the finalizers for item '${item.kind}/${item.metadata.name}' in namespace '${item.metadata.namespace}'.`);
    return writeManifest(kind, namespace, item, `apis/firestartr.dev/v1/namespaces/${namespace}/${kind}/${item.metadata.name}/metadata/finalizers`);
}
309374
309691
  async function listItems(kind, namespace, kc, opts) {
@@ -309383,7 +309700,7 @@ async function listItems(kind, namespace, kc, opts) {
309383
309700
  return await r.json();
309384
309701
  }
309385
309702
  catch (err) {
309386
- console.dir(err);
309703
+ log.error(`On listItems: ${err}`);
309387
309704
  throw err;
309388
309705
  }
309389
309706
  }
@@ -309410,15 +309727,10 @@ async function* observeList(kind, namespace, revision, kc, opts) {
309410
309727
  }
309411
309728
  catch (err) {
309412
309729
  if (err instanceof TypeError) {
309413
- log.error('CTL_OBSERVE_LIST_ERROR_CHUNKS', {
309414
- metadata: { namespace, kind, revision, error: err },
309415
- });
309730
+ log.error(`The ctl encountered an error while listing chunks for '${kind}' with revision '${revision}' in namespace '${namespace}': '${err}'.`);
309416
309731
  }
309417
309732
  else {
309418
- log.error('CTL_OBSERVE_LIST_UNKNOWN_ERROR_CHUNKS', {
309419
- metadata: { namespace, kind, revision, error: err },
309420
- });
309421
- console.error(err);
309733
+ log.error(`The ctl encountered an unknown error while listing chunks for '${kind}' with revision '${revision}' in namespace '${namespace}': '${err}'.`);
309422
309734
  }
309423
309735
  }
309424
309736
  }
@@ -309441,7 +309753,7 @@ async function ctl_getConnection() {
309441
309753
  return { kc, opts };
309442
309754
  }
309443
309755
  catch (err) {
309444
- console.dir(err, { depth: null });
309756
+ operator_src_logger.error(`getConnection: ${err}`);
309445
309757
  throw err;
309446
309758
  }
309447
309759
  }
@@ -309500,9 +309812,7 @@ async function deleteSecret(secretName, namespace) {
309500
309812
  }
309501
309813
  catch (e) {
309502
309814
  if (e && e.code === 404) {
309503
- src_logger.error('CTL_DELETE_SECRET_NOT_FOUND', {
309504
- metadata: { secretName, namespace },
309505
- });
309815
+ operator_src_logger.error(`The ctl failed to delete the secret '${secretName}' in namespace '${namespace}' because it was not found.`);
309506
309816
  return null;
309507
309817
  }
309508
309818
  else {
@@ -309536,9 +309846,7 @@ async function getSecret(namespace, secretName) {
309536
309846
  }
309537
309847
  catch (e) {
309538
309848
  if (e.response && e.response.statusCode === 404) {
309539
- src_logger.error('CTL_SECRET_NOT_FOUND', {
309540
- metadata: { secretName, namespace },
309541
- });
309849
+ operator_src_logger.error(`The ctl could not find the secret '${secretName}' in namespace '${namespace}'.`);
309542
309850
  return null;
309543
309851
  }
309544
309852
  else {
@@ -309569,13 +309877,7 @@ async function getTFResult(namespace, item) {
309569
309877
  * @param {any} item - Object to check if has been renamed
309570
309878
  */
309571
309879
  async function checkIfRenamed(namespace, item) {
309572
- log.debug('CTL_CHECK_IF_RENAMED', {
309573
- metadata: {
309574
- kind: item.kind,
309575
- name: item.metadata.name,
309576
- namespace,
309577
- },
309578
- });
309880
+ log.debug(`The ctl is checking if item '${item.kind}/${item.metadata.name}' in namespace '${namespace}' has been renamed.`);
309579
309881
  const oldName = item.metadata?.labels?.[common.types.controller.FirestartrLabelOldName];
309580
309882
  // If the item does not have firestartr.dev/old-name label, it has not been renamed
309581
309883
  if (!oldName)
@@ -309591,9 +309893,7 @@ async function checkIfRenamed(namespace, item) {
309591
309893
  });
309592
309894
  if (!r.ok) {
309593
309895
  if (r.status === 404) {
309594
- log.debug('CTL_CHECK_IF_RENAMED_OLDNAME_NOT_FOUND', {
309595
- metadata: { kind: item.kind, name: item.metadata.name, namespace },
309596
- });
309896
+ log.debug(`The ctl is checking for a rename of item '${item.kind}/${item.metadata.name}' in namespace '${namespace}', but the old item name was not found.`);
309597
309897
  return false;
309598
309898
  }
309599
309899
  }
@@ -309601,21 +309901,17 @@ async function checkIfRenamed(namespace, item) {
309601
309901
  return true;
309602
309902
  }
309603
309903
  catch (err) {
309604
- console.log(err);
309904
+ log.debug(err);
309605
309905
  return false;
309606
309906
  }
309607
309907
  }
309608
309908
  async function upsertFinalizer(kind, namespace, item, finalizer) {
309609
309909
  if ('finalizers' in item.metadata &&
309610
309910
  item.metadata.finalizers.includes(finalizer)) {
309611
- src_logger.debug('CTL_UPSERT_FINALIZER_ALREADY_SET', {
309612
- metadata: { finalizer, kind, name: item.metadata.name, namespace },
309613
- });
309911
+ operator_src_logger.debug(`The ctl tried to upsert the finalizer '${finalizer}' for '${kind}/${item.metadata.name}' in namespace '${namespace}', but it was already set.`);
309614
309912
  return;
309615
309913
  }
309616
- src_logger.debug('CTL_UPSERT_FINALIZER_SETTING', {
309617
- metadata: { finalizer, kind, name: item.metadata.name, namespace },
309618
- });
309914
+ operator_src_logger.debug(`The ctl is setting the finalizer '${finalizer}' for '${kind}/${item.metadata.name}' in namespace '${namespace}'.`);
309619
309915
  const { kc, opts } = await ctl_getConnection();
309620
309916
  const url = `${kc.getCurrentCluster().server}/apis/firestartr.dev/v1/namespaces/${namespace}/${kind}/${item.metadata.name}`;
309621
309917
  opts.headers['Content-Type'] = 'application/json-patch+json';
@@ -309656,14 +309952,7 @@ async function upsertFinalizer(kind, namespace, item, finalizer) {
309656
309952
  async function unsetFinalizer(kind, namespace, item, finalizer) {
309657
309953
  const { kc, opts } = await ctl_getConnection();
309658
309954
  const name = typeof item === 'string' ? item : item.metadata.name;
309659
- src_logger.debug('CTL_REMOVE_FINALIZER', {
309660
- metadata: {
309661
- finalizer,
309662
- kind,
309663
- name,
309664
- namespace,
309665
- },
309666
- });
309955
+ operator_src_logger.debug(`The ctl is removing the finalizer '${finalizer}' from '${kind}/${name}' in namespace '${namespace}'.`);
309667
309956
  const url = `${kc.getCurrentCluster().server}/apis/firestartr.dev/v1/namespaces/${namespace}/${kind}/${name}`;
309668
309957
  opts.headers['Content-Type'] = 'application/json-patch+json';
309669
309958
  opts.headers['Accept'] = '*';
@@ -309725,8 +310014,7 @@ async function writePlanInGithubPR(prUrl, planText) {
309725
310014
  await github_0.pulls.commentInPR(message, +pr_number, repo, owner);
309726
310015
  }
309727
310016
  catch (err) {
309728
- console.error(err);
309729
- console.log('Cannot write plan in PR');
310017
+ operator_src_logger.error(`writePlanInGithubPR: Cannot write plan in PR: ${err}`);
309730
310018
  }
309731
310019
  }
309732
310020
  async function addApplyCommitStatus(cr, state, targetURL = '', description = '', context = '') {
@@ -309734,15 +310022,7 @@ async function addApplyCommitStatus(cr, state, targetURL = '', description = '',
309734
310022
  await addCommitStatusToPrMergeCommit(cr.metadata.annotations['firestartr.dev/last-state-pr'], state, targetURL, description, context);
309735
310023
  }
309736
310024
  catch (e) {
309737
- src_logger.error('CTL_ADD_APPLY_COMMIT_STATUS_ERROR', {
309738
- metadata: {
309739
- state,
309740
- targetURL,
309741
- description,
309742
- cr_metadata: cr.metadata,
309743
- error: e,
309744
- },
309745
- });
310025
+ log.error(`The ctl encountered an error while adding commit status for custom resource '${cr.metadata.name}' in namespace '${cr.metadata.namespace}'. State: '${state}'. Target URL: '${targetURL}'. Description: '${description}'. Error: '${e}'.`);
309746
310026
  }
309747
310027
  }
309748
310028
  async function addDestroyCommitStatus(cr, state, description = '', context = '') {
@@ -309751,50 +310031,33 @@ async function addDestroyCommitStatus(cr, state, description = '', context = '')
309751
310031
  await addCommitStatusToPrMergeCommit(prUrl, state, '', description, context);
309752
310032
  }
309753
310033
  catch (e) {
309754
- src_logger.error('CTL_ADD_DESTROY_COMMIT_STATUS_ERROR', {
309755
- metadata: {
309756
- state,
309757
- description,
309758
- cr_metadata: cr.metadata,
309759
- error: e,
309760
- },
309761
- });
310034
+ operator_src_logger.error(`The ctl encountered an error while adding the destroy commit status for custom resource '${cr.metadata.name}' in namespace '${cr.metadata.namespace}'. State: '${state}'. Description: '${description}'. Error: '${e}'.`);
309762
310035
  }
309763
310036
  }
309764
310037
/**
 * Publishes the terraform plan output as a GitHub status check on the PR.
 * Oversized summaries are truncated from the front so the tail of the plan
 * output is kept within the allowed character budget.
 * Errors are logged and deliberately swallowed: a failed status update must
 * not abort the caller.
 */
async function addPlanStatusCheck(prUrl, summary, status = 'in_progress', isFailure = false) {
    try {
        operator_src_logger.debug(`The ctl is checking the length of the plan summary, which is '${summary.length}'.`);
        const overflow = summary.length - MAX_CHARS_OUPUT_PLAN;
        if (overflow > 0) {
            // Drop the leading characters; keep the tail of the plan output.
            summary = summary.substring(overflow);
            operator_src_logger.debug(`The ctl found the plan summary too lengthy (length: '${summary.length}'). The summary must drop because '${overflow}'.`);
        }
        await ctl_addStatusCheck({ summary, title: 'Terraform Plan Results' }, isFailure, 'terraform_plan', prUrl, status);
    }
    catch (e) {
        operator_src_logger.error(`The ctl encountered an error while adding plan status for PR '${prUrl}' with status '${status}'. Is Failure: '${isFailure}'. Error: '${e}'.`);
    }
}
309784
310051
/**
 * Adds a named status check to the last commit of the PR referenced by the
 * annotation value.
 */
async function ctl_addStatusCheck(output, isFailure, name, prAnnotationValue, status) {
    const parsed = catalog_common.generic.getOwnerRepoPrNumberFromAnnotationValue(prAnnotationValue);
    const { owner, repo, prNumber } = parsed;
    const branchSha = await github_0.pulls.getPrLastCommitSHA(prNumber, repo, owner);
    operator_src_logger.info(`The ctl is adding a status check for '${owner}/${repo}' on branch '${branchSha}' with PR annotation value '${prAnnotationValue}' and name '${name}'.`);
    await github_0.repo.addStatusCheck(output, isFailure, branchSha, name, status, repo, owner);
}
309792
310057
/**
 * Adds a commit status to the merge commit of the PR referenced by the
 * annotation value.
 */
async function addCommitStatusToPrMergeCommit(prAnnotationValue, state, targetURL, description, context) {
    const parsed = catalog_common.generic.getOwnerRepoPrNumberFromAnnotationValue(prAnnotationValue);
    const { owner, repo, prNumber } = parsed;
    const branchSha = await github_0.pulls.getPrMergeCommitSHA(prNumber, repo, owner);
    operator_src_logger.info(`The ctl is adding a commit status for '${owner}/${repo}' on branch '${branchSha}'. State: '${state}'. Target URL: '${targetURL}'.`);
    await github_0.repo.addCommitStatus(state, branchSha, repo, owner, targetURL, description, context);
}
309800
310063
  async function getLastStatePrInfo(cr) {
@@ -309862,62 +310125,42 @@ async function observe(plural, namespace, onAdd, onChange, onDelete, _onRename)
309862
310125
  informer.on('add', (obj) => {
309863
310126
  store.add(obj);
309864
310127
  if (store.hasDeletionTimestamp(obj)) {
309865
- src_logger.info('REFLECTOR_ITEM_MARKED_TO_DELETION', {
309866
- metadata: { kind: obj.kind, name: obj.metadata.name },
309867
- });
310128
+ operator_src_logger.info(`Reflector has marked item '${obj.kind}/${obj.metadata.name}' for deletion.`);
309868
310129
  store.markToDelete(obj);
309869
310130
  onDelete(obj);
309870
310131
  }
309871
310132
  else {
309872
- src_logger.info('REFLECTOR_ITEM_ADDED', {
309873
- metadata: { kind: obj.kind, name: obj.metadata.name },
309874
- });
310133
+ operator_src_logger.info(`Reflector has added item '${obj.kind}/${obj.metadata.name}'.`);
309875
310134
  onAdd(obj);
309876
310135
  }
309877
310136
  });
309878
310137
  informer.on('update', (obj) => {
309879
- src_logger.info('REFLECTOR_ITEM_UPDATED', {
309880
- metadata: {
309881
- kind: obj.kind,
309882
- name: obj.metadata.name,
309883
- resourceVersion: obj.metadata.resourceVersion,
309884
- },
309885
- });
310138
+ operator_src_logger.info(`Reflector has updated item '${obj.kind}/${obj.metadata.name}' to a new resource version: '${obj.metadata.resourceVersion}'.`);
309886
310139
  if (!store.getItem(obj).markedToDelete &&
309887
310140
  store.hasDeletionTimestamp(obj) &&
309888
310141
  (store.hasBeenMarkedToDelete(obj) || store.modified(obj))) {
309889
- src_logger.info('REFLECTOR_ITEM_UPDATED_MARKED_TO_DELETION', {
309890
- metadata: { kind: obj.kind, name: obj.metadata.name },
309891
- });
310142
+ operator_src_logger.info(`Reflector has updated item '${obj.kind}/${obj.metadata.name}' and marked it for deletion.`);
309892
310143
  store.markToDelete(obj);
309893
310144
  onDelete(obj);
309894
310145
  }
309895
310146
  else if (store.modified(obj)) {
309896
- src_logger.info('REFLECTOR_ITEM_UPDATED_AND_MODIFIED', {
309897
- metadata: { kind: obj.kind, name: obj.metadata.name },
309898
- });
310147
+ operator_src_logger.info(`Reflector has updated and modified item '${obj.kind}/${obj.metadata.name}'.`);
309899
310148
  onChange(obj);
309900
310149
  }
309901
310150
  });
309902
310151
  informer.on('delete', (obj) => {
309903
310152
  // deleted from the etcd
309904
- src_logger.info('REFLECTOR_ITEM_DELETED', {
309905
- metadata: { kind: obj.kind, name: obj.metadata.name },
309906
- });
310153
+ operator_src_logger.info(`Reflector has deleted item '${obj.kind}/${obj.metadata.name}' from the etcd.`);
309907
310154
  store.remove(obj);
309908
310155
  });
309909
310156
  informer.on('error', (err) => {
309910
- src_logger.error('REFLECTOR_ITEM_ERROR', {
309911
- metadata: { error: err, plural, namespace },
309912
- });
310157
+ operator_src_logger.error(`An error occurred in the reflector for '${plural}' in namespace '${namespace}': '${err}'.`);
309913
310158
  setTimeout(async () => {
309914
310159
  try {
309915
310160
  await informer.start();
309916
310161
  }
309917
310162
  catch (err) {
309918
- src_logger.error('REFLECTOR_INFORMER_START_ERROR', {
309919
- metadata: { error: err, plural, namespace },
309920
- });
310163
+ operator_src_logger.error(`Failed to start the reflector informer for '${plural}' in namespace '${namespace}': '${err}'.`);
309921
310164
  }
309922
310165
  }, 5000);
309923
310166
  });
@@ -309937,13 +310180,13 @@ async function needsProvisioningOnCreate(cr) {
309937
310180
  const fCrLog = (cr) => `The item ${cr.kind}: ${cr.metadata.name}`;
309938
310181
  // NO STATUS
309939
310182
  if (!('status' in cr) || !('conditions' in cr.status)) {
309940
- src_logger.debug('STATUS_NO_STATUS_NOR_CONDITION', { metadata: { cr } });
310183
+ operator_src_logger.debug(`The custom resource '${cr.kind}/${cr.metadata.name}' is missing a status and any conditions.`);
309941
310184
  return true;
309942
310185
  }
309943
310186
  // ERROR
309944
310187
  const errCond = getConditionByType(cr.status.conditions, 'ERROR');
309945
310188
  if (errCond && errCond.status === 'True') {
309946
- src_logger.debug('STATUS_ERROR_SKIP_PROVISION', { metadata: { cr } });
310189
+ operator_src_logger.debug(`Skipping the provisioning process for custom resource '${cr.kind}/${cr.metadata.name}' due to a status error.`);
309947
310190
  return false;
309948
310191
  }
309949
310192
  // PROVISIONED
@@ -309951,7 +310194,7 @@ async function needsProvisioningOnCreate(cr) {
309951
310194
  if (provCond &&
309952
310195
  provCond.status === 'True' &&
309953
310196
  provCond.observedGeneration >= cr.metadata.generation) {
309954
- src_logger.debug('STATUS_ALREADY_PROVISIONED', { metadata: { cr } });
310197
+ operator_src_logger.debug(`The custom resource '${cr.kind}/${cr.metadata.name}' is already provisioned; skipping the process.`);
309955
310198
  return false;
309956
310199
  }
309957
310200
  // DELETED
@@ -309959,29 +310202,20 @@ async function needsProvisioningOnCreate(cr) {
309959
310202
  if (delCond &&
309960
310203
  delCond.status === 'True' &&
309961
310204
  delCond.observedGeneration >= cr.metadata.generation) {
309962
- src_logger.debug('STATUS_ALREADY_DELETED', { metadata: { cr } });
310205
+ operator_src_logger.debug(`The custom resource '${cr.kind}/${cr.metadata.name}' has already been deleted; no action is required.`);
309963
310206
  return false;
309964
310207
  }
309965
310208
  // PROVISIONING
309966
310209
  const provisioningCondition = getConditionByType(cr.status.conditions, 'PROVISIONING');
309967
310210
  if (provisioningCondition && provisioningCondition.status === 'True') {
309968
- src_logger.debug('STATUS_IN_PROVISIONING_REPROVISIONING', { metadata: { cr } });
310211
+ operator_src_logger.debug(`The custom resource '${cr.kind}/${cr.metadata.name}' is currently in a provisioning or reprovisioning state.`);
309969
310212
  return true;
309970
310213
  }
309971
- src_logger.debug('STATUS_NOT_HANDLED_STATE_SKIP_PROVISIONING', { metadata: { cr } });
310214
+ operator_src_logger.debug(`Skipping the provisioning process for custom resource '${cr.kind}/${cr.metadata.name}' because its current state is not handled.`);
309972
310215
  return false;
309973
310216
  }
309974
310217
  async function updateTransition(itemPath, reason, type, statusValue, message = '', updateStatusOnly = false) {
309975
- src_logger.info('STATUS_UPDATE_TRANSITION_FOR_ITEM', {
309976
- metadata: {
309977
- itemPath,
309978
- reason,
309979
- type,
309980
- statusValue,
309981
- message,
309982
- updateStatusOnly,
309983
- },
309984
- });
310218
+ operator_src_logger.info(`The item at '${itemPath}' transitioned to a new status of '${statusValue}' (type: '${type}'). The reason for the change is '${reason}' with the message: '${message}'. This was a status-only update: '${updateStatusOnly}'.`);
309985
310219
  const k8sItem = await getItemByItemPath(itemPath);
309986
310220
  if (!('status' in k8sItem))
309987
310221
  k8sItem.status = {};
@@ -310064,7 +310298,7 @@ async function syncer(enqueue) {
310064
310298
  void loop(enqueue);
310065
310299
  return {
310066
310300
  addItem(itemPath) {
310067
- src_logger.info('SYNC_ADD_ITEM', { metadata: { itemPath } });
310301
+ operator_src_logger.info(`Added item of path '${itemPath}' for synchronization`);
310068
310302
  void itemIsSyncable(itemPath).then((itemSyncInfo) => {
310069
310303
  if (!itemSyncInfo.syncable) {
310070
310304
  return;
@@ -310076,7 +310310,7 @@ async function syncer(enqueue) {
310076
310310
  }, helperCalculateRevisionTime(itemSyncInfo.period)),
310077
310311
  needsRevision: false,
310078
310312
  };
310079
- src_logger.info('Configured syncing for item %s %s', itemPath, syncWatchers[itemPath]);
310313
+ operator_src_logger.info(`Configured synchronization for item at path '${itemPath}'`);
310080
310314
  });
310081
310315
  },
310082
310316
  updateItem(itemPath) {
@@ -310084,13 +310318,13 @@ async function syncer(enqueue) {
310084
310318
  // log('Item %s not found, ignoring...', itemPath)
310085
310319
  // return
310086
310320
  //}
310087
- src_logger.debug('SYNC_UPDATE_ITEM', { metadata: { itemPath } });
310321
+ operator_src_logger.debug(`Updated item of path '${itemPath}' during synchronization`);
310088
310322
  void itemIsSyncable(itemPath).then((itemSyncInfo) => {
310089
310323
  if (!itemSyncInfo.syncable) {
310090
310324
  if (syncWatchers[itemPath]) {
310091
310325
  clearInterval(syncWatchers[itemPath].lastRevision);
310092
310326
  delete syncWatchers[itemPath];
310093
- src_logger.info('SYNC_REMOVE_FOR_ITEM', { metadata: { itemPath } });
310327
+ operator_src_logger.info(`Removed item of path '${itemPath}' from synchronization`);
310094
310328
  }
310095
310329
  }
310096
310330
  else {
@@ -310104,26 +310338,19 @@ async function syncer(enqueue) {
310104
310338
  }, helperCalculateRevisionTime(itemSyncInfo.period)),
310105
310339
  needsRevision: false,
310106
310340
  };
310107
- src_logger.debug('SYNC_CONFIGURED_FOR_ITEM', {
310108
- metadata: {
310109
- itemPath,
310110
- watcher: syncWatchers[itemPath],
310111
- },
310112
- });
310341
+ operator_src_logger.debug(`Configured synchronization for item at path '${itemPath}' with watcher '${syncWatchers[itemPath]}'`);
310113
310342
  }
310114
310343
  });
310115
310344
  },
310116
310345
  deleteItem(itemPath) {
310117
310346
  if (!syncWatchers[itemPath]) {
310118
- src_logger.debug('SYNC_DELETE_ITEM_NOT_FOUND_IGNORE', {
310119
- metadata: { itemPath },
310120
- });
310347
+ operator_src_logger.debug(`Ignored deletion attempt for item at path '${itemPath}' as it was not found during synchronization`);
310121
310348
  return;
310122
310349
  }
310123
- src_logger.debug('SYNC_DELETE_ITEM', { metadata: { itemPath } });
310350
+ operator_src_logger.debug(`Deleted item of path '${itemPath}' during synchronization`);
310124
310351
  clearInterval(syncWatchers[itemPath].lastRevision);
310125
310352
  delete syncWatchers[itemPath];
310126
- src_logger.debug('SYNC_DELETE_ITEM_DELETED', { metadata: { itemPath } });
310353
+ operator_src_logger.debug(`Successfully deleted item at path '${itemPath}' during synchronization`);
310127
310354
  },
310128
310355
  };
310129
310356
  }
@@ -310195,13 +310422,7 @@ async function initRetry(enqueue) {
310195
310422
  function retry(itemPath) {
310196
310423
  if (retryWatchers[itemPath]) {
310197
310424
  retryWatchers[itemPath].retryCounter++;
310198
- src_logger.debug('RETRY_FAILED', {
310199
- metadata: {
310200
- itemPath,
310201
- remainRetries: MAXRETRY - retryWatchers[itemPath].retryCounter,
310202
- nextRetry: NEXT_RETRY_SECS * retryWatchers[itemPath].retryCounter,
310203
- },
310204
- });
310425
+ operator_src_logger.debug(`Failed to process item '${itemPath}'. Retrying in '${NEXT_RETRY_SECS * retryWatchers[itemPath].retryCounter}' seconds. Remaining retries: '${MAXRETRY - retryWatchers[itemPath].retryCounter}'.`);
310205
310426
  retryWatchers[itemPath].retry = false;
310206
310427
  retryWatchers[itemPath].nextRetry = setTimeout(() => {
310207
310428
  if (itemPath in retryWatchers)
@@ -310252,12 +310473,7 @@ async function getItemIfNeededRetry(watcher) {
310252
310473
  }
310253
310474
  catch (e) {
310254
310475
  if (e.message && e.message.includes('Error on getItemByItemPath')) {
310255
- src_logger.debug('RETRY_ERROR_ITEM_NOT_FOUND', {
310256
- metadata: {
310257
- message: 'item not found, removed from the retry process',
310258
- itemPath: watcher.itemPath,
310259
- },
310260
- });
310476
+ operator_src_logger.debug(`Item '${watcher.itemPath}' not found, so it has been removed from the retry process.`);
310261
310477
  removeFromRetry(watcher.itemPath);
310262
310478
  return null;
310263
310479
  }
@@ -310297,9 +310513,7 @@ async function resolve(cr, getItemByItemPath, getSecret, namespace = 'default')
310297
310513
  async function resolveSecretRef(namespace, crDependency, getSecret) {
310298
310514
  let secretName = `${crDependency['kind']}-${crDependency['metadata']['name']}-outputs`.toLowerCase();
310299
310515
  if (crDependency.kind === 'FirestartrProviderConfig') {
310300
- src_logger.debug('RESOLVER_SKIP_SECRET_RESOLUTION_FOR', {
310301
- metadata: { kind: 'FirestartrProviderConfig', namespace, crDependency },
310302
- });
310516
+ operator_src_logger.debug(`The resolver is skipping secret resolution for '${crDependency.kind}/${crDependency.metadata.name}' of kind 'FirestartrProviderConfig' in namespace '${namespace}'.`);
310303
310517
  return undefined;
310304
310518
  }
310305
310519
  if (crDependency.kind === 'ExternalSecret') {
@@ -310307,9 +310521,7 @@ async function resolveSecretRef(namespace, crDependency, getSecret) {
310307
310521
  }
310308
310522
  const secret = await getSecret(namespace, secretName);
310309
310523
  if (!secret) {
310310
- src_logger.error('RESOLVER_SECRET_NOT_SOLVABLE', {
310311
- metadata: { secretName, crDependency, namespace },
310312
- });
310524
+ operator_src_logger.error(`The resolver could not find the secret '${secretName}' required by custom resource dependency '${crDependency}' in namespace '${namespace}'.`);
310313
310525
  console.error(`Could not resolve secret ${secretName}`);
310314
310526
  }
310315
310527
  return secret;
@@ -310445,9 +310657,7 @@ const kindsWithFinalizer = [
310445
310657
  */
310446
310658
  async function observeKind(pluralKind, namespace, queue, compute) {
310447
310659
  const lastWorkItems = {};
310448
- src_logger.info('INFORMER_OBSERVE_START', {
310449
- metadata: { kind: pluralKind, namespace },
310450
- });
310660
+ operator_src_logger.info(`The informer has started observing the '${pluralKind}' resource in namespace '${namespace}'.`);
310451
310661
  // onSync
310452
310662
  const enqueueCallback = (event) => {
310453
310663
  return async (item) => {
@@ -310464,13 +310674,7 @@ async function observeKind(pluralKind, namespace, queue, compute) {
310464
310674
  await observe(pluralKind, namespace,
310465
310675
  // on add
310466
310676
  async (item) => {
310467
- src_logger.info('INFORMER_ON_ITEM_ADDED', {
310468
- metadata: {
310469
- kind: pluralKind,
310470
- namespace,
310471
- name: item.metadata.name,
310472
- },
310473
- });
310677
+ operator_src_logger.info(`The informer has detected a new item, '${item.metadata.name}', for '${pluralKind}' in namespace '${namespace}'.`);
310474
310678
  await handleUpsertFinalizer(pluralKind, namespace, item);
310475
310679
  const workItem = await inform(pluralKind, item, 'onAdd', getLastWorkItem(pluralKind, lastWorkItems, item));
310476
310680
  syncCtl.addItem(informer_itemPath(pluralKind, item));
@@ -310481,13 +310685,7 @@ async function observeKind(pluralKind, namespace, queue, compute) {
310481
310685
  },
310482
310686
  // on modify
310483
310687
  async (item) => {
310484
- src_logger.info('INFORMER_ON_ITEM_MODIFIED', {
310485
- metadata: {
310486
- kind: pluralKind,
310487
- namespace,
310488
- name: item.metadata.name,
310489
- },
310490
- });
310688
+ operator_src_logger.info(`The informer has detected that item '${item.metadata.name}' for '${pluralKind}' in namespace '${namespace}' was modified.`);
310491
310689
  const workItem = await inform(pluralKind, item, 'onUpdate', getLastWorkItem(pluralKind, lastWorkItems, item));
310492
310690
  if (workItem) {
310493
310691
  setLastWorkItem(pluralKind, lastWorkItems, item, workItem);
@@ -310496,13 +310694,7 @@ async function observeKind(pluralKind, namespace, queue, compute) {
310496
310694
  },
310497
310695
  // on delete
310498
310696
  async (item) => {
310499
- src_logger.info('INFORMER_ON_ITEM_DELETED', {
310500
- metadata: {
310501
- kind: pluralKind,
310502
- namespace,
310503
- name: item.metadata.name,
310504
- },
310505
- });
310697
+ operator_src_logger.info(`The informer has detected that item '${item.metadata.name}' for '${pluralKind}' in namespace '${namespace}' was deleted.`);
310506
310698
  const workItem = await inform(pluralKind, item, 'onMarkedToDeletion', getLastWorkItem(pluralKind, lastWorkItems, item));
310507
310699
  if (workItem) {
310508
310700
  setLastWorkItem(pluralKind, lastWorkItems, item, workItem);
@@ -310512,17 +310704,11 @@ async function observeKind(pluralKind, namespace, queue, compute) {
310512
310704
  },
310513
310705
  // on rename
310514
310706
  async (item) => {
310515
- src_logger.info('INFORMER_ON_ITEM_RENAMED', {
310516
- metadata: {
310517
- kind: pluralKind,
310518
- namespace,
310519
- name: item.metadata.name,
310520
- },
310521
- });
310707
+ operator_src_logger.info(`The informer has detected that an item for '${pluralKind}' in namespace '${namespace}' has been renamed to '${item.metadata.name}'.`);
310522
310708
  const workItem = await inform(pluralKind, item, 'onRename', getLastWorkItem(pluralKind, lastWorkItems, item));
310523
310709
  // Add the renamed item to the sync queue
310524
310710
  syncCtl.addItem(informer_itemPath(pluralKind, item));
310525
- src_logger.debug('INFORMER_RENAMING_ITEM', { metadata: { workItem } });
310711
+ operator_src_logger.debug(`The informer is renaming item '${workItem.item.metadata.name}' of kind '${workItem.item.kind}' due to a change in its name.`);
310526
310712
  if (workItem) {
310527
310713
  const oldName = workItem.item.metadata.labels[catalog_common.types.controller.FirestartrLabelOldName];
310528
310714
  await handleUnsetFinalizer(pluralKind, namespace, item);
@@ -310583,7 +310769,7 @@ function enqueue(pluralKind, workItem, queue, compute, syncCtl, retryCtl) {
310583
310769
  syncCtl.updateItem(informer_itemPath(pluralKind, item));
310584
310770
  }
310585
310771
  else {
310586
- src_logger.debug('INFORMER_NOT_SPEC_OPERATION', { metadata: { operation } });
310772
+ operator_src_logger.debug(`The informer received an item with an operation type of '${operation}', which is not a specific operation.`);
310587
310773
  }
310588
310774
  };
310589
310775
  queue(workItem);
@@ -310626,9 +310812,7 @@ async function inform(pluralKind, item, op, lastWorkItem = null) {
310626
310812
  return workItem;
310627
310813
  case 'onRename':
310628
310814
  if (await needsProvisioningOnCreate(item)) {
310629
- src_logger.debug('INFORMER_ON_RENAME_NEEDS_PROVISION_ON_CREATE', {
310630
- metadata: { item },
310631
- });
310815
+ operator_src_logger.debug(`The informer is triggering a new provisioning process for the renamed item '${item.kind}/${item.metadata.name}'.`);
310632
310816
  workItem = {
310633
310817
  operation: OperationType.RENAMED,
310634
310818
  item,
@@ -310900,15 +311084,7 @@ let INIT = false;
310900
311084
  * @param {WorkItem} workItem - WorkItem to process
310901
311085
  */
310902
311086
  async function processItem(workItem) {
310903
- src_logger.info('PROCESSOR_NEW_WORKITEM', {
310904
- metadata: {
310905
- operation: workItem.operation,
310906
- workStatus: workItem.workStatus,
310907
- kind: workItem.item.kind,
310908
- name: workItem.item.metadata.name,
310909
- namespace: workItem.item.metadata.namespace,
310910
- },
310911
- });
311087
+ operator_src_logger.info(`The processor received a new work item for '${workItem.operation}' operation on '${workItem.item.kind}/${workItem.item.metadata.name}' in namespace '${workItem.item.metadata.namespace}'. Current work status is '${workItem.workStatus}'.`);
310912
311088
  queue.push(workItem);
310913
311089
  if (!INIT) {
310914
311090
  processItem_loop().catch((err) => {
@@ -310929,15 +311105,7 @@ async function processItem_loop() {
310929
311105
  const logMessage = `${new Date().toISOString()} : Processing OPERATION: ${w.operation} ITEM: ${w.item.kind}/${w.item.metadata.name}`;
310930
311106
  catalog_common.io.writeLogFile('process_item', logMessage);
310931
311107
  const timeout = createTimeout(w);
310932
- src_logger.info('PROCESSOR_PROCESSING_WORKITEM', {
310933
- metadata: {
310934
- operation: w.operation,
310935
- workStatus: w.workStatus,
310936
- kind: w.item.kind,
310937
- name: w.item.metadata.name,
310938
- namespace: w.item.metadata.namespace,
310939
- },
310940
- });
311108
+ operator_src_logger.info(`The processor is currently handling a '${w.operation}' operation for item '${w.item.kind}/${w.item.metadata.name}' in namespace '${w.item.metadata.namespace}'. The current work status is '${w.workStatus}'.`);
310941
311109
  await runWorkItem(w);
310942
311110
  clearTimeout(timeout);
310943
311111
  }
@@ -310953,15 +311121,7 @@ function createTimeout(w) {
310953
311121
  return setTimeout(() => {
310954
311122
  //throw new Error('Timeout on workitem ' + w);
310955
311123
  console.error('Timeout on workitem %O', w);
310956
- src_logger.error('PROCESSOR_TIMEOUT_ON_WORKITEM', {
310957
- metadata: {
310958
- operation: w.operation,
310959
- workStatus: w.workStatus,
310960
- kind: w.item.kind,
310961
- name: w.item.metadata.name,
310962
- namespace: w.item.metadata.namespace,
310963
- },
310964
- });
311124
+ operator_src_logger.error(`The processor timed out while handling a '${w.operation}' operation for item '${w.item.kind}/${w.item.metadata.name}' in namespace '${w.item.metadata.namespace}'. The current work status is '${w.workStatus}'.`);
310965
311125
  process.exit(1);
310966
311126
  }, TIMEOUTS[w.operation] * 1000);
310967
311127
  }
@@ -310987,7 +311147,7 @@ function processItem_wait(t = 2000) {
310987
311147
  return new Promise((ok) => setTimeout(ok, t));
310988
311148
  }
310989
311149
  async function runWorkItem(workItem) {
310990
- src_logger.debug('PROCESSOR_RUNNING_WORK_ITEM', { metadata: { workItem } });
311150
+ operator_src_logger.debug(`The processor is now running the '${workItem.operation}' operation for item '${workItem.item.kind}/${workItem.item.metadata.name}' in namespace '${workItem.item.metadata.namespace}'.`);
310991
311151
  if (!workItem.getItem || !workItem.process || !workItem.operation)
310992
311152
  return;
310993
311153
  try {
@@ -310999,33 +311159,17 @@ async function runWorkItem(workItem) {
310999
311159
  await updateTransition(workItem.handler.itemPath(), condition.reason, condition.type, condition.status, condition.message, condition.updateStatusOnly || false);
311000
311160
  }
311001
311161
  workItem.workStatus = WorkStatus.FINISHED;
311002
- src_logger.debug('PROCESSOR_REMAIN_ITEMS_IN_QUEUE', {
311003
- metadata: { remainingItems: queue.length },
311004
- });
311162
+ operator_src_logger.debug(`The processor has '${queue.length}' items remaining in the queue.`);
311005
311163
  }
311006
311164
  catch (e) {
311007
311165
  if (e instanceof Error &&
311008
311166
  e.message.includes('Error on getItemByItemPath')) {
311009
- src_logger.debug('PROCESSOR_ERROR_ITEM_NOT_FOUND', {
311010
- metadata: {
311011
- workItem,
311012
- message: 'item was not found, removing work item from queue',
311013
- },
311014
- });
311167
+ operator_src_logger.debug(`Item '${workItem.item.kind}/${workItem.item.metadata.name}' was not found, so its work item is being removed from the processor queue.`);
311015
311168
  workItem.workStatus = WorkStatus.FINISHED;
311016
311169
  return;
311017
311170
  }
311018
311171
  else {
311019
- src_logger.error('PROCESSOR_ERROR_PROCESSING_WORKITEM', {
311020
- metadata: {
311021
- operation: workItem.operation,
311022
- workStatus: workItem.workStatus,
311023
- kind: workItem.item.kind,
311024
- name: workItem.item.metadata.name,
311025
- namespace: workItem.item.metadata.namespace,
311026
- error: e,
311027
- },
311028
- });
311172
+ operator_src_logger.error(`An error occurred while the processor was handling the '${workItem.operation}' operation for item '${workItem.item.kind}/${workItem.item.metadata.name}' in namespace '${workItem.item.metadata.namespace}'. Current work status is '${workItem.workStatus}'. The error was: '${e}'.`);
311029
311173
  console.error(e);
311030
311174
  }
311031
311175
  return;
@@ -311037,11 +311181,7 @@ async function runWorkItem(workItem) {
311037
311181
  */
311038
311182
  async function workItemGarbageCollector(queue) {
311039
311183
  while (1) {
311040
- src_logger.debug('PROCESS_ITEM_GARBAGE_COLLECTOR_RUN', {
311041
- metadata: {
311042
- workItemsFound: queue.length,
311043
- },
311044
- });
311184
+ operator_src_logger.debug(`The garbage collector processed '${queue.length}' work items.`);
311045
311185
  for (const [index, wi] of queue.entries()) {
311046
311186
  if (wi.workStatus === WorkStatus.FINISHED) {
311047
311187
  // Because the queue is a constant, we cannot reassign it, instead we
@@ -311050,11 +311190,7 @@ async function workItemGarbageCollector(queue) {
311050
311190
  queue.splice(index, 1);
311051
311191
  }
311052
311192
  }
311053
- src_logger.debug('PROCESS_ITEM_GARBAGE_COLLECTOR_FINISHED', {
311054
- metadata: {
311055
- workItemsLeft: queue.length,
311056
- },
311057
- });
311193
+ operator_src_logger.debug(`The garbage collector finished its run, leaving '${queue.length}' work items in the queue.`);
311058
311194
  await processItem_wait(10 * 1000);
311059
311195
  }
311060
311196
  }
@@ -311066,11 +311202,14 @@ if (process.env.GARBAGE_QUEUE_COLLECTOR) {
311066
311202
  var cdktf_lib = __nccwpck_require__(95933);
311067
311203
  // EXTERNAL MODULE: ../../node_modules/@cdktf/provider-github/lib/provider/index.js
311068
311204
  var lib_provider = __nccwpck_require__(95107);
311069
- ;// CONCATENATED MODULE: ../provisioner/src/entities/base/Entity.ts
311205
+ ;// CONCATENATED MODULE: ../provisioner/src/logger.ts
311206
+
311207
+ /* harmony default export */ const provisioner_src_logger = (catalog_common.logger);
311070
311208
 
311209
+ ;// CONCATENATED MODULE: ../provisioner/src/entities/base/Entity.ts
311071
311210
 
311072
- const Entity_log = src_default()('firestartr:provisioner:entity:base');
311073
311211
  const EXTERNAL_NAME_ANNOTATION = 'firestartr.dev/external-name';
311212
+
311074
311213
  class Metadata {
311075
311214
  constructor(metadata) {
311076
311215
  this._metadata = metadata;
@@ -311117,11 +311256,13 @@ class Entity {
311117
311256
  }
311118
311257
  resolveRef(ref, propertyRef) {
311119
311258
  if (!this.deps) {
311120
- throw `resolveRef:
311259
+ const ErrorMessage = `resolveRef:
311121
311260
 
311122
311261
  Entity with kind ${this.kind} ${this.metadata.name}
311123
311262
 
311124
311263
  does not have any dependencies`;
311264
+ provisioner_src_logger.error(ErrorMessage);
311265
+ throw new Error(ErrorMessage);
311125
311266
  }
311126
311267
  const { kind, name, needsSecret } = ref;
311127
311268
  if (!needsSecret) {
@@ -311130,22 +311271,26 @@ class Entity {
311130
311271
  }
311131
311272
  else {
311132
311273
  if (!propertyRef) {
311133
- throw `resolveRef:
311274
+ const ErrorMessage = `resolveRef:
311134
311275
 
311135
311276
  Entity with kind ${this.kind} ${this.metadata.name}
311136
311277
 
311137
311278
  needs a propertyRef to resolve the secret`;
311279
+ provisioner_src_logger.error(ErrorMessage);
311280
+ throw new Error(ErrorMessage);
311138
311281
  }
311139
311282
  return Buffer.from(this.deps[`${kind}-${name}`].secret.data[propertyRef], 'base64').toString('utf8');
311140
311283
  }
311141
311284
  }
311142
311285
  resolveSecretRef(ref) {
311143
311286
  if (!this.deps) {
311144
- throw `resolveSecretRef:
311287
+ const ErrorMessage = `resolveSecretRef:
311145
311288
 
311146
311289
  Entity with kind ${this.kind} ${this.metadata.name}
311147
311290
 
311148
311291
  does not have any dependencies`;
311292
+ provisioner_src_logger.error(ErrorMessage);
311293
+ throw new Error(ErrorMessage);
311149
311294
  }
311150
311295
  const { name, key } = ref;
311151
311296
  return Buffer.from(this.deps[`Secret-${name}`].cr.data[key], 'base64').toString('utf8');
@@ -311153,11 +311298,13 @@ class Entity {
311153
311298
  resolveOutputs(scope) {
311154
311299
  if (this.spec.writeConnectionSecretToRef) {
311155
311300
  if (!this.mainResource) {
311156
- throw `resolveOutputs:
311301
+ const ErrorMessage = `resolveOutputs:
311157
311302
 
311158
311303
  Entity with kind ${this.kind} ${this.metadata.name}
311159
311304
 
311160
311305
  does not have a mainResource`;
311306
+ provisioner_src_logger.error(ErrorMessage);
311307
+ throw new Error(ErrorMessage);
311161
311308
  }
311162
311309
  /**
311163
311310
  * We don't currently support writing outputs to modules
@@ -311167,13 +311314,15 @@ class Entity {
311167
311314
  const keys = this.getKeysFrom(this.mainResource);
311168
311315
  const outputs = this.spec.writeConnectionSecretToRef.outputs;
311169
311316
  for (const o of outputs) {
311170
- Entity_log('OUTPUT %s', o.key);
311317
+ provisioner_src_logger.debug('OUTPUT %s', o.key);
311171
311318
  if (!keys.includes(o.key)) {
311172
- throw `resolveOutputs:
311319
+ const ErrorMessage = `resolveOutputs:
311173
311320
 
311174
311321
  Entity with kind ${this.kind} ${this.metadata.name}
311175
311322
 
311176
311323
  does not have the output ${o.key}`;
311324
+ provisioner_src_logger.error(ErrorMessage);
311325
+ throw new Error(ErrorMessage);
311177
311326
  }
311178
311327
  new cdktf_lib.TerraformOutput(scope, o.key, {
311179
311328
  value: this.mainResource.getAnyMapAttribute(this.camelToSnake(o.key)),
@@ -311206,7 +311355,6 @@ var repository_file = __nccwpck_require__(79507);
311206
311355
  ;// CONCATENATED MODULE: ../provisioner/src/entities/firestartrgithubrepository/helpers/CodeownersHelper.ts
311207
311356
 
311208
311357
 
311209
- const CodeownersHelper_messageLog = src_default()('firestartr:provisioner:entities:component:helpers:codeownerscreator');
311210
311358
  function provisionCodeowners(scope, repo, branchDefault, fsGithubRepository) {
311211
311359
  const config = {
311212
311360
  dependsOn: [repo, branchDefault],
@@ -311217,7 +311365,7 @@ function provisionCodeowners(scope, repo, branchDefault, fsGithubRepository) {
311217
311365
  overwriteOnCreate: true,
311218
311366
  repository: repo.name,
311219
311367
  };
311220
- CodeownersHelper_messageLog(`Content of the codeowners: ${fsGithubRepository.spec.repo.codeowners}`);
311368
+ provisioner_src_logger.debug(`Content of the codeowners: ${fsGithubRepository.spec.repo.codeowners}`);
311221
311369
  const tfStateKey = `_${fsGithubRepository.getTfStateKey()}-codeowners`;
311222
311370
  new repository_file/* RepositoryFile */.h(scope, tfStateKey, config);
311223
311371
  }
@@ -311230,9 +311378,8 @@ var repository_collaborator = __nccwpck_require__(33786);
311230
311378
 
311231
311379
 
311232
311380
 
311233
- const RepositoryTeamsHelper_messageLog = src_default()('firestartr:provisioner:entities:component:helpers:repositoryteamshelper');
311234
311381
  function provisionPermissions(scope, repo, fsGithubRepository) {
311235
- RepositoryTeamsHelper_messageLog(`provisionRepositoryTeams with name ${fsGithubRepository.metadata.name} in org ${fsGithubRepository.spec.org}`);
311382
+ provisioner_src_logger.info(`provisionRepositoryTeams with name ${fsGithubRepository.metadata.name} in org ${fsGithubRepository.spec.org}`);
311236
311383
  for (const permission of fsGithubRepository.spec.permissions) {
311237
311384
  if ('ref' in permission) {
311238
311385
  const tfStateKey = `_${fsGithubRepository.getTfStateKey()}-${permission.ref.kind}-${permission.ref.name}-tr`;
@@ -311278,9 +311425,8 @@ var branch_protection_v3 = __nccwpck_require__(31706);
311278
311425
 
311279
311426
 
311280
311427
 
311281
- const RepositoryHelper_messageLog = src_default()('firestartr:provisioner:entities:component:helpers:repositoryhelper');
311282
311428
  function provisionRepository(scope, fsGithubRepository) {
311283
- RepositoryHelper_messageLog(`provisionRepository with name ${fsGithubRepository.metadata.name} in org ${fsGithubRepository.spec.org}`);
311429
+ provisioner_src_logger.info(`provisionRepository with name ${fsGithubRepository.metadata.name} in org ${fsGithubRepository.spec.org}`);
311284
311430
  const config = {
311285
311431
  name: fsGithubRepository.metadata.name,
311286
311432
  description: fsGithubRepository.spec.repo.description,
@@ -311317,7 +311463,7 @@ function provisionRepository(scope, fsGithubRepository) {
311317
311463
  return repo;
311318
311464
  }
311319
311465
  function provisionBranchProtections(scope, repo, fsGithubRepository) {
311320
- RepositoryHelper_messageLog(`provisionBranchProtections with name ${fsGithubRepository.metadata.name} in org ${fsGithubRepository.spec.org}`);
311466
+ provisioner_src_logger.info(`provisionBranchProtections with name ${fsGithubRepository.metadata.name} in org ${fsGithubRepository.spec.org}`);
311321
311467
  for (const branchProtection of fsGithubRepository.spec.branchProtections) {
311322
311468
  const tfStateKey = `_${fsGithubRepository.getTfStateKey()}-${branchProtection.pattern}-bp`;
311323
311469
  const statusChecks = {
@@ -311342,8 +311488,6 @@ function provisionBranchProtections(scope, repo, fsGithubRepository) {
311342
311488
 
311343
311489
  ;// CONCATENATED MODULE: ../provisioner/src/config/config.ts
311344
311490
 
311345
-
311346
- const config_messageLog = src_default()('firestartr:provisioner:config');
311347
311491
  /**
311348
311492
  * @description Valid plans for the account
311349
311493
  * @type {Set<string>}
@@ -311450,13 +311594,12 @@ class FirestartrGithubRepository_FirestartrGithubRepository extends Entity {
311450
311594
 
311451
311595
 
311452
311596
 
311453
- const provisioner_messageLog = src_default()('firestartr:provisioner:features:provisioner');
311454
311597
  function provisionFeatureFiles(scope, feature) {
311455
- provisioner_messageLog(`Provisioning feature ${feature.spec.type} for ${feature.spec.repositoryTarget.name}`);
311456
- provisioner_messageLog('Feature output json: %O', feature);
311598
+ provisioner_src_logger.info(`Provisioning feature ${feature.spec.type} for ${feature.spec.repositoryTarget.name}`);
311599
+ provisioner_src_logger.debug('Feature output json: %O', feature);
311457
311600
  if (feature.spec.files) {
311458
311601
  for (const file of feature.spec.files) {
311459
- provisioner_messageLog('Provisioning file %O', file);
311602
+ provisioner_src_logger.debug('Provisioning file %O', file);
311460
311603
  const lifecycleArg = file.userManaged
311461
311604
  ? { ignoreChanges: ['content'] }
311462
311605
  : {};
@@ -311491,8 +311634,6 @@ class FirestartrGithubRepositoryFeature_FirestartrGithubRepositoryFeature extend
311491
311634
  var lib_membership = __nccwpck_require__(27501);
311492
311635
  ;// CONCATENATED MODULE: ../provisioner/src/entities/firestartrgithubmembership/helpers/MembershipHelper.ts
311493
311636
 
311494
-
311495
- const MembershipHelper_messageLog = src_default()('firestartr:provisioner:modules:artifacts:userartifact');
311496
311637
  function provisionMembership(scope, fsGithubMembership) {
311497
311638
  const tfStateKey = `_${fsGithubMembership.getTfStateKey()}`;
311498
311639
  const membership = new lib_membership/* Membership */.E(scope, tfStateKey, {
@@ -311507,8 +311648,6 @@ function provisionMembership(scope, fsGithubMembership) {
311507
311648
  var team_membership = __nccwpck_require__(93268);
311508
311649
  ;// CONCATENATED MODULE: ../provisioner/src/entities/firestartrgithubmembership/helpers/MembershipAllGroupHelper.ts
311509
311650
 
311510
-
311511
- const MembershipAllGroupHelper_messageLog = src_default()('firestartr:provisioner:modules:artifacts:membership:all-group-helper');
311512
311651
  function provisionAllGroupMembershipRelation(scope, fsGithubMembership) {
311513
311652
  const tfStateKey = `_${fsGithubMembership.getTfStateKey()}`;
311514
311653
  const config = {
@@ -311539,11 +311678,10 @@ class FirestartrGithubMembership_FirestartrGithubMembership extends Entity {
311539
311678
  var lib_team = __nccwpck_require__(57889);
311540
311679
  ;// CONCATENATED MODULE: ../provisioner/src/entities/firestartrgithubgroup/helpers/TeamsHelper.ts
311541
311680
 
311542
-
311543
311681
  // import { TeamConfigAux } from '../auxiliars/TeamConfigAux';
311544
- const TeamsHelper_messageLog = src_default()('firestartr:provisioner:entities:component:helpers:repositoryhelper');
311682
+
311545
311683
  function provisionGroup(scope, fsGithubGroup) {
311546
- TeamsHelper_messageLog(`provisionGroup with name ${fsGithubGroup.metadata.name} in org ${fsGithubGroup.spec.org}`);
311684
+ provisioner_src_logger.info(`provisionGroup with name ${fsGithubGroup.metadata.name} in org ${fsGithubGroup.spec.org}`);
311547
311685
  const config = {
311548
311686
  name: fsGithubGroup.metadata.name,
311549
311687
  description: fsGithubGroup.spec.description,
@@ -311561,11 +311699,10 @@ function provisionGroup(scope, fsGithubGroup) {
311561
311699
  ;// CONCATENATED MODULE: ../provisioner/src/entities/firestartrgithubgroup/helpers/TeamMembersHelper.ts
311562
311700
 
311563
311701
 
311564
- const TeamMembersHelper_messageLog = src_default()('firestartr:provisioner:entities:component:helpers:teamsmembershiphelper');
311565
311702
  function provisionMembers(scope, team, fsGithubGroup) {
311566
- TeamMembersHelper_messageLog(`provisionMembers of group ${fsGithubGroup.metadata.name} in org ${fsGithubGroup.spec.org}`);
311703
+ provisioner_src_logger.info(`provisionMembers of group ${fsGithubGroup.metadata.name} in org ${fsGithubGroup.spec.org}`);
311567
311704
  for (const member of fsGithubGroup.spec.members) {
311568
- TeamMembersHelper_messageLog(`Provisioning user ${member.ref.name} for group ${fsGithubGroup.metadata.name}`);
311705
+ provisioner_src_logger.info(`Provisioning user ${member.ref.name} for group ${fsGithubGroup.metadata.name}`);
311569
311706
  const tfStateKey = `_${fsGithubGroup.getTfStateKey()}-${member.ref.kind}-${member.ref.name}-tr`;
311570
311707
  if (member.ref.kind === 'FirestartrGithubMembership') {
311571
311708
  const username = fsGithubGroup.resolveRef(member.ref);
@@ -311600,8 +311737,6 @@ class FirestartrGithubGroup_FirestartrGithubGroup extends Entity {
311600
311737
  var organization_webhook = __nccwpck_require__(80516);
311601
311738
  ;// CONCATENATED MODULE: ../provisioner/src/entities/firestartrgithuborgwebhook/helpers/OrgWebhookHelper.ts
311602
311739
 
311603
-
311604
- const OrgWebhookHelper_messageLog = src_default()('firestartr:provisioner:modules:artifacts:orgwebhook');
311605
311740
  function provisionOrgWebhook(scope, fsGithubOrgWebhook) {
311606
311741
  const tfStateKey = `_${fsGithubOrgWebhook.getTfStateKey()}`;
311607
311742
  const webhookConfig = {
@@ -311867,7 +312002,6 @@ var FirestartrTerraformProvider;
311867
312002
 
311868
312003
 
311869
312004
 
311870
- const GithubStack_messageLog = src_default()('firestartr:provisioner:stacks:githubstack');
311871
312005
  class GithubStack extends BaseStack {
311872
312006
  async provisionEntity(isImport, entity, deps, tfStatePath, orgConfig) {
311873
312007
  try {
@@ -311883,7 +312017,7 @@ class GithubStack extends BaseStack {
311883
312017
  }
311884
312018
  }
311885
312019
  catch (err) {
311886
- GithubStack_messageLog('Error: provisionEntity: %s', err);
312020
+ provisioner_src_logger.error('Error: provisionEntity: %s', err);
311887
312021
  throw err;
311888
312022
  }
311889
312023
  }
@@ -311904,7 +312038,6 @@ class GithubStack extends BaseStack {
311904
312038
 
311905
312039
 
311906
312040
 
311907
- const TerraformModuleStack_messageLog = src_default()('firestartr:provisioner:stacks:terraformmodulestack');
311908
312041
  class TerraformModuleStack extends BaseStack {
311909
312042
  async provisionEntity(isImport, entity, deps, tfStatePath, orgConfig) {
311910
312043
  try {
@@ -311917,7 +312050,7 @@ class TerraformModuleStack extends BaseStack {
311917
312050
  }
311918
312051
  }
311919
312052
  catch (err) {
311920
- TerraformModuleStack_messageLog('Error: provisionEntity: %s', err);
312053
+ provisioner_src_logger.error('Error: provisionEntity: %s', err);
311921
312054
  throw err;
311922
312055
  }
311923
312056
  }
@@ -311973,9 +312106,10 @@ function __calculateTFStatePath(entity) {
311973
312106
  ;// CONCATENATED MODULE: ../provisioner/src/cdktf.ts
311974
312107
 
311975
312108
 
311976
- async function runCDKTF(entityPath, action, depsPath) {
312109
+ async function runCDKTF(entityPath, action, depsPath, stream) {
311977
312110
  return new Promise((ok, ko) => {
311978
312111
  const cdktfProcess = (0,external_child_process_.spawn)('cdktf', [action, '--log-level', 'DEBUG', '--auto-approve'], {
312112
+ stdio: ['inherit', 'pipe', 'pipe'],
311979
312113
  cwd: process.env.IS_DEV_LOCAL_ENVIRONMENT
311980
312114
  ? '/library/packages/provisioner'
311981
312115
  : '/library/provisioner',
@@ -312003,10 +312137,14 @@ async function runCDKTF(entityPath, action, depsPath) {
312003
312137
  const logparsed = log.toString();
312004
312138
  if (!logparsed.includes('Synthesizing')) {
312005
312139
  output += catalog_common.io.stripAnsi(logparsed);
312140
+ if (stream)
312141
+ stream.write(catalog_common.io.stripAnsi(logparsed));
312006
312142
  }
312007
312143
  });
312008
312144
  cdktfProcess.stderr.on('data', (log) => {
312009
312145
  output += catalog_common.io.stripAnsi(log.toString());
312146
+ if (stream)
312147
+ stream.write(catalog_common.io.stripAnsi(log.toString()));
312010
312148
  });
312011
312149
  cdktfProcess.on('exit', async (code) => {
312012
312150
  if (code !== 0) {
@@ -312024,7 +312162,6 @@ async function runCDKTF(entityPath, action, depsPath) {
312024
312162
 
312025
312163
 
312026
312164
 
312027
- const installer_messageLog = src_default()('firestartr:provisioner:features:installer');
312028
312165
  async function installer_installFeaturesForComponent(component, store) {
312029
312166
  const componentFeatures = component.spec?.provisioner?.features || '[]';
312030
312167
  const componentFeaturesToInstall = componentFeatures.filter((feature) => {
@@ -312033,7 +312170,7 @@ async function installer_installFeaturesForComponent(component, store) {
312033
312170
  });
312034
312171
  if (componentFeaturesToInstall.length > 0) {
312035
312172
  for (const feature of componentFeaturesToInstall) {
312036
- installer_messageLog('Installing feature %s for component %s', feature.name, component.metadata.name);
312173
+ log.info('Installing feature %s for component %s', feature.name, component.metadata.name);
312037
312174
  // Get feature config
312038
312175
  const featureConfig = common.features.features.getFeatureRenderedConfigForComponent(component, feature.name);
312039
312176
  // prepare files
@@ -312046,7 +312183,7 @@ async function installer_installFeaturesForComponent(component, store) {
312046
312183
  }
312047
312184
  }
312048
312185
  else {
312049
- installer_messageLog(`No features to install for component ${component.metadata.name}`);
312186
+ log.error(`No features to install for component ${component.metadata.name}`);
312050
312187
  }
312051
312188
  return store;
312052
312189
  }
@@ -312073,7 +312210,7 @@ async function getFileContentFromGithubIfExists(path, repositoryName, owner) {
312073
312210
  }
312074
312211
  catch (e) {
312075
312212
  if (e.status === 404) {
312076
- installer_messageLog(`File ${path} not found in ${repositoryName}`);
312213
+ log.debug(`File ${path} not found in ${repositoryName}`);
312077
312214
  return false;
312078
312215
  }
312079
312216
  throw e;
@@ -312098,7 +312235,6 @@ function isFreshInstallation(featureName, component) {
312098
312235
 
312099
312236
 
312100
312237
 
312101
- const preparer_messageLog = src_default()('firestartr:provisioner:features:installer');
312102
312238
  async function preparer_prepareFeaturesForComponent(component, store) {
312103
312239
  // those are the features to maintain
312104
312240
  let componentFeatures = component.spec?.provisioner?.features || [];
@@ -312113,7 +312249,7 @@ async function preparer_prepareFeaturesForComponent(component, store) {
312113
312249
  if (componentFeatures.length > 0) {
312114
312250
  const entityPath = dumpArtifactYaml(component);
312115
312251
  for (const feature of componentFeatures) {
312116
- preparer_messageLog('Installing feature %s for component %s', feature.name, component.metadata.name);
312252
+ log.info('Installing feature %s for component %s', feature.name, component.metadata.name);
312117
312253
  await featuresPreparer.getFeatureConfig(feature.name, feature.version, entityPath);
312118
312254
  // Get feature config
312119
312255
  const featureConfig = common.features.features.getFeatureRenderedConfigForComponent(component, feature.name);
@@ -312137,17 +312273,17 @@ const external_node_readline_namespaceObject = __WEBPACK_EXTERNAL_createRequire(
312137
312273
 
312138
312274
 
312139
312275
 
312140
- const terraform_messageLog = src_default()('firestartr:provisioner:terraform');
312141
- async function runTerraform(entity, command) {
312276
+ async function runTerraform(entity, command, stream) {
312142
312277
  let entityID = `${entity.kind.toLowerCase()}--${entity['spec']['firestartr']['tfStateKey']}`;
312143
312278
  if (entity.kind === 'FirestartrGithubRepositoryFeature')
312144
312279
  entityID = `${entity.kind.toLowerCase()}--${entity.metadata.name}`;
312145
312280
  const workDir = external_path_.join(process.env.IS_DEV_LOCAL_ENVIRONMENT
312146
312281
  ? '/library/packages/provisioner'
312147
312282
  : '/library/provisioner', 'cdktf.out', 'stacks', entityID);
312148
- terraform_messageLog(`Running terraform with command ${command} in ${workDir}`);
312283
+ provisioner_src_logger.info(`Running terraform with command ${command} in ${workDir}`);
312149
312284
  return new Promise((ok, ko) => {
312150
312285
  const terraformProcess = (0,external_child_process_.spawn)('terraform', [...command], {
312286
+ stdio: ['inherit', 'pipe', 'pipe'],
312151
312287
  cwd: workDir,
312152
312288
  env: {
312153
312289
  PATH: process.env.PATH,
@@ -312165,17 +312301,18 @@ async function runTerraform(entity, command) {
312165
312301
  terraformProcess.stdout.on('data', (log) => {
312166
312302
  const line = catalog_common.io.stripAnsi(log.toString());
312167
312303
  output += line;
312168
- console.log(line);
312304
+ if (stream)
312305
+ stream.write(line);
312169
312306
  });
312170
312307
  terraformProcess.stderr.on('data', (log) => {
312171
312308
  const line = catalog_common.io.stripAnsi(log.toString());
312172
312309
  output += line;
312173
- console.log(line);
312310
+ if (stream)
312311
+ stream.write(line);
312174
312312
  });
312175
312313
  terraformProcess.on('exit', async (code) => {
312176
312314
  console.log(`child process exited with code ${code}`);
312177
312315
  if (code !== 0) {
312178
- console.log(output);
312179
312316
  ko(output);
312180
312317
  }
312181
312318
  else {
@@ -312184,13 +312321,13 @@ async function runTerraform(entity, command) {
312184
312321
  });
312185
312322
  });
312186
312323
  }
312187
- function terraformInit(entity) {
312188
- return runTerraform(entity, ['init', '-no-color']);
312324
+ function terraformInit(entity, stream) {
312325
+ return runTerraform(entity, ['init', '-no-color'], stream);
312189
312326
  }
312190
- function terraformPlan(entity) {
312191
- return runTerraform(entity, ['plan', '-no-color']);
312327
+ function terraformPlan(entity, stream) {
312328
+ return runTerraform(entity, ['plan', '-no-color'], stream);
312192
312329
  }
312193
- async function terraformApply(entity, isImport = false, skipPlan = false) {
312330
+ async function terraformApply(entity, isImport = false, skipPlan = false, stream) {
312194
312331
  let line = false;
312195
312332
  if (isImport && !skipPlan) {
312196
312333
  console.log(`
@@ -312209,15 +312346,15 @@ Type 'yes' to continue:`);
312209
312346
  });
312210
312347
  }
312211
312348
  if (line === 'yes' || skipPlan) {
312212
- return runTerraform(entity, ['apply', '-no-color', '-auto-approve']);
312349
+ return runTerraform(entity, ['apply', '-no-color', '-auto-approve'], stream);
312213
312350
  }
312214
312351
  else {
312215
312352
  console.log(`🚀 Skipping apply for entity ${entity.kind} ${entity.metadata.name}`);
312216
312353
  return Promise.resolve('');
312217
312354
  }
312218
312355
  }
312219
- function terraformDestroy(entity) {
312220
- return runTerraform(entity, ['destroy', '-no-color', '-auto-approve']);
312356
+ function terraformDestroy(entity, stream) {
312357
+ return runTerraform(entity, ['destroy', '-no-color', '-auto-approve'], stream);
312221
312358
  }
312222
312359
 
312223
312360
  ;// CONCATENATED MODULE: ../provisioner/src/features/uninstaller.ts
@@ -312226,12 +312363,11 @@ function terraformDestroy(entity) {
312226
312363
 
312227
312364
 
312228
312365
 
312229
- const uninstaller_messageLog = src_default()('firestartr:provisioner:features:uninstaller');
312230
312366
  async function untrackManagedFiles(feature, deps) {
312231
312367
  if (!feature.spec.files || feature.spec.files.length < 1)
312232
312368
  return;
312233
- uninstaller_messageLog('Removing managed files from the Terraform State');
312234
- uninstaller_messageLog('Synthing the project...');
312369
+ provisioner_src_logger.debug('Removing managed files from the Terraform State');
312370
+ provisioner_src_logger.debug('Synthing the project...');
312235
312371
  const randomFilenameFeature = `${catalog_common.generic.randomString(20)}.yaml`;
312236
312372
  const randomFilenameDeps = `${catalog_common.generic.randomString(20)}_deps.yaml`;
312237
312373
  catalog_common.io.writeYamlFile(randomFilenameFeature, feature, '/tmp');
@@ -312239,7 +312375,7 @@ async function untrackManagedFiles(feature, deps) {
312239
312375
  await runCDKTF(external_path_.join('/tmp', randomFilenameFeature), 'synth', external_path_.join('/tmp', randomFilenameDeps));
312240
312376
  await runTerraform(feature, ['init']);
312241
312377
  for (const file of feature.spec.files.filter((file) => file.userManaged === true)) {
312242
- uninstaller_messageLog(`Removing from the state file ${file.path}`);
312378
+ provisioner_src_logger.debug(`Removing from the state file ${file.path}`);
312243
312379
  // Terraform replaces / with -- and . with - in the state file names, so we do the same to get the state file name
312244
312380
  const stateFileName = `${feature.spec.type}-${file.path}`
312245
312381
  .replace(/\//g, '--')
@@ -312312,14 +312448,46 @@ function getNextStatus(status) {
312312
312448
 
312313
312449
 
312314
312450
 
312451
+
312452
+
312315
312453
  class Resource {
312316
312454
  setLogger(fn) {
312317
312455
  this.logFn = fn;
312318
312456
  }
312457
+ setSynthStreamLogs(callbacks) {
312458
+ this.synthStreamCallbacks = callbacks;
312459
+ }
312460
+ setTFStreamLogs(callbacks) {
312461
+ this.tfStreamCallbacks = callbacks;
312462
+ }
312463
+ async onSyncStreaming() {
312464
+ if (!this.logStream) {
312465
+ this.logStream = new external_stream_.PassThrough();
312466
+ }
312467
+ if (this.synthStreamCallbacks) {
312468
+ const callbacks = await this.synthStreamCallbacks.prepare();
312469
+ this.setLogStream(callbacks.fnData, callbacks.fnEnd);
312470
+ }
312471
+ }
312472
+ async onTFStreaming() {
312473
+ if (!this.logStream) {
312474
+ this.logStream = new external_stream_.PassThrough();
312475
+ }
312476
+ if (this.tfStreamCallbacks) {
312477
+ const callbacks = await this.tfStreamCallbacks.prepare();
312478
+ this.setLogStream(callbacks.fnData, callbacks.fnEnd);
312479
+ }
312480
+ }
312481
+ setLogStream(fnData, fnEnd, reopen = true) {
312482
+ if (reopen || !this.logStream)
312483
+ this.logStream = new external_stream_.PassThrough();
312484
+ this.logStream.on('data', (data) => fnData(data.toString()));
312485
+ this.logStream.on('end', () => fnEnd());
312486
+ }
312319
312487
  constructor(mainCR, operation, deps = []) {
312320
312488
  this.data = {};
312321
312489
  this.output = '';
312322
- this.logFn = (msg) => console.log(msg);
312490
+ this.logFn = (msg) => provisioner_src_logger.debug(msg);
312323
312491
  this.set('main_artifact', mainCR);
312324
312492
  this.set('operation', operation);
312325
312493
  this.set('deps', deps);
@@ -312329,36 +312497,46 @@ class Resource {
312329
312497
  await this.synth();
312330
312498
  await this.runTerraform();
312331
312499
  await this.postprocess();
312500
+ if (this.logStream) {
312501
+ this.logStream.end();
312502
+ this.logStream = null;
312503
+ }
312332
312504
  }
312333
312505
  artifact() {
312334
312506
  return this.get('main_artifact');
312335
312507
  }
312336
312508
  async synth() {
312509
+ await this.onSyncStreaming();
312337
312510
  const randomFilenameArtifact = `${catalog_common.generic.randomString(20)}.yaml`;
312338
312511
  const randomFilenameDeps = `${catalog_common.generic.randomString(20)}_deps.yaml`;
312339
312512
  catalog_common.io.writeYamlFile(randomFilenameArtifact, this.get('main_artifact'), '/tmp');
312340
312513
  catalog_common.io.writeYamlFile(randomFilenameDeps, this.get('deps'), '/tmp');
312341
- await runCDKTF(external_path_.join('/tmp', randomFilenameArtifact), 'synth', external_path_.join('/tmp', randomFilenameDeps));
312514
+ await runCDKTF(external_path_.join('/tmp', randomFilenameArtifact), 'synth', external_path_.join('/tmp', randomFilenameDeps), this.logStream);
312515
+ if (this.logStream) {
312516
+ this.logStream.end();
312517
+ this.logStream = null;
312518
+ }
312342
312519
  }
312343
312520
  log(msg) {
312344
312521
  this.logFn(msg);
312345
312522
  }
312346
312523
  async runTerraform() {
312524
+ await this.onTFStreaming();
312347
312525
  let output = '';
312348
- output += await terraformInit(this.get('main_artifact'));
312349
- output += await terraformPlan(this.get('main_artifact'));
312526
+ output += await terraformInit(this.get('main_artifact'), this.logStream);
312527
+ output += await terraformPlan(this.get('main_artifact'), this.logStream);
312350
312528
  if (this.get('operation') === 'CREATE' ||
312351
312529
  this.get('operation') === 'UPDATE') {
312352
- output += await terraformApply(this.get('main_artifact'), false, true);
312530
+ output += await terraformApply(this.get('main_artifact'), false, true, this.logStream);
312353
312531
  }
312354
312532
  else if (this.get('operation') === 'DELETE') {
312355
- output += await terraformDestroy(this.get('main_artifact'));
312533
+ output += await terraformDestroy(this.get('main_artifact'), this.logStream);
312356
312534
  }
312357
312535
  else if (this.get('operation') === 'IMPORT') {
312358
- output += await terraformApply(this.get('main_artifact'), true, false);
312536
+ output += await terraformApply(this.get('main_artifact'), true, false, this.logStream);
312359
312537
  }
312360
312538
  else if (this.get('operation') === 'IMPORT_SKIP_PLAN') {
312361
- output += await terraformApply(this.get('main_artifact'), true, true);
312539
+ output += await terraformApply(this.get('main_artifact'), true, true, this.logStream);
312362
312540
  }
312363
312541
  else {
312364
312542
  throw new Error(`unknown operation: ${this.get('operation')}`);
@@ -312383,7 +312561,6 @@ class Resource {
312383
312561
 
312384
312562
 
312385
312563
 
312386
- const github_feature_log = src_default()('firestartr:provisioner:github_repository_feature');
312387
312564
  class github_feature_FirestartrGithubRepositoryFeature extends Resource {
312388
312565
  static kind() {
312389
312566
  return 'FirestartrGithubRepositoryFeature';
@@ -312391,19 +312568,19 @@ class github_feature_FirestartrGithubRepositoryFeature extends Resource {
312391
312568
  async preprocess() {
312392
312569
  switch (this.get('operation')) {
312393
312570
  case 'CREATE':
312394
- github_feature_log('CREATED');
312571
+ provisioner_src_logger.debug('Creating FirestartrGithubRepositoryFeature');
312395
312572
  await this._updateManagedFiles();
312396
312573
  break;
312397
312574
  case 'UPDATE':
312398
- github_feature_log('UPDATED');
312575
+ provisioner_src_logger.debug('Updating FirestartrGithubRepositoryFeature');
312399
312576
  await this._updateManagedFiles();
312400
312577
  break;
312401
312578
  case 'DELETE':
312402
- github_feature_log('DELETE');
312579
+ provisioner_src_logger.debug('Deleting FirestartrGithubRepositoryFeature');
312403
312580
  await untrackManagedFiles(this.get('main_artifact'), this.get('deps'));
312404
312581
  break;
312405
312582
  default:
312406
- github_feature_log(`UNKNOWN: ${this.get('operation')}`);
312583
+ provisioner_src_logger.debug(`Unknown operation '${this.get('operation')}' for FirestartrGithubRepositoryFeature`);
312407
312584
  }
312408
312585
  }
312409
312586
  async _updateManagedFiles() {
@@ -312417,7 +312594,7 @@ class github_feature_FirestartrGithubRepositoryFeature extends Resource {
312417
312594
  file.content = newContent;
312418
312595
  }
312419
312596
  catch (e) {
312420
- github_feature_log(`File ${file.path} not found in repo ${repoInfo.ref.name} on branch ${repoInfo.branch}. File content not updated`);
312597
+ provisioner_src_logger.error(`File ${file.path} not found in repo ${repoInfo.ref.name} on branch ${repoInfo.branch}. File content not updated`);
312421
312598
  }
312422
312599
  }
312423
312600
  }
@@ -312497,7 +312674,6 @@ async function provisionRegularBranch(repo, branchName, sourceBranch, org) {
312497
312674
 
312498
312675
 
312499
312676
 
312500
- const github_repository_log = src_default()('firestartr:provisioner:github_repository');
312501
312677
  class github_repository_FirestartrGithubRepository extends Resource {
312502
312678
  static kind() {
312503
312679
  return 'FirestartrGithubRepository';
@@ -312505,22 +312681,23 @@ class github_repository_FirestartrGithubRepository extends Resource {
312505
312681
  async preprocess() {
312506
312682
  switch (this.get('operation')) {
312507
312683
  case 'CREATE':
312508
- github_repository_log('CREATE');
312684
+ provisioner_src_logger.debug('Creating FirestartrGithubRepository');
312509
312685
  break;
312510
312686
  case 'UPDATE':
312511
- github_repository_log('UPDATED');
312687
+ provisioner_src_logger.debug('Updating FirestartrGithubRepository');
312512
312688
  break;
312513
312689
  case 'DELETE':
312514
- github_repository_log('DELETED');
312690
+ provisioner_src_logger.debug('Deleted FirestartrGithubRepository');
312515
312691
  break;
312516
312692
  case 'IMPORT':
312517
- github_repository_log('IMPORT');
312693
+ provisioner_src_logger.debug('Importing FirestartrGithubRepository');
312518
312694
  break;
312519
312695
  case 'IMPORT_SKIP_PLAN':
312520
- github_repository_log('IMPORT_SKIP_PLAN');
312696
+ provisioner_src_logger.debug('Import skipping plan for FirestartrGithubRepository');
312521
312697
  break;
312522
312698
  default:
312523
- github_repository_log('UNKNOWN');
312699
+ provisioner_src_logger.debug('Unknown operation for FirestartrGithubRepository ' +
312700
+ this.get('operation'));
312524
312701
  }
312525
312702
  }
312526
312703
  async postprocess() {
@@ -312528,20 +312705,20 @@ class github_repository_FirestartrGithubRepository extends Resource {
312528
312705
  switch (this.get('operation')) {
312529
312706
  case 'CREATE':
312530
312707
  case 'UPDATE':
312531
- github_repository_log('CREATE & UPDATE');
312708
+ provisioner_src_logger.debug(`Created and updated FirestartrGithubRepository ${cr.metadata.name}`);
312532
312709
  await provisionAdditionalBranches(cr);
312533
312710
  break;
312534
312711
  case 'DELETE':
312535
- github_repository_log('DELETED');
312712
+ provisioner_src_logger.debug(`Deleted FirestartrGithubRepository ${cr.metadata.name}`);
312536
312713
  break;
312537
312714
  case 'IMPORT':
312538
- github_repository_log('IMPORT');
312715
+ provisioner_src_logger.debug(`Imported FirestartrGithubRepository ${cr.metadata.name}`);
312539
312716
  break;
312540
312717
  case 'IMPORT_SKIP_PLAN':
312541
- github_repository_log('IMPORT_SKIP_PLAN');
312718
+ provisioner_src_logger.debug(`Imported skipped plan FirestartrGithubRepository ${cr.metadata.name}`);
312542
312719
  break;
312543
312720
  default:
312544
- github_repository_log('UNKNOWN');
312721
+ provisioner_src_logger.debug(`Finished for unknown operation ${this.get('operation')} for FirestartrGithubRepository`);
312545
312722
  }
312546
312723
  }
312547
312724
  }
@@ -312549,7 +312726,6 @@ class github_repository_FirestartrGithubRepository extends Resource {
312549
312726
  ;// CONCATENATED MODULE: ../provisioner/src/resources/github_membership/index.ts
312550
312727
 
312551
312728
 
312552
- const github_membership_log = src_default()('firestartr:provisioner:github_membership');
312553
312729
  class github_membership_FirestartrGithubMembership extends Resource {
312554
312730
  static kind() {
312555
312731
  return 'FirestartrGithubMembership';
@@ -312557,22 +312733,23 @@ class github_membership_FirestartrGithubMembership extends Resource {
312557
312733
  async preprocess() {
312558
312734
  switch (this.get('operation')) {
312559
312735
  case 'CREATE':
312560
- github_membership_log('CREATE');
312736
+ provisioner_src_logger.debug('Creating FirestartrGithubMembership');
312561
312737
  break;
312562
312738
  case 'UPDATE':
312563
- github_membership_log('UPDATED');
312739
+ provisioner_src_logger.debug('Updating FirestartrGithubMembership');
312564
312740
  break;
312565
312741
  case 'DELETE':
312566
- github_membership_log('DELETED');
312742
+ provisioner_src_logger.debug('Deleted FirestartrGithubMembership');
312567
312743
  break;
312568
312744
  case 'IMPORT':
312569
- github_membership_log('IMPORT');
312745
+ provisioner_src_logger.debug('Importing FirestartrGithubMembership');
312570
312746
  break;
312571
312747
  case 'IMPORT_SKIP_PLAN':
312572
- github_membership_log('IMPORT_SKIP_PLAN');
312748
+ provisioner_src_logger.debug('Import skipping plan for FirestartrGithubMembership');
312573
312749
  break;
312574
312750
  default:
312575
- github_membership_log('UNKNOWN');
312751
+ provisioner_src_logger.debug('Unknown operation for FirestartrGithubMembership ' +
312752
+ this.get('operation'));
312576
312753
  }
312577
312754
  }
312578
312755
  }
@@ -312580,7 +312757,6 @@ class github_membership_FirestartrGithubMembership extends Resource {
312580
312757
  ;// CONCATENATED MODULE: ../provisioner/src/resources/github_group/index.ts
312581
312758
 
312582
312759
 
312583
- const github_group_log = src_default()('firestartr:provisioner:github_group');
312584
312760
  class github_group_FirestartrGithubGroup extends Resource {
312585
312761
  static kind() {
312586
312762
  return 'FirestartrGithubGroup';
@@ -312588,22 +312764,23 @@ class github_group_FirestartrGithubGroup extends Resource {
312588
312764
  async preprocess() {
312589
312765
  switch (this.get('operation')) {
312590
312766
  case 'CREATE':
312591
- github_group_log('CREATE');
312767
+ provisioner_src_logger.debug('Creating FirestartrGithubGroup');
312592
312768
  break;
312593
312769
  case 'UPDATE':
312594
- github_group_log('UPDATED');
312770
+ provisioner_src_logger.debug('Updating FirestartrGithubGroup');
312595
312771
  break;
312596
312772
  case 'DELETE':
312597
- github_group_log('DELETED');
312773
+ provisioner_src_logger.debug('Deleted FirestartrGithubGroup');
312598
312774
  break;
312599
312775
  case 'IMPORT':
312600
- github_group_log('IMPORT');
312776
+ provisioner_src_logger.debug('Importing FirestartrGithubGroup');
312601
312777
  break;
312602
312778
  case 'IMPORT_SKIP_PLAN':
312603
- github_group_log('IMPORT_SKIP_PLAN');
312779
+ provisioner_src_logger.debug('Import skipping plan for FirestartrGithubGroup');
312604
312780
  break;
312605
312781
  default:
312606
- github_group_log('UNKNOWN');
312782
+ provisioner_src_logger.debug('Unknown operation for FirestartrGithubGroup ' +
312783
+ this.get('operation'));
312607
312784
  }
312608
312785
  }
312609
312786
  }
@@ -312611,14 +312788,13 @@ class github_group_FirestartrGithubGroup extends Resource {
312611
312788
  ;// CONCATENATED MODULE: ../provisioner/src/resources/terraform_module/index.ts
312612
312789
 
312613
312790
 
312614
- const terraform_module_log = src_default()('firestartr:provisioner:terraform_module');
312615
312791
  class FirestartrTerraformModule extends Resource {
312616
312792
  static kind() {
312617
312793
  return 'FirestartrTerraformModule';
312618
312794
  }
312619
312795
  async preprocess() {
312620
312796
  const operation = this.get('operation');
312621
- terraform_module_log(operation);
312797
+ provisioner_src_logger.debug(`Running operation '${operation}' for FirestartrTerraformModule`);
312622
312798
  switch (operation) {
312623
312799
  case 'CREATE':
312624
312800
  break;
@@ -312639,7 +312815,6 @@ class FirestartrTerraformModule extends Resource {
312639
312815
  ;// CONCATENATED MODULE: ../provisioner/src/resources/github_orgWebhook/index.ts
312640
312816
 
312641
312817
 
312642
- const github_orgWebhook_log = src_default()('firestartr:provisioner:github_orgWebhook');
312643
312818
  class github_orgWebhook_FirestartrGithubOrgWebhook extends Resource {
312644
312819
  static kind() {
312645
312820
  return 'FirestartrGithubOrgWebhook';
@@ -312647,22 +312822,23 @@ class github_orgWebhook_FirestartrGithubOrgWebhook extends Resource {
312647
312822
  async preprocess() {
312648
312823
  switch (this.get('operation')) {
312649
312824
  case 'CREATE':
312650
- github_orgWebhook_log('CREATE');
312825
+ provisioner_src_logger.debug('Creating FirestartrGithubOrgWebhook');
312651
312826
  break;
312652
312827
  case 'UPDATE':
312653
- github_orgWebhook_log('UPDATED');
312828
+ provisioner_src_logger.debug('Updating FirestartrGithubOrgWebhook');
312654
312829
  break;
312655
312830
  case 'DELETE':
312656
- github_orgWebhook_log('DELETED');
312831
+ provisioner_src_logger.debug('Deleted FirestartrGithubOrgWebhook');
312657
312832
  break;
312658
312833
  case 'IMPORT':
312659
- github_orgWebhook_log('IMPORT');
312834
+ provisioner_src_logger.debug('Importing FirestartrGithubOrgWebhook');
312660
312835
  break;
312661
312836
  case 'IMPORT_SKIP_PLAN':
312662
- github_orgWebhook_log('IMPORT_SKIP_PLAN');
312837
+ provisioner_src_logger.debug('Import skipping plan for FirestartrGithubOrgWebhook');
312663
312838
  break;
312664
312839
  default:
312665
- github_orgWebhook_log('UNKNOWN');
312840
+ provisioner_src_logger.debug('Unknown operation for FirestartrGithubOrgWebhook ' +
312841
+ this.get('operation'));
312666
312842
  }
312667
312843
  }
312668
312844
  }
@@ -312699,6 +312875,12 @@ async function runProvisioner(data, opts) {
312699
312875
  ? 'DELETE'
312700
312876
  : 'UNKNOWN';
312701
312877
  const resource = createInstanceOf(mainCr, operation, deps);
312878
+ if ('logStreamCallbacksCDKTF' in opts) {
312879
+ resource.setSynthStreamLogs(opts['logStreamCallbacksCDKTF']);
312880
+ }
312881
+ if ('logStreamCallbacksTF' in opts) {
312882
+ resource.setTFStreamLogs(opts['logStreamCallbacksTF']);
312883
+ }
312702
312884
  await resource.run();
312703
312885
  return resource;
312704
312886
  }
@@ -312717,7 +312899,6 @@ function createInstanceOf(entity, op, deps) {
312717
312899
 
312718
312900
 
312719
312901
 
312720
- const provisioner_messageLog_0 = src_default()('firestartr:provisioner:main');
312721
312902
  async function deploy(app) {
312722
312903
  const entity = catalog_common.io.fromYaml(external_fs_.readFileSync(catalog_common.environment.getFromEnvironment(catalog_common.types.envVars.cdktfEntityPath), 'utf8'));
312723
312904
  const deps = catalog_common.io.fromYaml(external_fs_.readFileSync(catalog_common.environment.getFromEnvironment(catalog_common.types.envVars.cdktfDepsPath), 'utf8'));
@@ -312726,7 +312907,7 @@ async function deploy(app) {
312726
312907
  : false;
312727
312908
  catalog_common.io.removeFile(catalog_common.environment.getFromEnvironment(catalog_common.types.envVars.cdktfEntityPath));
312728
312909
  catalog_common.io.removeFile(catalog_common.environment.getFromEnvironment(catalog_common.types.envVars.cdktfDepsPath));
312729
- provisioner_messageLog_0('Entity to provision: %O', entity);
312910
+ provisioner_src_logger.info(`Entity to provision: ${entity}`);
312730
312911
  const orgConfig = {
312731
312912
  bucket: catalog_common.environment.getFromEnvironment(catalog_common.types.envVars.s3Bucket),
312732
312913
  dynamodbTable: catalog_common.environment.getFromEnvironment(catalog_common.types.envVars.s3Lock),
@@ -312744,7 +312925,7 @@ async function deploy(app) {
312744
312925
  app.synth();
312745
312926
  }
312746
312927
  catch (e) {
312747
- void provisioner_messageLog_0('Error: deploy: %s', e);
312928
+ provisioner_src_logger.error('Error: deploy: %s', e);
312748
312929
  throw e;
312749
312930
  }
312750
312931
  }
@@ -312766,26 +312947,20 @@ if (process.env.RUN_PROVISIONER) {
312766
312947
  async function tryPublishApply(item, planOutput, kind) {
312767
312948
  try {
312768
312949
  if (!('firestartr.dev/last-state-pr' in item.metadata.annotations)) {
312769
- src_logger.debug('USER_FEEDBACK_PUBLISH_APPLY_NO_LAST_STATE', {
312770
- metadata: { name: item.metadata.name, kind },
312771
- });
312950
+ operator_src_logger.debug(`The user feedback for the '${kind}/${item.metadata.name}' apply operation could not be published because the last state was not found.`);
312772
312951
  return;
312773
312952
  }
312774
312953
  await publishApply(item, planOutput, kind);
312775
312954
  }
312776
312955
  catch (e) {
312777
- src_logger.error('USER_FEEDBACK_PUBLISH_APPLY_ERROR', {
312778
- metadata: { name: item.metadata.name, kind, error: e },
312779
- });
312956
+ operator_src_logger.error(`The user feedback for the '${kind}/${item.metadata.name}' apply operation failed to publish due to an error: '${e}'.`);
312780
312957
  }
312781
312958
  }
312782
312959
  async function tryPublishDestroy(item, destroyOutput) {
312783
312960
  let lastPr = null;
312784
312961
  try {
312785
312962
  const { repo, org } = extractPrInfo(item);
312786
- src_logger.debug('USER_FEEDBACK_PUBLISH_DESTROY', {
312787
- metadata: { item, repo, org },
312788
- });
312963
+ operator_src_logger.debug(`The user feedback for the '${item.kind}/${item.metadata.name}' destroy operation is being published for repository '${repo}' in organization '${org}'.`);
312789
312964
  lastPr = await github_0.pulls.filterPrBy({
312790
312965
  title: `hydrate: ${item.metadata.name}`,
312791
312966
  state: 'closed',
@@ -312796,9 +312971,7 @@ async function tryPublishDestroy(item, destroyOutput) {
312796
312971
  maxRetries: 3,
312797
312972
  });
312798
312973
  if (!lastPr) {
312799
- src_logger.debug('USER_FEEDBACK_PUBLISH_DESTROY_NO_LAST_STATE', {
312800
- metadata: { item },
312801
- });
312974
+ operator_src_logger.debug(`The user feedback for the '${item.kind}/${item.metadata.name}' destroy operation could not be published because the last state was not found.`);
312802
312975
  return;
312803
312976
  }
312804
312977
  const dividedOutput = github_0.pulls.divideCommentIntoChunks(destroyOutput, 250);
@@ -312816,20 +312989,14 @@ async function tryPublishDestroy(item, destroyOutput) {
312816
312989
  ${commentContent}
312817
312990
  \`\`\`
312818
312991
  </details>`;
312819
- src_logger.debug('USER_FEEDBACK_PUBLISH_COMMENT', {
312820
- metadata: { lastPr: lastPr.number, repo, org, item },
312821
- });
312992
+ operator_src_logger.debug(`The user feedback for item '${item.kind}/${item.metadata.name}' is being published as a comment on pull request '${lastPr.number}' for repository '${repo}' in organization '${org}'.`);
312822
312993
  await github_0.pulls.commentInPR(comment, lastPr.number, repo, org);
312823
- src_logger.debug('USER_FEEDBACK_PUBLISH_DESTROY_COMMENT', {
312824
- metadata: { lastPr: lastPr.number, item },
312825
- });
312994
+ operator_src_logger.debug(`The user feedback for the '${item.kind}/${item.metadata.name}' destroy operation is being published as a comment on pull request '${lastPr.number}'.`);
312826
312995
  currentCommentNo += 1;
312827
312996
  }
312828
312997
  }
312829
312998
  catch (e) {
312830
- src_logger.error('USER_FEEDBACK_PUBLISH_ERROR', {
312831
- metadata: { lastPr: lastPr.number, item, error: e },
312832
- });
312999
+ operator_src_logger.error(`An error occurred while publishing user feedback for item '${item.kind}/${item.metadata.name}' on pull request '${lastPr.number}': '${e}'.`);
312833
313000
  }
312834
313001
  }
312835
313002
  async function publishApply(item, applyOutput, kind) {
@@ -312866,9 +313033,7 @@ function tryCreateErrorSummary(title, errorMsg) {
312866
313033
  return summaryText;
312867
313034
  }
312868
313035
  catch (e) {
312869
- src_logger.error('USER_FEEDBACK_GETTING_ERROR_SUMMARY', {
312870
- metadata: { error: e, title, errorMsg },
312871
- });
313036
+ operator_src_logger.error(`An error occurred while getting the error summary for '${title}'. The error was '${e}', with the message: '${errorMsg}'.`);
312872
313037
  return `Error when getting error summary: ${e}`;
312873
313038
  }
312874
313039
  }
@@ -312890,9 +313055,7 @@ async function tryPublishError(item, reason, message) {
312890
313055
  await publishError(item, reason, message);
312891
313056
  }
312892
313057
  catch (e) {
312893
- src_logger.error('USER_FEEDBACK_TRY_PUBLISH_ERROR', {
312894
- metadata: { item, error: e, reason },
312895
- });
313058
+ operator_src_logger.error(`The user feedback for item '${item.kind}/${item.metadata.name}' failed to publish due to an error: '${e}'. Reason: '${reason}'.`);
312896
313059
  }
312897
313060
  }
312898
313061
  async function publishError(item, reason, message) {
@@ -312930,6 +313093,53 @@ ${commentContent}
312930
313093
  }
312931
313094
  }
312932
313095
 
313096
+ ;// CONCATENATED MODULE: ../operator/src/user-feedback-ops/gh-checkrun.ts
313097
+
313098
+ async function GHCheckRun(cmd, item) {
313099
+ const prInfo = gh_checkrun_extractPrInfo(item);
313100
+ if (!prInfo.prNumber) {
313101
+ throw new Error('TFCheckRun: prNumber not retrievable');
313102
+ }
313103
+ const checkRun = await github_0.feedback.createCheckRun(prInfo.org, prInfo.repo, helperCreateCheckRunName(cmd, item), {
313104
+ //Number(pr_number),
313105
+ pullNumber: Number(prInfo.prNumber),
313106
+ includeCheckRunComment: true,
313107
+ checkRunComment: `The Github ${item.kind} is being processed (cmd=${cmd}). Details: `,
313108
+ });
313109
+ checkRun.mdOptionsDetails({
313110
+ quotes: 'terraform',
313111
+ });
313112
+ checkRun.update('Initiating', 'queued');
313113
+ return {
313114
+ fnData: (d) => {
313115
+ checkRun.update(d.toString(), 'in_progress');
313116
+ },
313117
+ fnEnd: () => {
313118
+ checkRun.close('OK', true);
313119
+ },
313120
+ fnOnError: (err) => {
313121
+ checkRun.close('KO', false);
313122
+ },
313123
+ };
313124
+ }
313125
+ function helperCreateCheckRunName(cmd, item) {
313126
+ return `Github Provisioner / ${item.kind} - ${cmd}`;
313127
+ }
313128
+ function gh_checkrun_extractPrInfo(item) {
313129
+ const prInfo = item.metadata.annotations['firestartr.dev/last-state-pr'];
313130
+ const prNumber = prInfo.split('#')[1];
313131
+ if (!prNumber)
313132
+ throw new Error('No PR number found in CR');
313133
+ const orgRepo = prInfo.split('#')[0];
313134
+ const org = orgRepo.split('/')[0];
313135
+ if (!org)
313136
+ throw new Error('No org found in CR');
313137
+ const repo = orgRepo.split('/')[1];
313138
+ if (!repo)
313139
+ throw new Error('No repo found in CR');
313140
+ return { prNumber, repo, org };
313141
+ }
313142
+
312933
313143
  ;// CONCATENATED MODULE: ../operator/cdktf.ts
312934
313144
 
312935
313145
 
@@ -312939,8 +313149,8 @@ ${commentContent}
312939
313149
 
312940
313150
 
312941
313151
 
312942
- const cdktf_log = src_default()('firestartr:operator:cdktf');
312943
313152
  function processOperation(item, op, handler) {
313153
+ operator_src_logger.info(`Processing operation ${op} on ${item.kind}/${item.metadata?.name}`);
312944
313154
  try {
312945
313155
  switch (op) {
312946
313156
  case OperationType.UPDATED:
@@ -312962,7 +313172,7 @@ function processOperation(item, op, handler) {
312962
313172
  }
312963
313173
  }
312964
313174
  catch (e) {
312965
- cdktf_log(`Operation ${op} failed: ${e}`);
313175
+ operator_src_logger.error(`Operation ${op} failed: ${e}`);
312966
313176
  throw e;
312967
313177
  }
312968
313178
  }
@@ -313013,6 +313223,9 @@ async function* sync(item, op, handler) {
313013
313223
  };
313014
313224
  }
313015
313225
  async function* markedToDeletion(item, op, handler) {
313226
+ // here we store the current callbacks that
313227
+ // are being used (synth|tf-apply...)
313228
+ let checkRunCtl;
313016
313229
  try {
313017
313230
  void cleanTerraformState();
313018
313231
  const type = 'DELETING';
@@ -313037,15 +313250,38 @@ async function* markedToDeletion(item, op, handler) {
313037
313250
  status: 'True',
313038
313251
  message: 'Destroying process started',
313039
313252
  };
313040
- if (item.metadata.annotations['firestartr.dev/last-state-pr'] || false) {
313041
- await addDestroyCommitStatus(item, 'pending', 'Performing destroy operation...', `Terraform Destroy ${item.metadata.name}`);
313042
- }
313043
313253
  const deps = await handler.resolveReferences();
313254
+ const annotation = 'firestartr.dev/last-state-pr';
313255
+ const statePr = item?.metadata?.annotations?.[annotation];
313256
+ const hasStatePr = typeof statePr === 'string' && statePr.trim().length > 0;
313257
+ if (!hasStatePr) {
313258
+ operator_src_logger.warn(`CR ${item?.kind ?? 'UnknownKind'}/${item?.metadata?.name ?? 'unknown'} ` +
313259
+ `has no "${annotation}" annotation; skipping GitHub Check Runs (synth, terraform apply).`);
313260
+ }
313261
+ else {
313262
+ operator_src_logger.debug(`CR ${item.kind}/${item.metadata.name} uses "${annotation}" = ${statePr}`);
313263
+ }
313044
313264
  const destroyOutput = await provisioner.runProvisioner({
313045
313265
  mainCr: item,
313046
313266
  deps,
313047
313267
  }, {
313048
313268
  delete: true,
313269
+ ...(hasStatePr
313270
+ ? {
313271
+ logStreamCallbacksCDKTF: {
313272
+ prepare: async () => {
313273
+ checkRunCtl = await GHCheckRun('synth', item);
313274
+ return checkRunCtl;
313275
+ },
313276
+ },
313277
+ logStreamCallbacksTF: {
313278
+ prepare: async () => {
313279
+ checkRunCtl = await GHCheckRun('terraform destroy', item);
313280
+ return checkRunCtl;
313281
+ },
313282
+ },
313283
+ }
313284
+ : {}),
313049
313285
  });
313050
313286
  const output = destroyOutput.output;
313051
313287
  await tryPublishDestroy(item, output);
@@ -313071,10 +313307,11 @@ async function* markedToDeletion(item, op, handler) {
313071
313307
  status: 'True',
313072
313308
  message: e.toString(),
313073
313309
  };
313310
+ // if there is a current checkRun working
313311
+ // we close it with an error
313312
+ if (checkRunCtl)
313313
+ checkRunCtl.fnOnError(e);
313074
313314
  await handler.writeTerraformOutputInTfResult(item, e);
313075
- if (item.metadata.annotations['firestartr.dev/last-state-pr'] || false) {
313076
- await addDestroyCommitStatus(item, 'failure', 'Destroy operation failed', `Terraform Destroy ${item.metadata.name}`);
313077
- }
313078
313315
  void handler.error();
313079
313316
  }
313080
313317
  }
@@ -313093,6 +313330,9 @@ async function* nothing(item, op, handler) {
313093
313330
  * @param handler -
313094
313331
  */
313095
313332
  async function* doApply(item, op, handler) {
313333
+ // here we store the current callbacks that
313334
+ // are being used (synth|tf-apply...)
313335
+ let checkRunCtl;
313096
313336
  try {
313097
313337
  cleanTerraformState();
313098
313338
  yield {
@@ -313134,16 +313374,41 @@ async function* doApply(item, op, handler) {
313134
313374
  opts['create'] = true;
313135
313375
  }
313136
313376
  const deps = await handler.resolveReferences();
313137
- cdktf_log('Item %s has the following dependencies: %O', item.metadata.name, deps);
313138
- if (item.metadata.annotations['firestartr.dev/last-state-pr'] || false) {
313139
- await addApplyCommitStatus(item, 'pending', catalog_common.generic.getPrLinkFromAnnotationValue(item.metadata.annotations['firestartr.dev/last-state-pr']), 'Performing apply operation...', `Terraform Apply ${item.metadata.name}`);
313377
+ operator_src_logger.info(`Item ${item.metadata.name} has the following dependencies: ${deps}`);
313378
+ const annotation = 'firestartr.dev/last-state-pr';
313379
+ const statePr = item?.metadata?.annotations?.[annotation];
313380
+ const hasStatePr = typeof statePr === 'string' && statePr.trim().length > 0;
313381
+ if (!hasStatePr) {
313382
+ operator_src_logger.warn(`CR ${item?.kind ?? 'UnknownKind'}/${item?.metadata?.name ?? 'unknown'} ` +
313383
+ `has no "${annotation}" annotation; skipping GitHub Check Runs (synth, terraform apply).`);
313384
+ }
313385
+ else {
313386
+ operator_src_logger.debug(`CR ${item.kind}/${item.metadata.name} uses "${annotation}" = ${statePr}`);
313140
313387
  }
313141
313388
  const applyOutput = await provisioner.runProvisioner({
313142
313389
  mainCr: item,
313143
313390
  deps,
313144
- }, opts);
313391
+ }, {
313392
+ ...opts,
313393
+ ...(hasStatePr
313394
+ ? {
313395
+ logStreamCallbacksCDKTF: {
313396
+ prepare: async () => {
313397
+ checkRunCtl = await GHCheckRun('synth', item);
313398
+ return checkRunCtl;
313399
+ },
313400
+ },
313401
+ logStreamCallbacksTF: {
313402
+ prepare: async () => {
313403
+ checkRunCtl = await GHCheckRun('terraform apply', item);
313404
+ return checkRunCtl;
313405
+ },
313406
+ },
313407
+ }
313408
+ : {}),
313409
+ });
313145
313410
  await tryPublishApply(item, applyOutput?.data?.output, item.kind);
313146
- const terraformOutputJson = await provisioner.runTerraform(item, ['output', '-json']);
313411
+ const terraformOutputJson = await provisioner.runTerraform(item, ['output', '-json'], null);
313147
313412
  if (!terraformOutputJson) {
313148
313413
  throw new Error(`Terraform output is empty for ${item.kind}/${item.metadata.name}`);
313149
313414
  }
@@ -313171,9 +313436,6 @@ async function* doApply(item, op, handler) {
313171
313436
  message: 'doApply',
313172
313437
  };
313173
313438
  await handler.writeTerraformOutputInTfResult(item, output);
313174
- if (item.metadata.annotations['firestartr.dev/last-state-pr'] || false) {
313175
- await addApplyCommitStatus(item, 'success', catalog_common.generic.getPrLinkFromAnnotationValue(item.metadata.annotations['firestartr.dev/last-state-pr']), 'Apply operation completed', `Terraform Apply ${item.metadata.name}`);
313176
- }
313177
313439
  handler.success();
313178
313440
  }
313179
313441
  catch (e) {
@@ -313185,7 +313447,11 @@ async function* doApply(item, op, handler) {
313185
313447
  error = e;
313186
313448
  }
313187
313449
  await tryPublishApply(item, error, item.kind);
313188
- cdktf_log('Error applying item %s: %O', item.metadata.name, error);
313450
+ // if there is a current checkRun working
313451
+ // we close it with an error
313452
+ if (checkRunCtl)
313453
+ checkRunCtl.fnOnError(error);
313454
+ operator_src_logger.error(`Error applying item ${item.metadata.name}: ${error}`);
313189
313455
  yield {
313190
313456
  item,
313191
313457
  reason: op,
@@ -313207,9 +313473,6 @@ async function* doApply(item, op, handler) {
313207
313473
  status: 'False',
313208
313474
  message: error.toString(),
313209
313475
  };
313210
- if (item.metadata.annotations['firestartr.dev/last-state-pr'] || false) {
313211
- await addApplyCommitStatus(item, 'failure', catalog_common.generic.getPrLinkFromAnnotationValue(item.metadata.annotations['firestartr.dev/last-state-pr']), 'Apply operation failed', `Terraform Apply ${item.metadata.name}`);
313212
- }
313213
313476
  handler.error();
313214
313477
  if (error) {
313215
313478
  await handler.writeTerraformOutputInTfResult(item, error);
@@ -313326,57 +313589,72 @@ class TFPlanItemVersion extends TFPlanItem {
313326
313589
  }
313327
313590
  }
313328
313591
 
313592
+ ;// CONCATENATED MODULE: ../terraform_provisioner/src/logger.ts
313593
+
313594
+ /* harmony default export */ const terraform_provisioner_src_logger = (catalog_common.logger);
313595
+
313329
313596
  ;// CONCATENATED MODULE: ../terraform_provisioner/src/utils.ts
313330
313597
 
313331
313598
 
313332
- //import Debug from "debug"
313333
313599
 
313334
- //const infolog: Debug.Debugger = Debug('firestartr:operator:cmd:terraform')
313600
+
313335
313601
  async function utils_validate(path, secrets) {
313336
313602
  return await tfExec(path, ['validate'], secrets);
313337
313603
  }
313338
- async function init(path, secrets) {
313339
- return await tfExec(path, ['init'], secrets);
313604
+ async function init(path, secrets, stream) {
313605
+ return await tfExec(path, ['init'], secrets, ['-input=false'], stream);
313340
313606
  }
313341
- async function initFromModule(path, source, secrets) {
313342
- return tfExec(path, ['init', `-from-module=${source}`], secrets, []);
313607
+ async function initFromModule(path, source, secrets, stream) {
313608
+ return tfExec(path, ['init', `-from-module=${source}`], secrets, [], stream);
313343
313609
  }
313344
- async function plan(path, secrets, format, args = ['plan']) {
313345
- const plan = await tfExec(path, args.concat(format === 'json' ? ['-json'] : []), secrets);
313610
+ async function plan(path, secrets, format, args = ['plan'], stream) {
313611
+ terraform_provisioner_src_logger.info(`Running terraform plan with ${format} in path ${path}`);
313612
+ const plan = await tfExec(path, args.concat(format === 'json' ? ['-json'] : []), secrets, ['-input=false'], stream);
313346
313613
  if (format === 'json') {
313347
313614
  const tfPlan = planGet(plan);
313348
313615
  return tfPlan;
313349
313616
  }
313350
313617
  return plan;
313351
313618
  }
313352
- async function apply(path, secrets) {
313353
- return await tfExec(path, ['apply', '-auto-approve'], secrets);
313619
+ async function apply(path, secrets, stream) {
313620
+ terraform_provisioner_src_logger.debug(`Running terraform apply in path ${path}`);
313621
+ return await tfExec(path, ['apply', '-auto-approve'], secrets, ['-input=false'], stream);
313354
313622
  }
313355
- async function destroy(path, secrets) {
313356
- return await tfExec(path, ['destroy', '-auto-approve'], secrets);
313623
+ async function destroy(path, secrets, stream) {
313624
+ terraform_provisioner_src_logger.debug(`Running terraform destroy in path ${path}`);
313625
+ return await tfExec(path, ['destroy', '-auto-approve'], secrets, ['-input=false'], stream);
313357
313626
  }
313358
313627
  async function output(path, secrets) {
313628
+ terraform_provisioner_src_logger.debug(`Running terraform output in path ${path}`);
313359
313629
  return await tfExec(path, ['output', '-json'], secrets, []);
313360
313630
  }
313361
- async function tfExec(path, args, secrets, extraArgs = ['-input=false']) {
313631
+ async function tfExec(path, args, secrets, extraArgs = ['-input=false'], stream) {
313362
313632
  // Format to TF_VAR variables -> https://developer.hashicorp.com/terraform/cli/config/environment-variables#tf_var_name
313363
313633
  for (const secret of secrets) {
313364
313634
  process.env[`${secret.key}`] = secret.value;
313365
313635
  }
313636
+ terraform_provisioner_src_logger.info(`Spawning terraform process ['terraform ${args.concat(extraArgs).join(' ')}'] in path '${path}'`);
313366
313637
  process.env['TF_PLUGIN_CACHE_DIR'] = '/home/terraform-plugins-cache';
313367
313638
  return new Promise((ok, ko) => {
313368
- const tfProcess = (0,external_child_process_.spawn)('terraform', args.concat(extraArgs), { cwd: path });
313369
- tfProcess.stdout.pipe(process.stdout);
313370
- tfProcess.stderr.pipe(process.stderr);
313639
+ const tfProcess = (0,external_child_process_.spawn)('terraform', args.concat(extraArgs), {
313640
+ cwd: path,
313641
+ stdio: ['inherit', 'pipe', 'pipe'],
313642
+ });
313371
313643
  let output = '';
313372
313644
  let flagStdoutEnd = false;
313373
313645
  let flagStderrEnd = false;
313374
313646
  let outputErrors = '';
313375
313647
  tfProcess.stdout.on('data', (log) => {
313376
- output += catalog_common.io.stripAnsi(log.toString());
313648
+ const line = catalog_common.io.stripAnsi(log.toString());
313649
+ output += line;
313650
+ if (stream)
313651
+ stream.write(line);
313377
313652
  });
313378
313653
  tfProcess.stderr.on('data', (log) => {
313379
- outputErrors += catalog_common.io.stripAnsi(log.toString());
313654
+ const line = catalog_common.io.stripAnsi(log.toString());
313655
+ outputErrors += line;
313656
+ if (stream)
313657
+ stream.write(line);
313380
313658
  });
313381
313659
  tfProcess.stdout.on('end', () => {
313382
313660
  flagStdoutEnd = true;
@@ -313391,9 +313669,12 @@ async function tfExec(path, args, secrets, extraArgs = ['-input=false']) {
313391
313669
  await catalog_common.generic.sleep(500);
313392
313670
  }
313393
313671
  if (code !== 0) {
313394
- ko(output + outputErrors);
313672
+ terraform_provisioner_src_logger.error(`Terraform output ${path}: ${output + outputErrors}`);
313673
+ terraform_provisioner_src_logger.error(`Terraform output ${path}: ${[output, outputErrors].join('')}`);
313674
+ ko([output, outputErrors].join(''));
313395
313675
  }
313396
313676
  else {
313677
+ terraform_provisioner_src_logger.info(`Terraform output ${path}: ${output}`);
313397
313678
  ok(output);
313398
313679
  }
313399
313680
  });
@@ -313407,7 +313688,9 @@ async function configureGit(ghToken) {
313407
313688
  'url."https://' + ghToken + '@github.com".insteadOf',
313408
313689
  'https://github.com',
313409
313690
  ];
313410
- const gitProcess = spawn('git', options);
313691
+ const gitProcess = spawn('git', options, {
313692
+ stdio: ['inherit', 'pipe', 'pipe'],
313693
+ });
313411
313694
  let output = '';
313412
313695
  gitProcess.on('data', (log) => {
313413
313696
  output += common.io.stripAnsi(log.toString());
@@ -313762,6 +314045,7 @@ function fCheckString(keys, refs) {
313762
314045
 
313763
314046
 
313764
314047
 
314048
+
313765
314049
  class project_tf_TFProjectManager {
313766
314050
  constructor(ctx) {
313767
314051
  this.tfOutput = '';
@@ -313770,6 +314054,14 @@ class project_tf_TFProjectManager {
313770
314054
  this.tfVarsJsonWriter = new WriterTfVarsJson(ctx.values, ctx.references);
313771
314055
  this.secrets = ctx.secrets;
313772
314056
  }
314057
+ setStreamCallbacks(fnData, fnEnd, reopen = true) {
314058
+ if (reopen || !this.stream)
314059
+ this.stream = new external_stream_.PassThrough();
314060
+ this.stream.on('data', (data) => {
314061
+ fnData(data.toString());
314062
+ });
314063
+ this.stream.on('end', fnEnd);
314064
+ }
313773
314065
  getOutput() {
313774
314066
  return this.tfOutput;
313775
314067
  }
@@ -313780,10 +314072,10 @@ class project_tf_TFProjectManager {
313780
314072
  this.tfVarsJsonWriter.writeToTerraformProject(external_path_.join(this.projectPath, 'terraform.tfvars.json'));
313781
314073
  }
313782
314074
  async __init() {
313783
- this.tfOutput += await init(this.projectPath, this.secrets);
314075
+ this.tfOutput += await init(this.projectPath, this.secrets, this.stream);
313784
314076
  }
313785
314077
  async __initFromModule() {
313786
- this.tfOutput += await init(this.projectPath, this.secrets);
314078
+ this.tfOutput += await init(this.projectPath, this.secrets, this.stream);
313787
314079
  }
313788
314080
  async validate() {
313789
314081
  await this.__init();
@@ -313793,24 +314085,27 @@ class project_tf_TFProjectManager {
313793
314085
  await this.__init();
313794
314086
  if (format === 'json')
313795
314087
  this.tfOutput = null;
313796
- this.tfOutput = await plan(this.projectPath, this.secrets, format);
314088
+ this.tfOutput = await plan(this.projectPath, this.secrets, format, ['plan'], this.stream);
314089
+ if (this.stream)
314090
+ this.stream.end();
313797
314091
  }
313798
314092
  async planDestroy(format) {
313799
314093
  await this.__init();
313800
314094
  if (format === 'json')
313801
314095
  this.tfOutput = null;
313802
- this.tfOutput = await plan(this.projectPath, this.secrets, format, [
313803
- 'plan',
313804
- '-destroy',
313805
- ]);
314096
+ this.tfOutput = await plan(this.projectPath, this.secrets, format, ['plan', '-destroy'], this.stream);
313806
314097
  }
313807
314098
  async apply() {
313808
314099
  await this.__init();
313809
- this.tfOutput += await apply(this.projectPath, this.secrets);
314100
+ this.tfOutput += await apply(this.projectPath, this.secrets, this.stream);
314101
+ if (this.stream)
314102
+ this.stream.end();
313810
314103
  }
313811
314104
  async destroy() {
313812
314105
  await this.__init();
313813
- this.tfOutput += await destroy(this.projectPath, this.secrets);
314106
+ this.tfOutput += await destroy(this.projectPath, this.secrets, this.stream);
314107
+ if (this.stream)
314108
+ this.stream.end();
313814
314109
  }
313815
314110
  async output() {
313816
314111
  await this.__init();
@@ -313904,6 +314199,7 @@ var lib_ajv_default = /*#__PURE__*/__nccwpck_require__.n(lib_ajv);
313904
314199
 
313905
314200
 
313906
314201
 
314202
+
313907
314203
  class TFProjectManagerRemote {
313908
314204
  constructor(ctx) {
313909
314205
  this.tfOutput = '';
@@ -313916,6 +314212,14 @@ class TFProjectManagerRemote {
313916
314212
  getOutput() {
313917
314213
  return this.tfOutput;
313918
314214
  }
314215
+ setStreamCallbacks(fnData, fnEnd, reopen = true) {
314216
+ if (reopen || !this.stream)
314217
+ this.stream = new external_stream_.PassThrough();
314218
+ this.stream.on('data', (data) => {
314219
+ fnData(data.toString());
314220
+ });
314221
+ this.stream.on('end', fnEnd);
314222
+ }
313919
314223
  async build() {
313920
314224
  external_fs_.rmSync(this.projectPath, { recursive: true, force: true });
313921
314225
  await this.__configGit();
@@ -313947,19 +314251,25 @@ insteadOf = https://github.com`);
313947
314251
  async plan(format) {
313948
314252
  await this.__init();
313949
314253
  if (format === 'json') {
313950
- this.tfOutput = await plan(this.projectPath, this.secrets, format);
314254
+ this.tfOutput = await plan(this.projectPath, this.secrets, format, ['plan'], this.stream);
313951
314255
  }
313952
314256
  else {
313953
- this.tfOutput += await plan(this.projectPath, this.secrets, format);
314257
+ this.tfOutput += await plan(this.projectPath, this.secrets, format, ['plan'], this.stream);
313954
314258
  }
314259
+ if (this.stream)
314260
+ this.stream.end();
313955
314261
  }
313956
314262
  async apply() {
313957
314263
  await this.__init();
313958
- this.tfOutput += await apply(this.projectPath, this.secrets);
314264
+ this.tfOutput += await apply(this.projectPath, this.secrets, this.stream);
314265
+ if (this.stream)
314266
+ this.stream.end();
313959
314267
  }
313960
314268
  async destroy() {
313961
314269
  await this.__init();
313962
- this.tfOutput += await destroy(this.projectPath, this.secrets);
314270
+ this.tfOutput += await destroy(this.projectPath, this.secrets, this.stream);
314271
+ if (this.stream)
314272
+ this.stream.end();
313963
314273
  }
313964
314274
  async planDestroy(format) {
313965
314275
  await this.__init();
@@ -313988,6 +314298,7 @@ insteadOf = https://github.com`);
313988
314298
 
313989
314299
 
313990
314300
 
314301
+
313991
314302
  const terraform_provisioner_ajv = new (lib_ajv_default())();
313992
314303
  const terraform_provisioner_validate = terraform_provisioner_ajv.compile(terraform_provisioner_src_schema);
313993
314304
  function validateContext(context) {
@@ -314005,7 +314316,8 @@ async function run() {
314005
314316
  await execCommand(command, tfProject);
314006
314317
  }
314007
314318
  // Programatic API
314008
- async function runTerraformProvisioner(context, command = 'init') {
314319
+ async function runTerraformProvisioner(context, command = 'init', streaming) {
314320
+ terraform_provisioner_src_logger.info(`Running command ${command} on a ${context.type} project`);
314009
314321
  validateContext(context);
314010
314322
  let tfProject = {};
314011
314323
  if (context.type === 'Inline') {
@@ -314014,10 +314326,14 @@ async function runTerraformProvisioner(context, command = 'init') {
314014
314326
  else if (context.type === 'Remote') {
314015
314327
  tfProject = new TFProjectManagerRemote(context);
314016
314328
  }
314329
+ if (streaming) {
314330
+ tfProject.setStreamCallbacks(streaming.fnData, streaming.fnEnd);
314331
+ }
314017
314332
  const output = await execCommand(command, tfProject);
314018
314333
  return output;
314019
314334
  }
314020
314335
  async function execCommand(command, tfProject) {
314336
+ terraform_provisioner_src_logger.info(`Executing command ${command} on ${tfProject.projectPath}`);
314021
314337
  await tfProject.build();
314022
314338
  switch (command) {
314023
314339
  case 'init':
@@ -314126,6 +314442,52 @@ async function* errorPolicyCompatibility(syncPolicy, generalPolicy, item, op) {
314126
314442
  await tryPublishError(item, op, message);
314127
314443
  }
314128
314444
 
314445
+ ;// CONCATENATED MODULE: ../operator/src/user-feedback-ops/tf-checkrun.ts
314446
+
314447
+ async function TFCheckRun(cmd, item) {
314448
+ const prInfo = tf_checkrun_extractPrInfo(item);
314449
+ if (!prInfo.prNumber) {
314450
+ throw new Error('TFCheckRun: prNumber not retrievable');
314451
+ }
314452
+ const checkRun = await github_0.feedback.createCheckRun(prInfo.org, prInfo.repo, tf_checkrun_helperCreateCheckRunName(cmd), {
314453
+ //Number(pr_number),
314454
+ pullNumber: Number(prInfo.prNumber),
314455
+ includeCheckRunComment: true,
314456
+ checkRunComment: `The TFWorkspace is being processed (cmd=${cmd}). Details: `,
314457
+ });
314458
+ checkRun.mdOptionsDetails({
314459
+ quotes: 'terraform',
314460
+ });
314461
+ checkRun.update('Initiating', 'queued');
314462
+ return {
314463
+ fnData: (d) => {
314464
+ checkRun.update(d.toString(), 'in_progress');
314465
+ },
314466
+ fnEnd: () => {
314467
+ checkRun.close('OK', true);
314468
+ },
314469
+ fnOnError: (err) => {
314470
+ checkRun.close('KO', false);
314471
+ },
314472
+ };
314473
+ }
314474
+ function tf_checkrun_helperCreateCheckRunName(cmd) {
314475
+ return `TFWorkspace - ${cmd}`;
314476
+ }
314477
+ function tf_checkrun_extractPrInfo(item) {
314478
+ const prInfo = item.metadata.annotations['firestartr.dev/last-state-pr'];
314479
+ const prNumber = prInfo.split('#')[1];
314480
+ if (!prNumber)
314481
+ throw new Error('No PR number found in CR');
314482
+ const org = prInfo.split('#')[0].split('/')[0];
314483
+ if (!org)
314484
+ throw new Error('No org found in CR');
314485
+ const repo = prInfo.split('#')[0].split('/')[1];
314486
+ if (!repo)
314487
+ throw new Error('No repo found in CR');
314488
+ return { prNumber, repo, org };
314489
+ }
314490
+
314129
314491
  ;// CONCATENATED MODULE: ../operator/src/tfworkspaces/process-operation.ts
314130
314492
 
314131
314493
 
@@ -314136,6 +314498,7 @@ async function* errorPolicyCompatibility(syncPolicy, generalPolicy, item, op) {
314136
314498
 
314137
314499
 
314138
314500
 
314501
+
314139
314502
  const TF_PROJECTS_PATH = '/tmp/tfworkspaces';
314140
314503
  function process_operation_processOperation(item, op, handler) {
314141
314504
  try {
@@ -314169,7 +314532,7 @@ function process_operation_processOperation(item, op, handler) {
314169
314532
  }
314170
314533
  }
314171
314534
  catch (e) {
314172
- src_logger.error('TERRAFORM_PROCESSOR_OP_ERROR', { metadata: { op, error: e } });
314535
+ operator_src_logger.error(`The Terraform processor encountered an error during operation '${op}': '${e}'.`);
314173
314536
  throw e;
314174
314537
  }
314175
314538
  }
@@ -314202,9 +314565,7 @@ async function* doPlanJSONFormat(item, op, handler) {
314202
314565
  message: 'Planning process started',
314203
314566
  };
314204
314567
  const deps = await handler.resolveReferences();
314205
- src_logger.info('TERRAFORM_PROCESSOR_PLAN_ASSESS_DEPS', {
314206
- metadata: { item, deps },
314207
- });
314568
+ operator_src_logger.info(`The Terraform processor is planning to assess dependencies for item '${item.kind}/${item.metadata.name}' with dependencies: '${deps}'.`);
314208
314569
  const context = buildProvisionerContext(item, deps);
314209
314570
  let planType = 'plan-json';
314210
314571
  if ('deletionTimestamp' in item.metadata) {
@@ -314267,9 +314628,7 @@ async function* doPlanJSONFormat(item, op, handler) {
314267
314628
  }
314268
314629
  catch (e) {
314269
314630
  console.error(e);
314270
- src_logger.error('TERRAFORM_PROCESSOR_PLAN_OBSERVE_ERROR', {
314271
- metadata: { item, error: e },
314272
- });
314631
+ operator_src_logger.error(`The Terraform processor encountered an error while observing the plan for item '${item.kind}/${item.metadata.name}': '${e}'.`);
314273
314632
  yield {
314274
314633
  item,
314275
314634
  reason: op,
@@ -314355,9 +314714,7 @@ async function* process_operation_sync(item, op, handler, syncPolicy, generalPol
314355
314714
  message: 'Sync process started',
314356
314715
  };
314357
314716
  if (!syncPolicy) {
314358
- src_logger.debug('TERRAFORM_PROCESSOR_NO_SYNC_POLICY_ONLY_OBSERVE', {
314359
- metadata: { op, item },
314360
- });
314717
+ operator_src_logger.debug(`The Terraform processor is only observing item '${item.kind}/${item.metadata.name}' because no sync policy was found for operation '${op}'.`);
314361
314718
  yield* doPlanJSONFormat(item, op, handler);
314362
314719
  return;
314363
314720
  }
@@ -314376,9 +314733,7 @@ async function* process_operation_sync(item, op, handler, syncPolicy, generalPol
314376
314733
  break;
314377
314734
  }
314378
314735
  default: {
314379
- src_logger.debug('TERRAFORM_PROCESSOR_POLICY_NOT_SUPPORTED', {
314380
- metadata: { syncPolicy, item },
314381
- });
314736
+ operator_src_logger.debug(`The Terraform processor detected a sync policy '${syncPolicy}' for item '${item.kind}/${item.metadata.name}' that is not supported.`);
314382
314737
  yield* doPlanJSONFormat(item, op, handler);
314383
314738
  break;
314384
314739
  }
@@ -314503,6 +314858,7 @@ async function* process_operation_nothing(item, op, handler) {
314503
314858
  * @param handler -
314504
314859
  */
314505
314860
  async function* process_operation_doApply(item, op, handler) {
314861
+ const checkRunCtl = await TFCheckRun('apply', item);
314506
314862
  try {
314507
314863
  yield {
314508
314864
  item,
@@ -314549,14 +314905,9 @@ async function* process_operation_doApply(item, op, handler) {
314549
314905
  message: 'Provisioning process started',
314550
314906
  };
314551
314907
  const deps = await handler.resolveReferences();
314552
- src_logger.info('TERRAFORM_PROCESSOR_APPLY_ASSESS_DEPS', {
314553
- metadata: { item, deps },
314554
- });
314908
+ operator_src_logger.info(`The Terraform processor is applying and assessing dependencies for item '${item.kind}/${item.metadata.name}' with dependencies: '${deps}'.`);
314555
314909
  const context = buildProvisionerContext(item, deps);
314556
- if (item.metadata.annotations['firestartr.dev/last-state-pr'] || false) {
314557
- await addApplyCommitStatus(item, 'pending', catalog_common.generic.getPrLinkFromAnnotationValue(item.metadata.annotations['firestartr.dev/last-state-pr']), 'Performing apply operation...', `Terraform Apply ${item.metadata.name}`);
314558
- }
314559
- const applyOutput = await runTerraformProvisioner(context, 'apply');
314910
+ const applyOutput = await runTerraformProvisioner(context, 'apply', checkRunCtl);
314560
314911
  await tryPublishApply(item, applyOutput, 'TFWorkspace');
314561
314912
  const terraformOutputJson = await runTerraformProvisioner(context, 'output');
314562
314913
  if (!terraformOutputJson) {
@@ -314588,17 +314939,13 @@ async function* process_operation_doApply(item, op, handler) {
314588
314939
  message: 'doApply',
314589
314940
  };
314590
314941
  await handler.writeTerraformOutputInTfResult(item, output);
314591
- if (item.metadata.annotations['firestartr.dev/last-state-pr'] || false) {
314592
- await addApplyCommitStatus(item, 'success', catalog_common.generic.getPrLinkFromAnnotationValue(item.metadata.annotations['firestartr.dev/last-state-pr']), 'Apply operation completed', `Terraform Apply ${item.metadata.name}`);
314593
- }
314594
314942
  handler.success();
314595
314943
  }
314596
314944
  catch (e) {
314945
+ checkRunCtl.fnOnError(e);
314597
314946
  console.error(e);
314598
314947
  await tryPublishApply(item, e, 'TFWorkspace');
314599
- src_logger.error('TERRAFORM_PROCESSOR_APPLY_ERROR', {
314600
- metadata: { item, op, error: e },
314601
- });
314948
+ operator_src_logger.error(`The Terraform processor encountered an error during operation '${op}' for item '${item.kind}/${item.metadata.name}': '${e}'.`);
314602
314949
  yield {
314603
314950
  item,
314604
314951
  reason: op,
@@ -314620,9 +314967,6 @@ async function* process_operation_doApply(item, op, handler) {
314620
314967
  status: 'False',
314621
314968
  message: JSON.stringify(e),
314622
314969
  };
314623
- if (item.metadata.annotations['firestartr.dev/last-state-pr'] || false) {
314624
- await addApplyCommitStatus(item, 'failure', catalog_common.generic.getPrLinkFromAnnotationValue(item.metadata.annotations['firestartr.dev/last-state-pr']), 'Apply operation failed', `Terraform Apply ${item.metadata.name}`);
314625
- }
314626
314970
  handler.error();
314627
314971
  if (e) {
314628
314972
  await handler.writeTerraformOutputInTfResult(item, e);
@@ -314927,30 +315271,22 @@ async function tryAcquireOrRenewLease(namespace, leaseDurationSeconds) {
314927
315271
  const name = 'firestartr-lease';
314928
315272
  const currentPod = await getCurrentPod(namespace);
314929
315273
  try {
314930
- src_logger.debug('LEADER_ELECTION_TRYING_ACQUIRE_LEASE', {
314931
- metadata: { name, namespace },
314932
- });
315274
+ operator_src_logger.debug(`Attempting to acquire the leader election lease for '${name}' in namespace '${namespace}'.`);
314933
315275
  const lease = await k8sApi.readNamespacedLease(name, namespace);
314934
315276
  const weAreTheLeader = lease.body.metadata.ownerReferences[0].uid === currentPod.metadata.uid;
314935
315277
  if (!weAreTheLeader) {
314936
- src_logger.debug('LEADER_ELECTION_LEASE_ACQUIRED_BY_ANOTHER_POD', {
314937
- metadata: { name, namespace },
314938
- });
315278
+ operator_src_logger.debug(`Another pod has acquired the leader election lease for '${name}' in namespace '${namespace}'.`);
314939
315279
  throw new LeaseAcquisitionError('Lease already acquired by another pod');
314940
315280
  }
314941
315281
  lease.body.spec.acquireTime = new client_node_dist.V1MicroTime();
314942
315282
  lease.body.spec.renewTime = new client_node_dist.V1MicroTime();
314943
315283
  lease.body.spec.leaseDurationSeconds = 30;
314944
- src_logger.debug('LEADER_ELECTION_LEASE_RENEWING', {
314945
- metadata: { name, namespace },
314946
- });
315284
+ operator_src_logger.debug(`Renewing the leader election lease for '${name}' in namespace '${namespace}'.`);
314947
315285
  await k8sApi.replaceNamespacedLease(name, namespace, lease.body);
314948
315286
  }
314949
315287
  catch (err) {
314950
315288
  if (err.response && err.response.statusCode === 404) {
314951
- src_logger.debug('LEADER_ELECTION_LEASE_NOT_FOUND_CREATING', {
314952
- metadata: { name, namespace },
314953
- });
315289
+ operator_src_logger.debug(`The leader election lease for '${name}' in namespace '${namespace}' was not found. Creating a new one.`);
314954
315290
  const lease = {
314955
315291
  apiVersion: 'coordination.k8s.io/v1',
314956
315292
  kind: 'Lease',
@@ -314973,16 +315309,12 @@ async function tryAcquireOrRenewLease(namespace, leaseDurationSeconds) {
314973
315309
  },
314974
315310
  };
314975
315311
  await k8sApi.createNamespacedLease(namespace, lease);
314976
- src_logger.debug('LEADER_ELECTION_LEASE_CREATED', {
314977
- metadata: { name, namespace },
314978
- });
315312
+ operator_src_logger.debug(`A new leader election lease has been created for '${name}' in namespace '${namespace}'.`);
314979
315313
  }
314980
315314
  else {
314981
315315
  if (err.response)
314982
315316
  console.log(err.response);
314983
- src_logger.debug('LEADER_ELECTION_LEASE_RENEWAL_ERROR', {
314984
- metadata: { name, namespace, error: err },
314985
- });
315317
+ operator_src_logger.debug(`An error occurred while renewing the leader election lease for '${name}' in namespace '${namespace}': '${err}'.`);
314986
315318
  throw err;
314987
315319
  }
314988
315320
  }
@@ -314993,9 +315325,7 @@ async function tryAcquireOrRenewLease(namespace, leaseDurationSeconds) {
314993
315325
  async function acquireLease(namespace, cb, interval = 10000) {
314994
315326
  try {
314995
315327
  await tryAcquireOrRenewLease(namespace, interval / 1000);
314996
- src_logger.debug('LEADER_ELECTION_LEASE_ACQUIRED_EXEC_CALLBACK', {
314997
- metadata: { namespace },
314998
- });
315328
+ operator_src_logger.debug(`Successfully acquired the leader election lease in namespace '${namespace}'. Executing the callback.`);
314999
315329
  cb();
315000
315330
  }
315001
315331
  catch (err) {
@@ -315003,9 +315333,7 @@ async function acquireLease(namespace, cb, interval = 10000) {
315003
315333
  if (err instanceof LeaseAcquisitionError) {
315004
315334
  console.error(`Failed to acquire Lease, retrying in ${interval / 1000} seconds`);
315005
315335
  }
315006
- src_logger.silly('LEADER_ELECTION_LEASE_ACQUIRED_FAILED_RETRY', {
315007
- metadata: { retryIn: interval / 1000 },
315008
- });
315336
+ operator_src_logger.silly(`Failed to acquire the leader election lease; will retry in '${interval / 1000}' seconds.`);
315009
315337
  await setTimeout(() => acquireLease(namespace, cb), interval);
315010
315338
  }
315011
315339
  }
@@ -315034,7 +315362,7 @@ function processOperationPlan(item, op, handler) {
315034
315362
  }
315035
315363
  }
315036
315364
  catch (e) {
315037
- src_logger.error('TFWORKSPACE_PROCESSOR_PLAN_ERROR', {
315365
+ operator_src_logger.error('TFWORKSPACE_PROCESSOR_PLAN_ERROR', {
315038
315366
  metadata: { item, error: e, op },
315039
315367
  });
315040
315368
  throw e;
@@ -315085,7 +315413,7 @@ async function* doPlanPlainTextFormat(item, op, handler, action) {
315085
315413
  message: 'Planning process started',
315086
315414
  };
315087
315415
  const deps = await handler.resolveReferences();
315088
- src_logger.info('TFWORKSPACE_PROCESSOR_PLAN_ASSESS_DEPS', {
315416
+ operator_src_logger.info('TFWORKSPACE_PROCESSOR_PLAN_ASSESS_DEPS', {
315089
315417
  metadata: { item, deps },
315090
315418
  });
315091
315419
  const context = processOperationPlan_buildProvisionerContext(item, deps);
@@ -315121,7 +315449,7 @@ async function* doPlanPlainTextFormat(item, op, handler, action) {
315121
315449
  }
315122
315450
  catch (e) {
315123
315451
  await processOperationPlan_publishPlan(item, JSON.stringify(e));
315124
- src_logger.error('TFWORKSPACE_PROCESSOR_PLAN_OBSERVING_ERROR', {
315452
+ operator_src_logger.error('TFWORKSPACE_PROCESSOR_PLAN_OBSERVING_ERROR', {
315125
315453
  metadata: { item, error: e },
315126
315454
  });
315127
315455
  yield {
@@ -315179,7 +315507,7 @@ async function* processOperationPlan_doPlanJSONFormat(item, op, handler, action)
315179
315507
  message: 'Planning process started',
315180
315508
  };
315181
315509
  const deps = await handler.resolveReferences();
315182
- src_logger.info('TFWORKSPACE_PROCESSOR_PLAN_DO_PLAN_ASSESS_DEPS', {
315510
+ operator_src_logger.info('TFWORKSPACE_PROCESSOR_PLAN_DO_PLAN_ASSESS_DEPS', {
315183
315511
  metadata: { item, deps },
315184
315512
  });
315185
315513
  const context = processOperationPlan_buildProvisionerContext(item, deps);
@@ -315239,7 +315567,7 @@ async function* processOperationPlan_doPlanJSONFormat(item, op, handler, action)
315239
315567
  }
315240
315568
  catch (e) {
315241
315569
  console.error(e);
315242
- src_logger.error('TFWORKSPACE_PROCESSOR_PLAN_DO_PLAN_ERROR', {
315570
+ operator_src_logger.error('TFWORKSPACE_PROCESSOR_PLAN_DO_PLAN_ERROR', {
315243
315571
  metadata: { item, error: e },
315244
315572
  });
315245
315573
  yield {
@@ -315606,42 +315934,30 @@ async function ctx_buildContext(claim, namespace, command) {
315606
315934
  let cr = null;
315607
315935
  let deps = null;
315608
315936
  compute['resolveDeps'] = async () => {
315609
- src_logger.debug('TFWORKSPACE_RESOLVE_DEPS_FOR_CLAIM', {
315610
- metadata: { name: claim.name },
315611
- });
315937
+ operator_src_logger.debug(`The Terraform workspace is resolving dependencies for the claim '${claim.name}'.`);
315612
315938
  // First, we bring the previous CR, if any, to get the tfStateKey
315613
- src_logger.debug('TFWORKSPACE_RESOLVE_GET_PREVIOUS_CR', {
315614
- metadata: { name: claim.name },
315615
- });
315939
+ operator_src_logger.debug(`The Terraform workspace is resolving and getting the previous custom resource for claim '${claim.name}'.`);
315616
315940
  previousCR = await getCRfromClaimRef(claim.kind, claim.name, namespace);
315617
315941
  let tfStateKey = null;
315618
315942
  if (previousCR) {
315619
- src_logger.debug('TFWORKSPACE_RESOLVE_PREVIOUS_CR_FOUND', {
315620
- metadata: { name: claim.name },
315621
- });
315943
+ operator_src_logger.debug(`The Terraform workspace found a previous custom resource for claim '${claim.name}'.`);
315622
315944
  tfStateKey = previousCR.spec.firestartr.tfStateKey;
315623
315945
  }
315624
315946
  else
315625
- src_logger.debug('TFWORKSPACE_RESOLVE_PREVIOUS_CR_NOT_FOUND', {
315626
- metadata: { name: claim.name },
315627
- });
315947
+ operator_src_logger.debug(`The Terraform workspace did not find a previous custom resource for claim '${claim.name}'.`);
315628
315948
  // Then we render the claim passing a function to resolve the refs in the k8s API
315629
- src_logger.debug('TFWORKSPACE_RESOLVE_START_RENDERING', {
315630
- metadata: { name: claim.name },
315631
- });
315949
+ operator_src_logger.debug(`The Terraform workspace is starting the rendering process for claim '${claim.name}'.`);
315632
315950
  cr = await cdk8s_renderer.renderTfWorkspace(claim, tfStateKey, getTFWorkspaceRefs, namespace);
315633
315951
  cr['metadata']['namespace'] = namespace;
315634
- src_logger.debug('TFWORKSPACE_RESOLVE_CR_RENDERED', { metadata: { cr } });
315952
+ operator_src_logger.debug(`The Terraform workspace has finished rendering the custom resource '${cr.kind}/${cr.metadata.name}' in namespace '${cr.metadata.namespace}'.`);
315635
315953
  // Finally, we resolve the deps in the rendered CR
315636
315954
  deps = await resolve(cr, getItemByItemPath, getSecret, namespace);
315637
- src_logger.debug('TFWORKSPACE_RESOLVE_DEPS_RESOLVED', {
315638
- metadata: { name: claim.name },
315639
- });
315955
+ operator_src_logger.debug(`The Terraform workspace has finished resolving all dependencies for claim '${claim.name}'.`);
315640
315956
  };
315641
315957
  compute['dryRunExec'] = async () => {
315642
315958
  // We assume that if there is no previous CR, we are creating a new one
315643
315959
  // This will be preceeded by the resolveDeps function
315644
- src_logger.debug('TFWORKSPACE_DRY_RUN_VALIDATING_CR', { metadata: { cr } });
315960
+ operator_src_logger.debug(`The Terraform workspace is dry-running the validation for custom resource '${cr.kind}/${cr.metadata.name}' in namespace '${cr.metadata.namespace}'.`);
315645
315961
  if (!previousCR) {
315646
315962
  await createDryRun(cr, namespace);
315647
315963
  }
@@ -315649,17 +315965,15 @@ async function ctx_buildContext(claim, namespace, command) {
315649
315965
  cr.metadata.resourceVersion = previousCR.metadata.resourceVersion;
315650
315966
  await updateDryRun(cr, namespace);
315651
315967
  }
315652
- src_logger.debug('TFWORKSPACE_DRY_RUN_VALIDATED_CR', { metadata: { cr } });
315968
+ operator_src_logger.debug(`The Terraform workspace has finished validating the custom resource '${cr.kind}/${cr.metadata.name}' in namespace '${cr.metadata.namespace}'.`);
315653
315969
  };
315654
315970
  compute['runProvision'] = async () => {
315655
- src_logger.debug('TFWORKSPACE_RUN_PROVISION_INIT_TERRAFORM', {
315971
+ operator_src_logger.debug('TFWORKSPACE_RUN_PROVISION_INIT_TERRAFORM', {
315656
315972
  metadata: { cr, command },
315657
315973
  });
315658
315974
  const data = await buildProvisionerContext(cr, deps);
315659
315975
  const result = await runTerraformProvisioner(data, command);
315660
- src_logger.debug('TFWORKSPACE_RUN_PROVISION_FINISHED_TERRAFORM', {
315661
- metadata: { cr, command },
315662
- });
315976
+ operator_src_logger.debug(`The Terraform workspace has finished the '${command}' command for provisioning custom resource '${cr.kind}/${cr.metadata.name}' in namespace '${cr.metadata.namespace}'.`);
315663
315977
  return result;
315664
315978
  };
315665
315979
  return new Ctx({}, compute);
@@ -315916,7 +316230,7 @@ class CRStateMetrics {
315916
316230
  catch (err) {
315917
316231
  console.log(`CRStateMetrics: update ${err}`);
315918
316232
  this.onUpdate = false;
315919
- src_logger.error('CR_METRICS_UPDATE', { error: err });
316233
+ operator_src_logger.error('CR_METRICS_UPDATE', { error: err });
315920
316234
  }
315921
316235
  this.onUpdate = false;
315922
316236
  }
@@ -316010,7 +316324,7 @@ async function startCRStates(meter, kindList, namespace) {
316010
316324
 
316011
316325
  const deploymentName = catalog_common.environment.getFromEnvironment(catalog_common.types.envVars.operatorDeploymentName) || 'firestartr-firestartr-controller';
316012
316326
  const DEFAULT_OPERATOR_DEPLOY = (/* unused pure expression or super */ null && (deploymentName));
316013
- async function tfPlanner(claimFilePath, claim, namespace, debug, jobTtl, cmd = 'plan') {
316327
+ async function tfPlanner(claimFilePath, claim, namespace, debug, jobTtl = 300, cmd = 'plan') {
316014
316328
  const { kc } = await getConnection();
316015
316329
  const k8sApi = kc.makeApiClient(client.AppsV1Api);
316016
316330
  const batchV1Api = kc.makeApiClient(client.BatchV1Api);
@@ -316030,10 +316344,12 @@ async function tfPlanner(claimFilePath, claim, namespace, debug, jobTtl, cmd = '
316030
316344
  ? '/library/scripts/run.sh'
316031
316345
  : '/library/run.sh';
316032
316346
  job.spec = new client.V1JobSpec();
316033
- if (jobTtl)
316034
- job.spec.ttlSecondsAfterFinished = jobTtl;
316347
+ job.spec.ttlSecondsAfterFinished = jobTtl;
316035
316348
  job.spec.template = controllerDeploy.body.spec
316036
316349
  .template;
316350
+ // set activeDeadlineSeconds to force terminate jobs that exceed this time
316351
+ // see https://kubernetes.io/docs/concepts/workloads/controllers/job/#job-termination-and-cleanup
316352
+ job.spec.activeDeadlineSeconds = 3600;
316037
316353
  job.spec.template.spec.containers[0].command = [
316038
316354
  'sh',
316039
316355
  '-c',
@@ -316045,9 +316361,12 @@ async function tfPlanner(claimFilePath, claim, namespace, debug, jobTtl, cmd = '
316045
316361
  }
316046
316362
  job.spec.template.spec.restartPolicy = 'Never';
316047
316363
  job.metadata = metadata;
316364
+ // we exclude logs to be sent to datadog
316365
+ job.spec.template.metadata.annotations = {
316366
+ 'ad.datadoghq.com/logs_exclude': 'true',
316367
+ };
316048
316368
  await batchV1Api.createNamespacedJob(namespace, job);
316049
316369
  await copyClaimAndGetLogs(namespace, job.metadata.name, claimFilePath);
316050
- await batchV1Api.deleteNamespacedJob(job.metadata.name, namespace);
316051
316370
  }
316052
316371
  async function copyClaimAndGetLogs(namespace, jobName, sourcePath) {
316053
316372
  const { kc } = await getConnection();
@@ -316201,7 +316520,7 @@ function runOperator(opts) {
316201
316520
  importModeActive = importMode;
316202
316521
  if (importModeSkipPlan)
316203
316522
  importModeSkipPlanActive = importModeSkipPlan;
316204
- src_logger.info('START_OPERATOR', { ...opts });
316523
+ operator_src_logger.info(`started the operator with options ${JSON.stringify(opts)}`);
316205
316524
  const run = ignoreLease
316206
316525
  ? (_namespace, cb) => cb()
316207
316526
  : acquireLease;
@@ -316221,7 +316540,7 @@ function runOperator(opts) {
316221
316540
  .catch((e) => {
316222
316541
  console.log('exit catch kind', kind);
316223
316542
  console.error(e);
316224
- src_logger.error('CRASHED', { kind, error: e });
316543
+ operator_src_logger.error('CRASHED', { kind, error: e });
316225
316544
  })
316226
316545
  .finally(() => {
316227
316546
  console.log('kind', kind);
@@ -316253,7 +316572,7 @@ function getProvisionImplementation(plural) {
316253
316572
  }
316254
316573
  if (!implementation)
316255
316574
  throw new Error(`No implementation found for ${plural}`);
316256
- src_logger.info('GOT_PROVISION_IMPL', { kind: plural });
316575
+ operator_src_logger.info(`Retrieved the provision implementation for the kind '${plural}'`);
316257
316576
  return implementation;
316258
316577
  }
316259
316578