@firestartr/cli 1.47.0 → 1.48.0-hotfix-2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (28) hide show
  1. package/build/index.js +1332 -772
  2. package/build/packages/catalog_common/index.d.ts +8 -0
  3. package/build/packages/catalog_common/src/io/write.d.ts +2 -2
  4. package/build/packages/catalog_common/src/logger/index.d.ts +2 -0
  5. package/build/packages/catalog_common/src/logger/logger.d.ts +9 -0
  6. package/build/packages/catalog_common/src/logger/utils.d.ts +1 -0
  7. package/build/packages/catalog_common/src/types/envvars.d.ts +1 -0
  8. package/build/packages/cdk8s_renderer/src/validations/crSize.d.ts +2 -0
  9. package/build/packages/features_preparer/src/logger.d.ts +9 -0
  10. package/build/packages/features_renderer/index.d.ts +10 -1
  11. package/build/packages/features_renderer/src/auxiliar.d.ts +71 -0
  12. package/build/packages/features_renderer/src/render.d.ts +2 -0
  13. package/build/packages/github/index.d.ts +5 -0
  14. package/build/packages/github/src/check_run.d.ts +83 -0
  15. package/build/packages/github/src/logger.d.ts +9 -0
  16. package/build/packages/operator/src/logger.d.ts +2 -2
  17. package/build/packages/operator/src/user-feedback-ops/gh-checkrun.d.ts +5 -0
  18. package/build/packages/operator/src/user-feedback-ops/tf-checkrun.d.ts +5 -0
  19. package/build/packages/provisioner/src/cdktf.d.ts +3 -1
  20. package/build/packages/provisioner/src/logger.d.ts +9 -0
  21. package/build/packages/provisioner/src/resources/resource.d.ts +10 -0
  22. package/build/packages/provisioner/src/terraform.d.ts +7 -5
  23. package/build/packages/terraform_provisioner/index.d.ts +1 -1
  24. package/build/packages/terraform_provisioner/src/logger.d.ts +9 -0
  25. package/build/packages/terraform_provisioner/src/project_tf.d.ts +4 -0
  26. package/build/packages/terraform_provisioner/src/project_tf_remote.d.ts +4 -0
  27. package/build/packages/terraform_provisioner/src/utils.d.ts +8 -6
  28. package/package.json +1 -1
package/build/index.js CHANGED
@@ -288941,14 +288941,131 @@ var external_path_ = __nccwpck_require__(71017);
288941
288941
  var external_path_default = /*#__PURE__*/__nccwpck_require__.n(external_path_);
288942
288942
  // EXTERNAL MODULE: ../../node_modules/yaml/dist/index.js
288943
288943
  var yaml_dist = __nccwpck_require__(8447);
288944
- // EXTERNAL MODULE: ../../node_modules/debug/src/index.js
288945
- var src = __nccwpck_require__(67984);
288946
- var src_default = /*#__PURE__*/__nccwpck_require__.n(src);
288944
+ // EXTERNAL MODULE: ../../node_modules/winston/lib/winston.js
288945
+ var winston = __nccwpck_require__(66752);
288946
+ var winston_default = /*#__PURE__*/__nccwpck_require__.n(winston);
288947
+ ;// CONCATENATED MODULE: ../catalog_common/src/logger/utils.ts
288948
+ // https://siderite.dev/blog/jsonstringify-with-circular-references.html/#at2011170946
288949
+ function fixCircularReferences(o) {
288950
+ const weirdTypes = [
288951
+ Int8Array,
288952
+ Uint8Array,
288953
+ Uint8ClampedArray,
288954
+ Int16Array,
288955
+ Uint16Array,
288956
+ Int32Array,
288957
+ Uint32Array,
288958
+ BigInt64Array,
288959
+ BigUint64Array,
288960
+ Float32Array,
288961
+ Float64Array,
288962
+ ArrayBuffer,
288963
+ SharedArrayBuffer,
288964
+ DataView,
288965
+ ];
288966
+ const defs = new Map();
288967
+ return (k, v) => {
288968
+ if (k && v === o) {
288969
+ return `[${String(k)} is the same as original object]`;
288970
+ }
288971
+ if (v === undefined || v === null) {
288972
+ return v;
288973
+ }
288974
+ // Check for the Timeout constructor. This will also catch TimersList indirectly
288975
+ // since TimersList is part of the circular structure *of* a Timeout object.
288976
+ if (v && v.constructor && v.constructor.name === 'Timeout') {
288977
+ return '[Node.js internal timer object]';
288978
+ }
288979
+ // An alternative check could be `v instanceof Timeout` but the constructor name
288980
+ // check is more reliable for these internal types.
288981
+ const weirdType = weirdTypes.find((t) => v instanceof t);
288982
+ if (weirdType) {
288983
+ return weirdType.toString();
288984
+ }
288985
+ if (typeof v === 'function') {
288986
+ return v.toString();
288987
+ }
288988
+ if (v && typeof v === 'object') {
288989
+ const def = defs.get(v);
288990
+ if (def) {
288991
+ return `[${String(k)} is the same as ${def}]`;
288992
+ }
288993
+ defs.set(v, String(k));
288994
+ }
288995
+ return v;
288996
+ };
288997
+ }
288998
+
288999
+ ;// CONCATENATED MODULE: ../catalog_common/src/logger/logger.ts
289000
+
289001
+
289002
+ const validLogLevels = [
289003
+ 'error',
289004
+ 'warn',
289005
+ 'info',
289006
+ 'debug',
289007
+ 'verbose',
289008
+ 'silly',
289009
+ ];
289010
+ let initiated = false;
289011
+ let logger = null;
289012
+ // Type guard to check if a value is a valid LogLevel
289013
+ function isValidLogLevel(level) {
289014
+ return (typeof level === 'string' && validLogLevels.includes(level));
289015
+ }
289016
+ function initLogger() {
289017
+ if (initiated)
289018
+ return;
289019
+ const logLevel = process.env.LOG_LEVEL && isValidLogLevel(process.env.LOG_LEVEL)
289020
+ ? process.env.LOG_LEVEL
289021
+ : 'info';
289022
+ logger = winston_default().createLogger({
289023
+ level: logLevel,
289024
+ exitOnError: false,
289025
+ format: winston.format.combine(winston.format.timestamp({ format: 'YYYY-MM-DD HH:mm:ss' }), winston.format.json()),
289026
+ transports: [
289027
+ new winston.transports.Console({
289028
+ level: logLevel,
289029
+ }),
289030
+ ],
289031
+ });
289032
+ initiated = true;
289033
+ }
289034
+ function doLog(level, args) {
289035
+ initLogger();
289036
+ const [message, data] = args;
289037
+ let finalMessage = message;
289038
+ if (data) {
289039
+ const fx = fixCircularReferences(data.metadata);
289040
+ try {
289041
+ finalMessage =
289042
+ finalMessage + ' | ' + JSON.stringify(data?.metadata, fx, 2);
289043
+ }
289044
+ catch (err) {
289045
+ console.error(`Serializing ${message}: ${err}`);
289046
+ return;
289047
+ }
289048
+ }
289049
+ logger[level].apply(logger, [finalMessage]);
289050
+ }
289051
+ const logger_log = {
289052
+ error: (...args) => doLog('error', args),
289053
+ warn: (...args) => doLog('warn', args),
289054
+ info: (...args) => doLog('info', args),
289055
+ debug: (...args) => doLog('debug', args),
289056
+ verbose: (...args) => doLog('verbose', args),
289057
+ silly: (...args) => doLog('silly', args),
289058
+ };
289059
+ /* harmony default export */ const logger_logger = (logger_log);
289060
+
289061
+ ;// CONCATENATED MODULE: ../catalog_common/src/logger/index.ts
289062
+
289063
+ /* harmony default export */ const src_logger = (logger_logger);
289064
+
288947
289065
  ;// CONCATENATED MODULE: ../catalog_common/src/io/common.ts
288948
289066
 
288949
289067
 
288950
289068
 
288951
- const messageLog = src_default()('firestartr:catalog_common:io:common');
288952
289069
  const ComponentPaths = (/* unused pure expression or super */ null && ([
288953
289070
  'apiVersion',
288954
289071
  'kind',
@@ -289033,25 +289150,25 @@ function transformKind(kind) {
289033
289150
  }
289034
289151
  }
289035
289152
  function getPath(kind, name, catalogPath) {
289036
- messageLog(`Getting path for kind ${kind} and name ${name} in catalog path ${catalogPath}`);
289153
+ src_logger.debug(`Getting path for kind ${kind} and name ${name} in catalog path ${catalogPath}`);
289037
289154
  return external_path_.join(catalogPath, transformKind(kind), name + '.yaml');
289038
289155
  }
289039
289156
  function getKindPath(kind, catalogPath) {
289040
- messageLog(`Getting path for kind ${kind} in catalog path ${catalogPath}`);
289157
+ src_logger.debug(`Getting path for kind ${kind} in catalog path ${catalogPath}`);
289041
289158
  return external_path_.join(catalogPath, transformKind(kind));
289042
289159
  }
289043
289160
  function fromYaml(data) {
289044
289161
  const result = yaml_dist.parse(data);
289045
- messageLog('Loading YAML data: %O', result);
289162
+ src_logger.debug('Loading YAML data: %O', result);
289046
289163
  return result;
289047
289164
  }
289048
289165
  function toYaml(data, opts = {}) {
289049
- messageLog('opts', opts);
289166
+ src_logger.debug('opts', opts);
289050
289167
  const result = yaml_dist.stringify(data);
289051
289168
  return result;
289052
289169
  }
289053
289170
  function dumpYaml(data) {
289054
- messageLog('Dumping object data to YAML %O', data);
289171
+ src_logger.debug('Dumping object data to YAML %O', data);
289055
289172
  return yaml_dist.stringify(data);
289056
289173
  }
289057
289174
 
@@ -289059,7 +289176,6 @@ function dumpYaml(data) {
289059
289176
  var external_child_process_ = __nccwpck_require__(32081);
289060
289177
  ;// CONCATENATED MODULE: ../catalog_common/src/generic/random.ts
289061
289178
 
289062
- const random_messageLog = src_default()('firestartr:catalog_common:generic:random');
289063
289179
  function randomString(length = 10) {
289064
289180
  let result = '';
289065
289181
  const characters = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789';
@@ -289069,7 +289185,7 @@ function randomString(length = 10) {
289069
289185
  result += characters.charAt(Math.floor(Math.random() * charactersLength));
289070
289186
  counter += 1;
289071
289187
  }
289072
- random_messageLog('Generated random string %s', result);
289188
+ src_logger.debug(`Generated random string ${result}`);
289073
289189
  return result;
289074
289190
  }
289075
289191
  function shuffleArray(array) {
@@ -289104,17 +289220,16 @@ function shuffleObject(obj, shuffleArrays = false) {
289104
289220
 
289105
289221
 
289106
289222
 
289107
- const clone_catalog_messageLog = src_default()('firestartr:catalog_common:io:clone_catalog');
289108
289223
  function cloneCatalog(catalogPath, dest = _calculateRandomDestination()) {
289109
- clone_catalog_messageLog(`Cloning catalog from ${catalogPath} to ${dest}`);
289224
+ src_logger.info(`Cloning catalog from ${catalogPath} to ${dest}`);
289110
289225
  return new Promise((ok, ko) => {
289111
289226
  (0,external_child_process_.exec)(`cp -a ${catalogPath} ${dest}`, (error, _stdout, _stderr) => {
289112
289227
  if (error) {
289113
- clone_catalog_messageLog(`Error cloning catalog: ${error.message}`);
289228
+ src_logger.error(`Error cloning catalog: ${error.message}`);
289114
289229
  return ko(error.message);
289115
289230
  }
289116
289231
  else {
289117
- clone_catalog_messageLog(`Catalog cloned to successfully to ${dest}`);
289232
+ src_logger.info(`Catalog cloned to successfully to ${dest}`);
289118
289233
  return ok(dest);
289119
289234
  }
289120
289235
  });
@@ -289133,29 +289248,28 @@ var external_fs_default = /*#__PURE__*/__nccwpck_require__.n(external_fs_);
289133
289248
 
289134
289249
 
289135
289250
 
289136
- const write_messageLog = src_default()('firestartr:catalog_common:io:write');
289137
289251
  function writeEntity(entity, path) {
289138
289252
  try {
289139
289253
  entity['metadata']['annotations']['fire-starter.dev/timestamp'] =
289140
289254
  Math.floor(Date.now() / 1000).toString();
289141
289255
  //If we have an status, we remove it
289142
- write_messageLog(`Writing to catalog ${path} entity %O`, entity);
289256
+ src_logger.debug(`Writing to catalog ${path} entity ${entity}`);
289143
289257
  external_fs_.writeFileSync(getPath(entity['kind'], entity['metadata']['name'], path), dumpYaml(entity));
289144
289258
  }
289145
289259
  catch (err) {
289146
- write_messageLog('Error writing entity, error %O', err);
289260
+ src_logger.error(`Error writing entity '${entity.kind}', error ${err}`);
289147
289261
  throw `writeEntity: ${entity.kind} ${err}`;
289148
289262
  }
289149
289263
  }
289150
289264
  function writeClaim(claim, claimsPath) {
289151
289265
  try {
289152
289266
  const kindFolder = `${claim['kind']}s`.toLowerCase().replace('claim', '');
289153
- write_messageLog(`Writing to gitops ${claimsPath}/${kindFolder} claim %O`, claim);
289267
+ src_logger.debug(`Writing to gitops ${claimsPath}/${kindFolder} claim ${claim}`);
289154
289268
  external_fs_.mkdirSync(external_path_.join(claimsPath, kindFolder), { recursive: true });
289155
289269
  external_fs_.writeFileSync(getPathClaim(claim['kind'], claim['name'], claimsPath), dumpYaml(claim));
289156
289270
  }
289157
289271
  catch (err) {
289158
- write_messageLog('Error writing claim, error %O', err);
289272
+ src_logger.error(`Error writing claim, error ${err}`);
289159
289273
  throw `writeClaim: ${claim.kind} ${err}`;
289160
289274
  }
289161
289275
  }
@@ -289172,7 +289286,7 @@ function writeYamlFile(fileName, data, pathFile = '/tmp') {
289172
289286
  external_fs_.writeFileSync(external_path_.join(pathFile, fileName), dumpYaml(data));
289173
289287
  }
289174
289288
  catch (err) {
289175
- write_messageLog('Error writing yaml file, error %O', err);
289289
+ src_logger.error(`Error writing yaml file, error ${err}`);
289176
289290
  throw `writeYamlFile: ${fileName} ${err}`;
289177
289291
  }
289178
289292
  }
@@ -289181,23 +289295,23 @@ function getPathClaim(kind, name, claimsPath) {
289181
289295
  }
289182
289296
  function renameEntity(entity, catalogPath, oldname) {
289183
289297
  try {
289184
- write_messageLog('Renaming oldname %s in %O', oldname, entity);
289298
+ src_logger.debug(`Renaming oldname ${oldname} in ${entity}`);
289185
289299
  const oldPath = getPath(entity.kind, oldname, catalogPath);
289186
289300
  const newPath = getPath(entity.kind, entity.metadata.name, catalogPath);
289187
289301
  external_fs_.renameSync(oldPath, newPath);
289188
289302
  }
289189
289303
  catch (err) {
289190
- write_messageLog('Error writing entity, error %O', err);
289304
+ src_logger.error(`Error writing entity, error ${err}`);
289191
289305
  throw `renameEntity: ${entity.kind} ${err}`;
289192
289306
  }
289193
289307
  }
289194
289308
  function removeEntity(entity, catalogPath) {
289195
289309
  try {
289196
- write_messageLog(`Removing entity ${entity.kind}/${entity.metadata.name} in catalog ${catalogPath}`);
289310
+ src_logger.debug(`Removing entity ${entity.kind}/${entity.metadata.name} in catalog ${catalogPath}`);
289197
289311
  external_fs_.rmSync(getPath(entity.kind, entity.metadata.name, catalogPath));
289198
289312
  }
289199
289313
  catch (err) {
289200
- write_messageLog(`Error removing entity ${entity.kind}/${entity.metadata.name} in catalog ${catalogPath}: ${err}`);
289314
+ src_logger.error(`Error removing entity ${entity.kind}/${entity.metadata.name} in catalog ${catalogPath}: ${err}`);
289201
289315
  throw `removeEntity: ${entity.kind} ${err}`;
289202
289316
  }
289203
289317
  }
@@ -289213,21 +289327,21 @@ function moveFile(oldPath, newPath) {
289213
289327
  external_fs_.cpSync(oldPath, newPath);
289214
289328
  external_fs_.rmSync(oldPath);
289215
289329
  }
289216
- function writeFunctionLog(functionName, log) {
289330
+ function writeFunctionLog(functionName, logStream) {
289217
289331
  try {
289218
- external_fs_.writeFileSync(external_path_.join('/tmp', `${functionName}.${randomString(5)}.log`), log + '\n');
289332
+ external_fs_.writeFileSync(external_path_.join('/tmp', `${functionName}.${randomString(5)}.log`), logStream + '\n');
289219
289333
  }
289220
289334
  catch (err) {
289221
- write_messageLog('Error writing log, error %O', err);
289335
+ src_logger.error(`Error writing log, error ${err}`);
289222
289336
  throw `writeLog: ${functionName} ${err}`;
289223
289337
  }
289224
289338
  }
289225
- function writeLogFile(fileName, log) {
289339
+ function writeLogFile(fileName, logStream) {
289226
289340
  try {
289227
- external_fs_.appendFileSync(external_path_.join('/tmp', fileName + '.log'), log + '\n');
289341
+ external_fs_.appendFileSync(external_path_.join('/tmp', fileName + '.log'), logStream + '\n');
289228
289342
  }
289229
289343
  catch (err) {
289230
- write_messageLog('Error writing log, error %O', err);
289344
+ src_logger.error(`Error writing log, error ${err}`);
289231
289345
  throw `writeLog: ${fileName} ${err}`;
289232
289346
  }
289233
289347
  }
@@ -289237,7 +289351,6 @@ function writeLogFile(fileName, log) {
289237
289351
 
289238
289352
 
289239
289353
 
289240
- const read_messageLog = src_default()('firestartr:catalog_common:io:read');
289241
289354
  function readEntity(kind, name, catalogPaths) {
289242
289355
  try {
289243
289356
  if (typeof catalogPaths === 'string') {
@@ -289246,7 +289359,7 @@ function readEntity(kind, name, catalogPaths) {
289246
289359
  let data = false;
289247
289360
  for (const catalogPath of catalogPaths) {
289248
289361
  try {
289249
- read_messageLog(`Reading entity ${kind}/${name} from catalog ${catalogPath}`);
289362
+ src_logger.debug(`Reading entity ${kind}/${name} from catalog ${catalogPath}`);
289250
289363
  const entityPath = getPath(kind, name, catalogPath);
289251
289364
  if (external_fs_.existsSync(entityPath)) {
289252
289365
  if (data) {
@@ -289256,7 +289369,7 @@ function readEntity(kind, name, catalogPaths) {
289256
289369
  }
289257
289370
  }
289258
289371
  catch (err) {
289259
- read_messageLog('readEntity: cached error %s', err);
289372
+ src_logger.debug('readEntity: cached error %s', err);
289260
289373
  if (err === 'DUPLICATED') {
289261
289374
  throw `Error reading entity: Duplicated ${kind}/${name} in ${catalogPaths.join(', ')}`;
289262
289375
  }
@@ -289268,7 +289381,7 @@ function readEntity(kind, name, catalogPaths) {
289268
289381
  return fromYaml(data);
289269
289382
  }
289270
289383
  catch (err) {
289271
- read_messageLog(err);
289384
+ src_logger.error(err);
289272
289385
  throw `readEntity->: ${kind}/${name}: ${err}`;
289273
289386
  }
289274
289387
  }
@@ -289276,13 +289389,13 @@ function listByKind(kind, catalogPaths, callback, exclude = []) {
289276
289389
  if (typeof catalogPaths === 'string') {
289277
289390
  catalogPaths = [catalogPaths];
289278
289391
  }
289279
- read_messageLog('CATALOGS_PATHS_ %O', catalogPaths);
289392
+ src_logger.debug(`CATALOGS_PATHS_ ${catalogPaths}`);
289280
289393
  const list = [];
289281
289394
  catalogPaths.forEach((catalogPath) => {
289282
289395
  list.push(...external_fs_.readdirSync(getKindPath(kind, catalogPath)));
289283
289396
  });
289284
- read_messageLog('LIST_ %O', list);
289285
- read_messageLog(`Listing entities of kind ${kind} from catalogs`);
289397
+ src_logger.debug(`LIST_ ${list}`);
289398
+ src_logger.debug(`Listing entities of kind ${kind} from catalogs`);
289286
289399
  return list
289287
289400
  .filter((file) => file.match(/\.yaml$/))
289288
289401
  .filter((file) => exclude.indexOf(file.replace(/\.yaml/, '')) === -1)
@@ -290213,6 +290326,9 @@ class CsvWriter {
290213
290326
  }
290214
290327
  /* harmony default export */ const csv_generator = (CsvWriter);
290215
290328
 
290329
+ // EXTERNAL MODULE: ../../node_modules/debug/src/index.js
290330
+ var src = __nccwpck_require__(67984);
290331
+ var src_default = /*#__PURE__*/__nccwpck_require__.n(src);
290216
290332
  ;// CONCATENATED MODULE: ../catalog_common/src/generic/logger.ts
290217
290333
 
290218
290334
 
@@ -290275,9 +290391,8 @@ var lodash_default = /*#__PURE__*/__nccwpck_require__.n(lodash);
290275
290391
 
290276
290392
 
290277
290393
  const { camelCase } = (lodash_default());
290278
- const name_log = src_default()('firestartr:catalog_common:generic:name');
290279
290394
  function normalizeName(name) {
290280
- name_log('Normalizing name %s', name);
290395
+ src_logger.debug(`Normalizing name ${name}`);
290281
290396
  return name.replace(/[^a-z0-9]/gi, '-').toLowerCase();
290282
290397
  }
290283
290398
  function transformKeysToCamelCase(obj) {
@@ -290452,6 +290567,7 @@ var envVars;
290452
290567
  envVars["githubAppInstallationIdPrefapp"] = "GITHUB_APP_INSTALLATION_ID_PREFAPP";
290453
290568
  envVars["githubAppPemFile"] = "GITHUB_APP_PEM_FILE";
290454
290569
  // ---- PREFAPP BOT VARIABLES -----------------------------------------------
290570
+ envVars["avoidPAT"] = "AVOID_PAT";
290455
290571
  envVars["githubAppPatPrefapp"] = "PREFAPP_BOT_PAT";
290456
290572
  // ---- GENERAL CLI VARIABLES -----------------------------------------------
290457
290573
  envVars["firestartrImageKind"] = "FIRESTARTR_IMAGE_KIND";
@@ -290618,7 +290734,6 @@ const ExternalSecretsApiGroup = 'external-secrets.io';
290618
290734
 
290619
290735
  ;// CONCATENATED MODULE: ../catalog_common/src/environment/index.ts
290620
290736
 
290621
- const environment_messageLog = src_default()('firestartr:catalog_common:environment');
290622
290737
  function getFromEnvironment(envVar) {
290623
290738
  return process.env[envVar];
290624
290739
  }
@@ -290636,7 +290751,7 @@ function getFromEnvironmentAsBoolean(envVar) {
290636
290751
  }
290637
290752
  function checkExistOnEnvironment(envVar) {
290638
290753
  const environmentValue = getFromEnvironment(envVar);
290639
- environment_messageLog(`Checking if environment variable ${envVar} exists: ${environmentValue}`);
290754
+ src_logger.debug(`Checking if environment variable ${envVar} exists: ${environmentValue}`);
290640
290755
  if (!environmentValue || environmentValue === '') {
290641
290756
  return false;
290642
290757
  }
@@ -290682,30 +290797,29 @@ const fullMembersTeam = getFromEnvironmentWithDefault(envVars.fullOrgGroup, `${o
290682
290797
  ;// CONCATENATED MODULE: ../catalog_common/src/features/tarballs.ts
290683
290798
 
290684
290799
 
290685
- const tarballs_messageLog = src_default()('firestartr:catalog_common:features:tarballs');
290686
290800
  function getFeatureZipDownloadPath(featureName, version, owner, repo) {
290687
290801
  const featureDownloadPath = `/tmp/${basicFeaturePath(featureName, version, owner, repo)}-zipball.zip`;
290688
- tarballs_messageLog('Feature tarball download path %s', featureDownloadPath);
290802
+ src_logger.debug(`Feature tarball download path ${featureDownloadPath}`);
290689
290803
  return featureDownloadPath;
290690
290804
  }
290691
290805
  function removeFeatureTarball(featureName, version, owner, repo) {
290692
290806
  const featurePath = getFeatureZipDownloadPath(featureName, version, owner, repo);
290693
- tarballs_messageLog('Removing feature tarball %s', featurePath);
290807
+ src_logger.debug(`Removing feature tarball ${featurePath}`);
290694
290808
  external_fs_.unlinkSync(featurePath);
290695
- tarballs_messageLog(`Removed tarball for feature ${featureName} and version ${version}: ${featurePath}`);
290809
+ src_logger.debug(`Removed tarball for feature ${featureName} and version ${version}: ${featurePath}`);
290696
290810
  }
290697
290811
  function featureTarballExists(featureName, version, owner, repo) {
290698
290812
  const featurePath = getFeatureZipDownloadPath(featureName, version, owner, repo);
290699
290813
  const exists = external_fs_.existsSync(featurePath);
290700
- tarballs_messageLog(`Tarball ${featurePath} exists? ${exists}`);
290814
+ src_logger.debug(`Tarball ${featurePath} exists? ${exists}`);
290701
290815
  return exists;
290702
290816
  }
290703
290817
  function getFeaturesExtractPath(featureName, version, owner, repo, options = {}) {
290704
290818
  const { createIfNotExists } = options;
290705
290819
  const extractPath = `/tmp/${basicFeaturePath(featureName, version, owner, repo)}-extract`;
290706
- tarballs_messageLog('Extract path %s', extractPath);
290820
+ src_logger.debug(`Extract path ${extractPath}`);
290707
290821
  if (createIfNotExists && !external_fs_.existsSync(extractPath)) {
290708
- tarballs_messageLog('Extract path %s does not exist, creating', extractPath);
290822
+ src_logger.debug(`Extract path ${extractPath} does not exist, creating`);
290709
290823
  external_fs_.mkdirSync(extractPath, { recursive: true });
290710
290824
  }
290711
290825
  return extractPath;
@@ -290722,17 +290836,16 @@ function trasformLeg(leg) {
290722
290836
 
290723
290837
 
290724
290838
 
290725
- const features_io_messageLog = src_default()('firestartr:catalog_common:features:features_io');
290726
290839
  function getFeatureRenderedPathForEntity(entity, featureName, basePath = '/tmp') {
290727
290840
  const entityFolderName = `${entity.metadata.name}`.toLowerCase();
290728
290841
  return external_path_default().join(basePath, entityFolderName, featureName);
290729
290842
  }
290730
290843
  function getFeatureRenderedConfigForComponent(entity, featureName, basePath = '/tmp/features') {
290731
- features_io_messageLog('Getting rendered config for component %s and feature %s', entity.name, featureName);
290844
+ src_logger.info(`Getting rendered config for component ${entity.name}and feature ${featureName}`);
290732
290845
  const workdir = getFeatureRenderedPathForEntity(entity, featureName, basePath);
290733
290846
  const config = JSON.parse(external_fs_.readFileSync(`${workdir}/output.json`, { encoding: 'utf8' }));
290734
- features_io_messageLog('Feature output: %O', config);
290735
- features_io_messageLog(`Rendered feature ${featureName} for component ${entity.name}. Result: ${(JSON.stringify, config)}`);
290847
+ src_logger.debug(`Feature output: ${config}`);
290848
+ src_logger.debug(`Rendered feature ${featureName} for component ${entity.name}. Result: ${(JSON.stringify, config)}`);
290736
290849
  return config;
290737
290850
  }
290738
290851
 
@@ -290746,7 +290859,6 @@ function getFeatureRenderedConfigForComponent(entity, featureName, basePath = '/
290746
290859
 
290747
290860
  ;// CONCATENATED MODULE: ../catalog_common/src/policies/policies.ts
290748
290861
 
290749
- const policies_log = src_default()('firestartr:catalog_common:policies');
290750
290862
  const FIRESTARTR_POLICIES = [
290751
290863
  {
290752
290864
  name: 'full-control',
@@ -290785,17 +290897,17 @@ function getPolicyByName(policyName) {
290785
290897
  return FIRESTARTR_POLICIES.find((p) => p.name === policyName || p.aliases.includes(policyName));
290786
290898
  }
290787
290899
  function policiesAreCompatible(syncPolicy, generalPolicy) {
290788
- policies_log('Validating policy compatibility: %s %s', syncPolicy, generalPolicy);
290900
+ src_logger.debug('Validating policy compatibility: %s %s', syncPolicy, generalPolicy);
290789
290901
  const syncPolicyWeight = getPolicyByName(syncPolicy)?.weight;
290790
290902
  const generalPolicyWeight = getPolicyByName(generalPolicy)?.weight;
290791
290903
  if (!syncPolicyWeight || !generalPolicyWeight) {
290792
290904
  throw new Error(`Policy ${syncPolicy} or ${generalPolicy} not found`);
290793
290905
  }
290794
290906
  if (generalPolicyWeight >= syncPolicyWeight) {
290795
- policies_log('Policies %s %s are compatible', syncPolicy, generalPolicy);
290907
+ src_logger.debug('Policies %s %s are compatible', syncPolicy, generalPolicy);
290796
290908
  return true;
290797
290909
  }
290798
- policies_log('Policies %s %s are not compatible', syncPolicy, generalPolicy);
290910
+ src_logger.debug('Policies %s %s are not compatible', syncPolicy, generalPolicy);
290799
290911
  return false;
290800
290912
  }
290801
290913
 
@@ -290815,6 +290927,7 @@ function policiesAreCompatible(syncPolicy, generalPolicy) {
290815
290927
 
290816
290928
 
290817
290929
 
290930
+
290818
290931
  /* harmony default export */ const catalog_common = ({
290819
290932
  io: io,
290820
290933
  generic: generic,
@@ -290823,6 +290936,7 @@ function policiesAreCompatible(syncPolicy, generalPolicy) {
290823
290936
  defaults: defaults,
290824
290937
  features: features,
290825
290938
  policies: policies,
290939
+ logger: logger_logger,
290826
290940
  });
290827
290941
 
290828
290942
  ;// CONCATENATED MODULE: ../../node_modules/universal-user-agent/index.js
@@ -297310,12 +297424,17 @@ async function checkIfInstalledForOrg(org = 'default') {
297310
297424
  return installation !== undefined;
297311
297425
  }
297312
297426
 
297427
+ ;// CONCATENATED MODULE: ../github/src/logger.ts
297428
+
297429
+ /* harmony default export */ const github_src_logger = (catalog_common.logger);
297430
+
297313
297431
  ;// CONCATENATED MODULE: ../github/src/auth.ts
297314
297432
 
297315
297433
 
297316
297434
 
297317
297435
 
297318
297436
 
297437
+
297319
297438
  const generateGithubAppToken = async (config) => {
297320
297439
  try {
297321
297440
  const { appId, privateKey, installationOrgId } = config;
@@ -297349,8 +297468,11 @@ async function getOctokitForOrg(org, paginated = false, genGithubAppToken = gene
297349
297468
  if (org === '') {
297350
297469
  throw 'getOctokitForOrg: "org" has to be passed';
297351
297470
  }
297352
- if (org === 'prefapp')
297471
+ if (org === 'prefapp' && !process.env[catalog_common.types.envVars.avoidPAT]) {
297472
+ github_src_logger.info("Using Prefapp's PAT token");
297353
297473
  return getOctokitFromPat(catalog_common.types.envVars.githubAppPatPrefapp);
297474
+ }
297475
+ github_src_logger.info('Using Github APP token');
297354
297476
  const auth = await genGithubAppToken({
297355
297477
  appId: catalog_common.environment.getFromEnvironment(catalog_common.types.envVars.githubAppId),
297356
297478
  privateKey: catalog_common.environment.getFromEnvironment(catalog_common.types.envVars.githubAppPemFile),
@@ -297372,10 +297494,9 @@ async function getOctokitFromPat(envVar) {
297372
297494
  ;// CONCATENATED MODULE: ../github/src/organization.ts
297373
297495
 
297374
297496
 
297375
- const organization_messageLog = src_default()('firestartr:github:organization');
297376
297497
  const defaultPerPage = 100;
297377
297498
  async function getRepositoryList(org, perPageEntries = defaultPerPage) {
297378
- organization_messageLog(`Getting repository list for ${org} with ${perPageEntries} entries per page`);
297499
+ github_src_logger.info(`Getting repository list for ${org} with ${perPageEntries} entries per page`);
297379
297500
  const octokit = await getOctokitForOrg(org);
297380
297501
  const options = octokit.repos.listForOrg.endpoint.merge({
297381
297502
  org: org,
@@ -297385,7 +297506,7 @@ async function getRepositoryList(org, perPageEntries = defaultPerPage) {
297385
297506
  return await doPaginatedRequest(options);
297386
297507
  }
297387
297508
  async function getTeamList(org, perPageEntries = defaultPerPage) {
297388
- organization_messageLog(`Getting team list for ${org} with ${perPageEntries} entries per page`);
297509
+ github_src_logger.info(`Getting team list for ${org} with ${perPageEntries} entries per page`);
297389
297510
  const octokit = await getOctokitForOrg(org);
297390
297511
  const options = octokit.rest.teams.list.endpoint.merge({
297391
297512
  org: org,
@@ -297394,7 +297515,7 @@ async function getTeamList(org, perPageEntries = defaultPerPage) {
297394
297515
  return await doPaginatedRequest(options);
297395
297516
  }
297396
297517
  async function getUserList(org, perPageEntries = defaultPerPage) {
297397
- organization_messageLog(`Getting user list for ${org} with ${perPageEntries} entries per page`);
297518
+ github_src_logger.info(`Getting user list for ${org} with ${perPageEntries} entries per page`);
297398
297519
  const octokit = await getOctokitForOrg(org);
297399
297520
  const options = await octokit.rest.orgs.listMembers.endpoint.merge({
297400
297521
  org: org,
@@ -297403,7 +297524,7 @@ async function getUserList(org, perPageEntries = defaultPerPage) {
297403
297524
  return await doPaginatedRequest(options);
297404
297525
  }
297405
297526
  async function validateMember(username, org) {
297406
- organization_messageLog(`Validating ${username} is a member of ${org}`);
297527
+ github_src_logger.debug(`Validating ${username} is a member of ${org}`);
297407
297528
  const octokit = await getOctokitForOrg(org);
297408
297529
  const result = await octokit.orgs.checkMembershipForUser({
297409
297530
  org: org,
@@ -297412,7 +297533,7 @@ async function validateMember(username, org) {
297412
297533
  return result;
297413
297534
  }
297414
297535
  async function getUserRoleInOrg(username, org) {
297415
- organization_messageLog(`Getting user ${username} role in ${org}`);
297536
+ github_src_logger.info(`Getting user ${username} role in ${org}`);
297416
297537
  const octokit = await getOctokitForOrg(org);
297417
297538
  const membership = await octokit.orgs.getMembershipForUser({
297418
297539
  org: org,
@@ -297421,13 +297542,13 @@ async function getUserRoleInOrg(username, org) {
297421
297542
  return membership.data.role;
297422
297543
  }
297423
297544
  async function getOrgInfo(org) {
297424
- organization_messageLog(`Getting info for org ${org}`);
297545
+ github_src_logger.info(`Getting info for org ${org}`);
297425
297546
  const octokit = await getOctokitForOrg(org);
297426
297547
  const orgInfo = await octokit.orgs.get({ org });
297427
297548
  return orgInfo.data;
297428
297549
  }
297429
297550
  async function getOrgPlanName(org) {
297430
- organization_messageLog(`Getting plan for org ${org}`);
297551
+ github_src_logger.info(`Getting plan for org ${org}`);
297431
297552
  const orgInfo = await getOrgInfo(org);
297432
297553
  return orgInfo.plan.name;
297433
297554
  }
@@ -297451,9 +297572,8 @@ async function doPaginatedRequest(options) {
297451
297572
 
297452
297573
 
297453
297574
 
297454
- const repository_messageLog = src_default()('firestartr:github:repository');
297455
297575
  async function listReleases(repo, owner = 'prefapp') {
297456
- repository_messageLog(`Getting releases for ${owner}/${repo}`);
297576
+ github_src_logger.info(`Getting releases for ${owner}/${repo}`);
297457
297577
  const octokit = await getOctokitForOrg(owner);
297458
297578
  const response = await octokit.rest.repos.listReleases({
297459
297579
  owner,
@@ -297464,7 +297584,7 @@ async function listReleases(repo, owner = 'prefapp') {
297464
297584
  return response.data;
297465
297585
  }
297466
297586
  async function getReleaseByTag(releaseTag, repo, owner = 'prefapp') {
297467
- repository_messageLog(`Getting release ${releaseTag} for ${owner}/${repo}`);
297587
+ github_src_logger.info(`Getting release ${releaseTag} for ${owner}/${repo}`);
297468
297588
  const octokit = await getOctokitForOrg(owner);
297469
297589
  const response = await octokit.rest.repos.getReleaseByTag({
297470
297590
  owner,
@@ -297479,7 +297599,7 @@ async function getFileFromGithub(path, repo, owner = 'prefapp') {
297479
297599
  return await octokit.rest.repos.getContent({ owner, repo, path });
297480
297600
  }
297481
297601
  async function getContent(path, repo, owner = 'prefapp', ref = '') {
297482
- repository_messageLog(`Getting content for ${owner}/${repo}/${path}`);
297602
+ github_src_logger.info(`Getting content for ${owner}/${repo}/${path}`);
297483
297603
  const octokit = await getOctokitForOrg(owner);
297484
297604
  const opts = {
297485
297605
  owner,
@@ -297493,19 +297613,19 @@ async function getContent(path, repo, owner = 'prefapp', ref = '') {
297493
297613
  return Buffer.from(content.data.content, 'base64').toString('utf8');
297494
297614
  }
297495
297615
  async function getRepoInfo(owner, name) {
297496
- repository_messageLog(`Getting repo info for ${owner}/${name}`);
297616
+ github_src_logger.info(`Getting repo info for ${owner}/${name}`);
297497
297617
  const octokit = await getOctokitForOrg(owner);
297498
297618
  const res = await octokit.repos.get({ owner: owner, repo: name });
297499
297619
  return res['data'];
297500
297620
  }
297501
297621
  async function getPages(owner, name) {
297502
- repository_messageLog(`Getting pages for ${owner}/${name}`);
297622
+ github_src_logger.info(`Getting pages for ${owner}/${name}`);
297503
297623
  const octokit = await getOctokitForOrg(owner);
297504
297624
  const res = await octokit.repos.getPages({ owner: owner, repo: name });
297505
297625
  return res['data'];
297506
297626
  }
297507
297627
  async function getOIDCRepo(owner, name) {
297508
- repository_messageLog(`Getting repo info for ${owner}/${name}`);
297628
+ github_src_logger.info(`Getting repo info for ${owner}/${name}`);
297509
297629
  const octokit = await getOctokitForOrg(owner);
297510
297630
  return await octokit.request(`GET /repos/${owner}/${name}/actions/oidc/customization/sub`, {
297511
297631
  owner: owner,
@@ -297516,7 +297636,7 @@ async function getOIDCRepo(owner, name) {
297516
297636
  });
297517
297637
  }
297518
297638
  async function getBranchProtection(owner, repo, branch = 'main') {
297519
- repository_messageLog(`Getting branch protection for ${owner}/${repo}/${branch}`);
297639
+ github_src_logger.info(`Getting branch protection for ${owner}/${repo}/${branch}`);
297520
297640
  const octokit = await getOctokitForOrg(owner);
297521
297641
  const res = await octokit.repos.getBranchProtection({
297522
297642
  owner: owner,
@@ -297526,13 +297646,13 @@ async function getBranchProtection(owner, repo, branch = 'main') {
297526
297646
  return res['data'];
297527
297647
  }
297528
297648
  async function getTeams(owner, repo) {
297529
- repository_messageLog(`Getting teams for ${owner}/${repo}`);
297649
+ github_src_logger.info(`Getting teams for ${owner}/${repo}`);
297530
297650
  const octokit = await getOctokitForOrg(owner);
297531
297651
  const res = await octokit.repos.listTeams({ owner: owner, repo: repo });
297532
297652
  return res['data'];
297533
297653
  }
297534
297654
  async function getCollaborators(owner, repo, affiliation = 'direct') {
297535
- repository_messageLog(`Getting collaborators for ${owner}/${repo}`);
297655
+ github_src_logger.info(`Getting collaborators for ${owner}/${repo}`);
297536
297656
  const octokit = await getOctokitForOrg(owner);
297537
297657
  const res = await octokit.repos.listCollaborators({
297538
297658
  owner: owner,
@@ -297543,7 +297663,7 @@ async function getCollaborators(owner, repo, affiliation = 'direct') {
297543
297663
  }
297544
297664
  async function setContent(path, fileContent, repo, owner = 'prefapp', branch = 'main', message = '') {
297545
297665
  const base64Content = Buffer.from(fileContent, 'utf8').toString('base64');
297546
- repository_messageLog(`Setting content for ${owner}/${repo}/${path}`);
297666
+ github_src_logger.info(`Setting content for ${owner}/${repo}/${path}`);
297547
297667
  if (message === '') {
297548
297668
  message = `Update ${path}`;
297549
297669
  }
@@ -297551,10 +297671,10 @@ async function setContent(path, fileContent, repo, owner = 'prefapp', branch = '
297551
297671
  try {
297552
297672
  const currentContent = await getFileFromGithub(path, repo, owner);
297553
297673
  sha = currentContent.data.sha;
297554
- repository_messageLog('File already exists, updating it');
297674
+ github_src_logger.debug('File already exists, updating it');
297555
297675
  }
297556
297676
  catch {
297557
- repository_messageLog('File does not exists, creating it');
297677
+ github_src_logger.debug('File does not exist, creating it');
297558
297678
  }
297559
297679
  const octokit = await getOctokitForOrg(owner);
297560
297680
  await octokit.rest.repos.createOrUpdateFileContents({
@@ -297569,7 +297689,7 @@ async function setContent(path, fileContent, repo, owner = 'prefapp', branch = '
297569
297689
  }
297570
297690
  async function uploadFile(destinationPath, filePath, repo, owner = 'prefapp', branch = 'main', message = '') {
297571
297691
  if (!external_fs_.existsSync(filePath)) {
297572
- repository_messageLog(`File ${filePath} does not exists or is not readable`);
297692
+ github_src_logger.error(`File ${filePath} does not exists or is not readable`);
297573
297693
  throw `${filePath} does not exists or is not readable`;
297574
297694
  }
297575
297695
  // Read file contents and call setContent
@@ -297578,16 +297698,16 @@ async function uploadFile(destinationPath, filePath, repo, owner = 'prefapp', br
297578
297698
  }
297579
297699
  async function deleteFile(path, repo, owner = 'prefapp', branch = 'main', message = '') {
297580
297700
  let sha = undefined;
297581
- repository_messageLog(`Deleting file ${owner}/${repo}/${path}`);
297701
+ github_src_logger.info(`Deleting file ${owner}/${repo}/${path}`);
297582
297702
  try {
297583
297703
  const currentContent = await getFileFromGithub(path, repo, owner);
297584
297704
  sha = currentContent.data.sha;
297585
297705
  }
297586
297706
  catch {
297587
- repository_messageLog(`File ${path} does not exist in ${repo}`);
297707
+ github_src_logger.error(`File ${path} does not exist in ${repo}`);
297588
297708
  }
297589
297709
  if (!sha) {
297590
- repository_messageLog(`File ${path} does not exist in ${repo}`);
297710
+ github_src_logger.error(`File ${path} does not exist in ${repo}`);
297591
297711
  throw `File ${path} does not exist in ${repo}`;
297592
297712
  }
297593
297713
  if (message === '') {
@@ -297604,7 +297724,7 @@ async function deleteFile(path, repo, owner = 'prefapp', branch = 'main', messag
297604
297724
  });
297605
297725
  }
297606
297726
  async function addStatusCheck(output, is_failure, head_sha, name, status, repo, owner = 'prefapp') {
297607
- repository_messageLog(`Adding status checks to commit ${head_sha} in ${owner}/${repo}`);
297727
+ github_src_logger.info(`Adding status checks to commit ${head_sha} in ${owner}/${repo}`);
297608
297728
  const octokit = await getOctokitForOrg(owner);
297609
297729
  const payload = { output, head_sha, name, owner, repo, status };
297610
297730
  if (status === 'completed') {
@@ -297613,7 +297733,7 @@ async function addStatusCheck(output, is_failure, head_sha, name, status, repo,
297613
297733
  await octokit.rest.checks.create(payload);
297614
297734
  }
297615
297735
  async function addCommitStatus(state, sha, repo, owner = 'prefapp', target_url = '', description = '', context = '') {
297616
- repository_messageLog(`Adding commit status with state ${state} to SHA ${sha} in ${owner}/${repo}`);
297736
+ github_src_logger.info(`Adding commit status with state ${state} to SHA ${sha} in ${owner}/${repo}`);
297617
297737
  const octokit = await getOctokitForOrg(owner);
297618
297738
  await octokit.rest.repos.createCommitStatus({
297619
297739
  owner,
@@ -297645,9 +297765,8 @@ async function addCommitStatus(state, sha, repo, owner = 'prefapp', target_url =
297645
297765
  ;// CONCATENATED MODULE: ../github/src/team.ts
297646
297766
 
297647
297767
 
297648
- const team_messageLog = src_default()('firestartr:github:team');
297649
297768
  async function getTeamMembers(team, org) {
297650
- team_messageLog(`Getting members for ${org}/${team}`);
297769
+ github_src_logger.info(`Getting members for ${org}/${team}`);
297651
297770
  const octokit = await getOctokitForOrg(org);
297652
297771
  const res = await octokit.rest.teams.listMembersInOrg({
297653
297772
  org: org,
@@ -297656,13 +297775,13 @@ async function getTeamMembers(team, org) {
297656
297775
  return res['data'];
297657
297776
  }
297658
297777
  async function getTeamInfo(team, org) {
297659
- team_messageLog(`Getting info for ${org}/${team}`);
297778
+ github_src_logger.info(`Getting info for ${org}/${team}`);
297660
297779
  const octokit = await getOctokitForOrg(org);
297661
297780
  const res = await octokit.rest.teams.getByName({ org: org, team_slug: team });
297662
297781
  return res['data'];
297663
297782
  }
297664
297783
  async function getTeamRoleUser(org, team, username) {
297665
- team_messageLog(`Getting role for ${username} in ${org}/${team}`);
297784
+ github_src_logger.info(`Getting role for ${username} in ${org}/${team}`);
297666
297785
  const octokit = await getOctokitForOrg(org);
297667
297786
  const res = await octokit.rest.teams.getMembershipForUserInOrg({
297668
297787
  org: org,
@@ -297672,7 +297791,7 @@ async function getTeamRoleUser(org, team, username) {
297672
297791
  return res['data'];
297673
297792
  }
297674
297793
  async function create(org, team, privacy = 'closed') {
297675
- team_messageLog(`Creating team ${org}/${team}`);
297794
+ github_src_logger.info(`Creating team ${org}/${team}`);
297676
297795
  const octokit = await getOctokitForOrg(org);
297677
297796
  return await octokit.rest.teams.create({
297678
297797
  org: org,
@@ -297681,7 +297800,7 @@ async function create(org, team, privacy = 'closed') {
297681
297800
  });
297682
297801
  }
297683
297802
  async function addOrUpdateMember(org, team, username, role = 'member') {
297684
- team_messageLog(`Adding or updating ${username} in ${org}/${team} with role ${role}`);
297803
+ github_src_logger.info(`Adding or updating ${username} in ${org}/${team} with role ${role}`);
297685
297804
  const octokit = await getOctokitForOrg(org);
297686
297805
  return await octokit.rest.teams.addOrUpdateMembershipForUserInOrg({
297687
297806
  org: org,
@@ -297691,7 +297810,7 @@ async function addOrUpdateMember(org, team, username, role = 'member') {
297691
297810
  });
297692
297811
  }
297693
297812
  async function removeMember(org, team, username) {
297694
- team_messageLog(`Removing ${username} from ${org}/${team}`);
297813
+ github_src_logger.info(`Removing ${username} from ${org}/${team}`);
297695
297814
  const octokit = await getOctokitForOrg(org);
297696
297815
  return await octokit.teams.removeMembershipForUserInOrg({
297697
297816
  org: org,
@@ -297711,9 +297830,8 @@ async function removeMember(org, team, username) {
297711
297830
  ;// CONCATENATED MODULE: ../github/src/user.ts
297712
297831
 
297713
297832
 
297714
- const user_messageLog = src_default()('firestartr:github:user');
297715
297833
  async function getUserInfo(name) {
297716
- user_messageLog(`Getting user ${name} info`);
297834
+ github_src_logger.info(`Getting user ${name} info`);
297717
297835
  const octokit = await getOctokitForOrg(name);
297718
297836
  return await octokit.users.getByUsername({ username: name });
297719
297837
  }
@@ -297724,11 +297842,10 @@ async function getUserInfo(name) {
297724
297842
  ;// CONCATENATED MODULE: ../github/src/pull_request.ts
297725
297843
 
297726
297844
 
297727
- const pull_request_messageLog = src_default()('firestartr:github:pull_request');
297728
297845
  const commentMaxSize = 65535;
297729
297846
  async function commentInPR(comment, pr_number, repo, owner = 'prefapp') {
297730
297847
  try {
297731
- pull_request_messageLog(`Commenting ${comment} in PR ${pr_number} of ${owner}/${repo}`);
297848
+ github_src_logger.info(`Commenting ${comment} in PR ${pr_number} of ${owner}/${repo}`);
297732
297849
  const octokit = await getOctokitForOrg(owner);
297733
297850
  await octokit.rest.issues.createComment({
297734
297851
  owner,
@@ -297747,12 +297864,12 @@ async function getPrData(pull_number, repo, owner) {
297747
297864
  return await octokit.rest.pulls.get({ owner, repo, pull_number });
297748
297865
  }
297749
297866
  async function getPrLastCommitSHA(pull_number, repo, owner = 'prefapp') {
297750
- pull_request_messageLog(`Getting last commit SHA for PR ${pull_number} of ${owner}/${repo}`);
297867
+ github_src_logger.info(`Getting last commit SHA for PR ${pull_number} of ${owner}/${repo}`);
297751
297868
  const prData = await getPrData(pull_number, repo, owner);
297752
297869
  return prData.data.head.sha;
297753
297870
  }
297754
297871
  async function getPrMergeCommitSHA(pull_number, repo, owner = 'prefapp') {
297755
- pull_request_messageLog(`Getting merge commit SHA for PR ${pull_number} of ${owner}/${repo}`);
297872
+ github_src_logger.info(`Getting merge commit SHA for PR ${pull_number} of ${owner}/${repo}`);
297756
297873
  const prData = await getPrData(pull_number, repo, owner);
297757
297874
  if (prData.data.merge_commit_sha !== null) {
297758
297875
  return prData.data.merge_commit_sha;
@@ -297790,7 +297907,7 @@ function divideCommentIntoChunks(comment, sizeReduction = 0) {
297790
297907
  return result;
297791
297908
  }
297792
297909
  async function getPrFiles(pr_number, repo, owner = 'prefapp') {
297793
- pull_request_messageLog(`Getting PR details of PR ${pr_number} of ${owner}/${repo}`);
297910
+ github_src_logger.info(`Getting PR details of PR ${pr_number} of ${owner}/${repo}`);
297794
297911
  const octokit = await getOctokitForOrg(owner);
297795
297912
  return await octokit.rest.pulls.listFiles({
297796
297913
  owner,
@@ -297832,9 +297949,8 @@ async function filterPrBy(filter, opts) {
297832
297949
  ;// CONCATENATED MODULE: ../github/src/issues.ts
297833
297950
 
297834
297951
 
297835
- const issues_log = src_default()('firestartr:github:issues');
297836
297952
  async function issues_create(owner, repo, title, body, labels = []) {
297837
- issues_log(`Creating issue in ${owner}/${repo}`);
297953
+ github_src_logger.info(`Creating issue in ${owner}/${repo}`);
297838
297954
  const octokit = await getOctokitForOrg(owner);
297839
297955
  return await octokit.rest.issues.create({
297840
297956
  owner,
@@ -297845,7 +297961,7 @@ async function issues_create(owner, repo, title, body, labels = []) {
297845
297961
  });
297846
297962
  }
297847
297963
  async function update(owner, repo, issue_number, title, body, labels = []) {
297848
- issues_log(`Updating issue ${issue_number} in ${owner}/${repo}`);
297964
+ github_src_logger.info(`Updating issue ${issue_number} in ${owner}/${repo}`);
297849
297965
  const octokit = await getOctokitForOrg(owner);
297850
297966
  return await octokit.rest.issues.update({
297851
297967
  owner,
@@ -297857,7 +297973,7 @@ async function update(owner, repo, issue_number, title, body, labels = []) {
297857
297973
  });
297858
297974
  }
297859
297975
  async function filterBy(owner, repo, title, labels, state = 'open', creator = undefined, assignee = undefined) {
297860
- issues_log(`Filtering issues by title in ${owner}/${repo}`);
297976
+ github_src_logger.info(`Filtering issues by title in ${owner}/${repo}`);
297861
297977
  const octokit = await getOctokitForOrg(owner);
297862
297978
  const resp = await octokit.rest.issues.listForRepo({
297863
297979
  owner,
@@ -297872,7 +297988,7 @@ async function filterBy(owner, repo, title, labels, state = 'open', creator = un
297872
297988
  return resp.data.filter((issue) => issue.title.includes(title));
297873
297989
  }
297874
297990
  async function upsertByTitle(owner, repo, title, body, labels = []) {
297875
- issues_log(`Upserting issue by title in ${owner}/${repo}`);
297991
+ github_src_logger.info(`Upserting issue by title in ${owner}/${repo}`);
297876
297992
  const foundIssues = await filterBy(owner, repo, title, labels.join(','));
297877
297993
  if (foundIssues.length > 0) {
297878
297994
  return update(owner, repo, foundIssues[0].number, title, body, labels);
@@ -297882,7 +297998,7 @@ async function upsertByTitle(owner, repo, title, body, labels = []) {
297882
297998
  }
297883
297999
  }
297884
298000
  async function issues_close(owner, repo, issue_number) {
297885
- issues_log(`Closing issue ${issue_number} in ${owner}/${repo}`);
298001
+ github_src_logger.info(`Closing issue ${issue_number} in ${owner}/${repo}`);
297886
298002
  const octokit = await getOctokitForOrg(owner);
297887
298003
  return await octokit.rest.issues.update({
297888
298004
  owner,
@@ -297902,10 +298018,9 @@ async function issues_close(owner, repo, issue_number) {
297902
298018
  ;// CONCATENATED MODULE: ../github/src/branches.ts
297903
298019
 
297904
298020
 
297905
- const branches_messageLog = src_default()('firestartr:github:branches');
297906
298021
  const SHA1_EMPTY_TREE = '4b825dc642cb6eb9a060e54bf8d69288fbee4904';
297907
298022
  async function listBranches(repo, owner = 'prefapp') {
297908
- branches_messageLog(`Getting branches for ${owner}/${repo}`);
298023
+ github_src_logger.info(`Getting branches for ${owner}/${repo}`);
297909
298024
  const octokit = await getOctokitForOrg(owner);
297910
298025
  const response = await octokit.rest.repos.listBranches({
297911
298026
  owner,
@@ -297916,7 +298031,7 @@ async function listBranches(repo, owner = 'prefapp') {
297916
298031
  return response.data;
297917
298032
  }
297918
298033
  async function getBranch(repo, branch, owner = 'prefapp') {
297919
- branches_messageLog(`Getting branch ${branch} for ${owner}/${repo}`);
298034
+ github_src_logger.info(`Getting branch ${branch} for ${owner}/${repo}`);
297920
298035
  const octokit = await getOctokitForOrg(owner);
297921
298036
  const response = await octokit.rest.repos.getBranch({
297922
298037
  owner,
@@ -297926,7 +298041,7 @@ async function getBranch(repo, branch, owner = 'prefapp') {
297926
298041
  return response.data;
297927
298042
  }
297928
298043
  async function createBranch(repo, branch, sha, owner = 'prefapp') {
297929
- branches_messageLog(`Creating branch ${branch} for ${owner}/${repo}`);
298044
+ github_src_logger.info(`Creating branch ${branch} for ${owner}/${repo}`);
297930
298045
  const octokit = await getOctokitForOrg(owner);
297931
298046
  const response = await octokit.rest.git.createRef({
297932
298047
  owner,
@@ -297937,7 +298052,7 @@ async function createBranch(repo, branch, sha, owner = 'prefapp') {
297937
298052
  return response.data;
297938
298053
  }
297939
298054
  async function createOrphanBranch(repo, branch, owner = 'prefapp') {
297940
- branches_messageLog(`Creating orphan branch ${branch} for ${owner}/${repo}`);
298055
+ github_src_logger.info(`Creating orphan branch ${branch} for ${owner}/${repo}`);
297941
298056
  const octokit = await getOctokitForOrg(owner);
297942
298057
  // Create a commit with an empty tree
297943
298058
  const { data: commit } = await octokit.request('POST /repos/{owner}/{repo}/git/commits', {
@@ -297963,6 +298078,268 @@ async function createOrphanBranch(repo, branch, owner = 'prefapp') {
297963
298078
  createOrphanBranch,
297964
298079
  });
297965
298080
 
298081
+ ;// CONCATENATED MODULE: ../github/src/check_run.ts
298082
+
298083
+
298084
+ const FLUSH_TIMEOUT = 4; // seconds
298085
+ const GITHUB_OUTPUT_TEXT_LIMIT = 65000; // ~65k hard limit for output.text
298086
+ /**
298087
+ * Streams text updates to a callback on a fixed cadence, with a size-triggered early flush.
298088
+ * Does NOT clear content on flush (so the consumer can send the full, current log each time).
298089
+ */
298090
+ class CheckRunBuffer {
298091
+ constructor(initial = '', onFlush, opts) {
298092
+ this.content = initial;
298093
+ this.updated = Boolean(initial);
298094
+ this.onFlush = onFlush;
298095
+ this.flushIntervalMs = (opts?.intervalSec ?? FLUSH_TIMEOUT) * 1000;
298096
+ this.timer = setInterval(() => {
298097
+ this.flush();
298098
+ }, this.flushIntervalMs);
298099
+ if (initial)
298100
+ this.flush();
298101
+ }
298102
+ stop() {
298103
+ if (this.timer !== null) {
298104
+ clearInterval(this.timer);
298105
+ this.timer = null;
298106
+ }
298107
+ }
298108
+ update(data = '') {
298109
+ if (!data)
298110
+ return;
298111
+ this.content += data;
298112
+ this.updated = true;
298113
+ }
298114
+ flush() {
298115
+ if (!this.updated)
298116
+ return;
298117
+ try {
298118
+ this.onFlush(this.content);
298119
+ }
298120
+ finally {
298121
+ this.updated = false;
298122
+ }
298123
+ }
298124
+ snapshot() {
298125
+ return this.content;
298126
+ }
298127
+ }
298128
+ class GithubCheckRun {
298129
+ constructor(octokit, params) {
298130
+ this.hasCommented = false;
298131
+ this.closing = false;
298132
+ this.closed = false;
298133
+ this.lastStatus = 'in_progress';
298134
+ this.detailsFormatter = (s) => s;
298135
+ this.octokit = octokit;
298136
+ this.owner = params.owner;
298137
+ this.repo = params.repo;
298138
+ this.headSHA = params.headSHA;
298139
+ this.name = params.name;
298140
+ this.detailsUrl = params.detailsUrl;
298141
+ this.title = params.title ?? params.name;
298142
+ if (params.summary)
298143
+ this._summaryOverride = params.summary;
298144
+ this.pullNumber = params.pullNumber;
298145
+ this.includeCheckRunComment = Boolean(params.includeCheckRunComment);
298146
+ this.checkRunComment = params.checkRunComment;
298147
+ this.buffer = new CheckRunBuffer('', (data) => this.__updateCheckRun(data).catch(() => { }), { intervalSec: FLUSH_TIMEOUT });
298148
+ }
298149
+ /**
298150
+ * Configure markdown formatting for the details (output.text).
298151
+ * Example: ch.mdOptionsDetails({ quotes: 'terraform' })
298152
+ * Result:
298153
+ * ```terraform
298154
+ * <log>
298155
+ * ```
298156
+ */
298157
+ mdOptionsDetails(opts) {
298158
+ const lang = (opts?.quotes ?? '').trim();
298159
+ if (!lang) {
298160
+ this.detailsFormatter = (s) => s;
298161
+ return;
298162
+ }
298163
+ const fenceOpen = '```' + lang + '\n';
298164
+ const fenceClose = '\n```';
298165
+ const overhead = fenceOpen.length + fenceClose.length;
298166
+ this.detailsFormatter = (body) => {
298167
+ const maxBody = Math.max(0, GITHUB_OUTPUT_TEXT_LIMIT - overhead);
298168
+ const safeBody = body.length > maxBody ? truncateRight(body, maxBody) : body;
298169
+ return fenceOpen + safeBody + fenceClose;
298170
+ };
298171
+ }
298172
+ set summary(data) {
298173
+ this._summaryOverride = data;
298174
+ // Push an immediate update if already created and not closed.
298175
+ if (!this.closed && this.checkRunId) {
298176
+ // do not mutate buffer flags; just send current snapshot using new summary
298177
+ this.__updateCheckRun(this.buffer.snapshot()).catch(() => { });
298178
+ }
298179
+ }
298180
+ get summary() {
298181
+ return this._summaryOverride;
298182
+ }
298183
+ /**
298184
+ * Append log text and optionally set status ('queued' | 'in_progress').
298185
+ */
298186
+ update(text, status) {
298187
+ if (this.closed)
298188
+ return;
298189
+ if (status)
298190
+ this.lastStatus = status;
298191
+ if (text)
298192
+ this.buffer.update(text);
298193
+ }
298194
+ /**
298195
+ * Finalize the check with a conclusion. Flushes buffered text, marks completed.
298196
+ */
298197
+ async close(finalText, ok) {
298198
+ if (this.closed || this.closing)
298199
+ return;
298200
+ this.closing = true;
298201
+ this.buffer.stop();
298202
+ const finalContent = this.buffer.snapshot() + (finalText || '');
298203
+ try {
298204
+ await this.__ensureCreated();
298205
+ const { text, summary } = this.buildOutputTextAndSummary(finalContent);
298206
+ await this.octokit.rest.checks.update({
298207
+ owner: this.owner,
298208
+ repo: this.repo,
298209
+ check_run_id: this.checkRunId,
298210
+ conclusion: ok ? 'success' : 'failure',
298211
+ completed_at: new Date().toISOString(),
298212
+ output: {
298213
+ title: this.title,
298214
+ summary,
298215
+ text,
298216
+ },
298217
+ });
298218
+ this.closed = true;
298219
+ }
298220
+ finally {
298221
+ this.closing = false;
298222
+ }
298223
+ }
298224
+ // -------------------- Internals --------------------
298225
+ async __ensureCreated() {
298226
+ if (this.checkRunId)
298227
+ return;
298228
+ const startedAt = new Date().toISOString();
298229
+ const res = await this.octokit.rest.checks.create({
298230
+ owner: this.owner,
298231
+ repo: this.repo,
298232
+ name: this.name,
298233
+ head_sha: this.headSHA,
298234
+ status: 'in_progress',
298235
+ started_at: startedAt,
298236
+ details_url: this.detailsUrl,
298237
+ output: {
298238
+ title: this.title,
298239
+ summary: this._summaryOverride ?? '',
298240
+ text: undefined,
298241
+ },
298242
+ });
298243
+ this.checkRunId = res.data.id;
298244
+ if (this.includeCheckRunComment &&
298245
+ this.pullNumber !== undefined &&
298246
+ !this.hasCommented) {
298247
+ const link = this.__buildCheckRunUrl();
298248
+ const formattedLink = `[here](${link})`;
298249
+ const base = this.checkRunComment ?? '';
298250
+ const body = base ? `${base}${formattedLink}` : formattedLink;
298251
+ await this.octokit.rest.issues.createComment({
298252
+ owner: this.owner,
298253
+ repo: this.repo,
298254
+ issue_number: this.pullNumber,
298255
+ body,
298256
+ });
298257
+ this.hasCommented = true;
298258
+ }
298259
+ }
298260
+ async __updateCheckRun(allContent) {
298261
+ if (this.closed || this.closing)
298262
+ return;
298263
+ await this.__ensureCreated();
298264
+ const { text, summary } = this.buildOutputTextAndSummary(allContent);
298265
+ await this.octokit.rest.checks.update({
298266
+ owner: this.owner,
298267
+ repo: this.repo,
298268
+ check_run_id: this.checkRunId,
298269
+ status: this.lastStatus,
298270
+ output: {
298271
+ title: this.title,
298272
+ summary,
298273
+ text,
298274
+ },
298275
+ });
298276
+ }
298277
+ __buildCheckRunUrl() {
298278
+ if (this.checkRunId) {
298279
+ return `https://github.com/${this.owner}/${this.repo}/runs/${this.checkRunId}?check_suite_focus=true`;
298280
+ }
298281
+ return `https://github.com/${this.owner}/${this.repo}/commit/${this.headSHA}/checks?check_suite_focus=true`;
298282
+ }
298283
+ buildOutputTextAndSummary(full) {
298284
+ if (!full) {
298285
+ return {
298286
+ text: undefined,
298287
+ summary: this._summaryOverride ?? '',
298288
+ };
298289
+ }
298290
+ let text = this.detailsFormatter(full);
298291
+ let truncated = false;
298292
+ if (text.length > GITHUB_OUTPUT_TEXT_LIMIT) {
298293
+ text = truncateRight(text, GITHUB_OUTPUT_TEXT_LIMIT);
298294
+ truncated = true;
298295
+ }
298296
+ else {
298297
+ truncated = text.length < full.length;
298298
+ }
298299
+ let summary = this._summaryOverride ?? '';
298300
+ if (this._summaryOverride && truncated) {
298301
+ summary = `${summary}\n\n... (log truncated to ~${GITHUB_OUTPUT_TEXT_LIMIT.toLocaleString()} chars)`;
298302
+ }
298303
+ return { text, summary };
298304
+ }
298305
+ }
298306
+ // -------------------- Helpers --------------------
298307
+ function truncateRight(s, max) {
298308
+ if (s.length <= max)
298309
+ return s;
298310
+ const HARD = Math.max(0, max - 3);
298311
+ return s.slice(0, HARD) + '...';
298312
+ }
298313
+ /**
298314
+ * Factory: build a GithubCheckRun using an installation token for the given org.
298315
+ */
298316
+ async function createCheckRunForOrg(org, owner, repo, name, opts) {
298317
+ const octokit = await getOctokitForOrg(org);
298318
+ let headSHA = opts?.headSHA;
298319
+ if (!headSHA && typeof opts?.pullNumber === 'number') {
298320
+ headSHA = await getPrMergeCommitSHA(opts.pullNumber, repo, owner);
298321
+ }
298322
+ if (!headSHA) {
298323
+ throw new Error('createCheckRunForOrg: either opts.headSHA or opts.pullNumber must be provided');
298324
+ }
298325
+ return new GithubCheckRun(octokit, {
298326
+ owner,
298327
+ repo,
298328
+ headSHA,
298329
+ name,
298330
+ detailsUrl: opts?.detailsUrl,
298331
+ title: opts?.title,
298332
+ summary: opts?.summary,
298333
+ pullNumber: opts?.pullNumber,
298334
+ includeCheckRunComment: Boolean(opts?.includeCheckRunComment),
298335
+ checkRunComment: opts?.checkRunComment,
298336
+ });
298337
+ }
298338
+ async function createCheckRun(owner, repo, name, opts) {
298339
+ return createCheckRunForOrg(owner, owner, repo, name, opts);
298340
+ }
298341
+ const CheckRun = GithubCheckRun;
298342
+
297966
298343
  ;// CONCATENATED MODULE: ../github/index.ts
297967
298344
 
297968
298345
 
@@ -297973,6 +298350,7 @@ async function createOrphanBranch(repo, branch, owner = 'prefapp') {
297973
298350
 
297974
298351
 
297975
298352
 
298353
+
297976
298354
  /* harmony default export */ const github_0 = ({
297977
298355
  org: organization,
297978
298356
  repo: repository,
@@ -297986,6 +298364,10 @@ async function createOrphanBranch(repo, branch, owner = 'prefapp') {
297986
298364
  pulls: pull_request,
297987
298365
  issues: issues,
297988
298366
  branches: branches,
298367
+ feedback: {
298368
+ createCheckRun: createCheckRun,
298369
+ CheckRun: CheckRun,
298370
+ },
297989
298371
  });
297990
298372
 
297991
298373
  ;// CONCATENATED MODULE: ../cdk8s_renderer/src/patches/base.ts
@@ -300247,11 +300629,17 @@ class TFWorkspaceNormalizer extends Normalizer {
300247
300629
  }
300248
300630
  async function normalizeModuleContent(tfRootModulePath) {
300249
300631
  let content = '';
300632
+ const files = {};
300250
300633
  await crawl(tfRootModulePath, (entry) => {
300251
300634
  return entry.endsWith('.tf');
300252
300635
  }, (entry, data) => {
300636
+ files[entry] = data;
300637
+ });
300638
+ Object.keys(files)
300639
+ .sort()
300640
+ .forEach((entry) => {
300253
300641
  content += `# ${external_path_.basename(entry)}
300254
- ${data}
300642
+ ${files[entry]}
300255
300643
  `;
300256
300644
  });
300257
300645
  return content;
@@ -301500,14 +301888,14 @@ async function loadSchema(schemaURI) {
301500
301888
 
301501
301889
 
301502
301890
// Shared Ajv instance (JSON Schema 2020-12); `useDefaults` fills schema
// defaults into the documents being validated.
const ajv = new (_2020_default())({ useDefaults: true });
// One-time guard for registering the shared base schema bundle.
let validation_initiated = false;
// Memoized validators, keyed by schema $id.
const validations = {};
/**
 * Look up (and cache) the Ajv validator for `schemaId`.
 *
 * On first use the shared base schemas are registered with the Ajv instance;
 * subsequent calls reuse the cached validator for the same $id.
 */
function prepareValidation(schemaId) {
    if (!validation_initiated) {
        ajv.addSchema(base_schemas.schemas);
    }
    validation_initiated = true;
    const cached = validations[schemaId];
    if (cached) {
        return cached;
    }
    validations[schemaId] = ajv.getSchema(schemaId);
    return validations[schemaId];
}
301513
301901
  function validateClaim(data, schemaId = 'firestartr.dev://common/ClaimEnvelope') {
@@ -302081,6 +302469,20 @@ function isTerraformWorkspace(cr) {
302081
302469
  return cr.kind === 'FirestartrTerraformWorkspace';
302082
302470
  }
302083
302471
 
302472
;// CONCATENATED MODULE: ../cdk8s_renderer/src/validations/crSize.ts

// 1.5 MiB in bytes, etcd recommended limit: https://etcd.io/docs/latest/dev-guide/limit/
const K8S_OBJECT_SIZE_LIMIT = 1572864;
/**
 * Throw if any rendered CR serializes to more YAML bytes than the
 * Kubernetes/etcd object size limit allows.
 */
function validateCrSizes(crs) {
    for (const cr of Object.values(crs)) {
        const size = Buffer.byteLength(catalog_common.io.toYaml(cr), 'utf8');
        if (size > K8S_OBJECT_SIZE_LIMIT) {
            const excess = size - K8S_OBJECT_SIZE_LIMIT;
            throw new Error(`CR "${cr.kind}-${cr.metadata.name}" exceeds the Kubernetes object size limit by ${excess} bytes. Maximum allowed is ${K8S_OBJECT_SIZE_LIMIT} bytes (1.5MiB).`);
        }
    }
}
302485
+
302084
302486
  ;// CONCATENATED MODULE: ../cdk8s_renderer/src/refsSorter/refsSorter.ts
302085
302487
 
302086
302488
  /**
@@ -303141,7 +303543,7 @@ function addTraceabilityStamp(context, content) {
303141
303543
 
303142
303544
 
303143
303545
 
303144
- const render_messageLog = src_default()('firestartr:features_renderer');
303546
+ const messageLog = src_default()('firestartr:features_renderer');
303145
303547
  function render(featurePath, featureRenderPath, entity, firestartrConfig = {}, featureArgs = {}) {
303146
303548
  const configData = validate_validate(featurePath);
303147
303549
  const context = buildContext(entity, configData.args, firestartrConfig, featureArgs);
@@ -303152,7 +303554,7 @@ function render(featurePath, featureRenderPath, entity, firestartrConfig = {}, f
303152
303554
  // For now let's keep upgradeable flag for backward compatibility
303153
303555
  // by default it's false
303154
303556
  const userManaged = file.user_managed ?? file.upgradeable ?? false;
303155
- render_messageLog(`Rendering ${src} to ${dest}`);
303557
+ messageLog(`Rendering ${src} to ${dest}`);
303156
303558
  // render the content of the file
303157
303559
  const content = addTraceability(context, src, renderContent(external_fs_default().readFileSync(external_path_default().join(featurePath, 'templates', src)).toString(), context));
303158
303560
  const destFilePath = external_path_default().join(`${featureRenderPath}`, dest);
@@ -303205,6 +303607,209 @@ function renderContent(template, ctx) {
303205
303607
  return mustache_mustache.render(template, ctx, {}, ['{{|', '|}}']);
303206
303608
  }
303207
303609
 
303610
+ // EXTERNAL MODULE: external "node:fs"
303611
+ var external_node_fs_ = __nccwpck_require__(87561);
303612
+ // EXTERNAL MODULE: external "node:path"
303613
+ var external_node_path_ = __nccwpck_require__(49411);
303614
+ ;// CONCATENATED MODULE: external "node:os"
303615
+ const external_node_os_namespaceObject = __WEBPACK_EXTERNAL_createRequire(import.meta.url)("node:os");
303616
+ ;// CONCATENATED MODULE: ../features_renderer/src/auxiliar.ts
303617
+ // src/auxiliar.ts
303618
+
303619
+
303620
+
303621
+
303622
+
303623
+
303624
+
303625
// JSON Schema (draft-07) for a feature's render_tests.yaml manifest: a
// non-empty `tests` array where every entry has a unique-ish `name` and a
// `cr` fixture path, plus optional extra template `args`.
const renderTestsSchema = {
    $schema: 'http://json-schema.org/draft-07/schema#',
    type: 'object',
    additionalProperties: false,
    required: ['tests'],
    properties: {
        tests: {
            type: 'array',
            minItems: 1,
            items: {
                type: 'object',
                additionalProperties: false,
                required: ['name', 'cr'],
                properties: {
                    name: { type: 'string', minLength: 1 },
                    cr: { type: 'string', minLength: 1 },
                    args: { type: 'object' },
                },
            },
        },
    },
};
// Matches file names ending in .yml/.yaml (also .Yml/.Yaml — only the leading
// letter may be upper case).
const YAML_FILE_REGEX = /\.[yY]a?ml$/;
/* ---------- Core helpers ---------- */
303649
/**
 * Flatten a list of Ajv error objects into a readable bullet-per-error
 * message. Falls back to a generic message when Ajv reported nothing.
 */
function formatAjvErrors(errors) {
    if (!errors || errors.length === 0) {
        return 'Unknown schema error';
    }
    const bullets = [];
    for (const err of errors) {
        const location = err.instancePath && err.instancePath.length ? err.instancePath : '/';
        const detail = err.message ?? 'validation error';
        bullets.push(`- ${location} ${detail}`);
    }
    return bullets.join('\n');
}
303660
/**
 * Reject render-test documents that declare two tests with the same name.
 */
function ensureUniqueTestNames(doc) {
    const seen = new Set();
    doc.tests.forEach((test) => {
        if (seen.has(test.name)) {
            throw new Error(`Duplicate test name "${test.name}" in render_tests.yaml`);
        }
        seen.add(test.name);
    });
}
303669
/**
 * Load <featurePath>/render_tests.yaml, validate it against
 * renderTestsSchema plus the unique-name rule, and return the parsed
 * document. Throws with a descriptive message when the manifest is missing
 * or invalid.
 */
function loadAndValidateRenderTests(featurePath) {
    const manifestPath = external_node_path_.join(featurePath, 'render_tests.yaml');
    if (!external_node_fs_.existsSync(manifestPath)) {
        throw new Error(`render_tests.yaml is required but not found at ${manifestPath}`);
    }
    const parsed = loadYaml(manifestPath);
    const validator = new (ajv_default())({ allErrors: true, strict: true }).compile(renderTestsSchema);
    if (!validator(parsed)) {
        throw new Error(`render_tests.yaml schema validation failed:\n${formatAjvErrors(validator.errors ?? [])}`);
    }
    ensureUniqueTestNames(parsed);
    return parsed;
}
303685
/**
 * Resolve a CR fixture path declared in render_tests.yaml against the
 * feature root. Absolute paths are rejected, and the resolved file must
 * exist on disk.
 */
function resolveCrPath(featurePath, crRelPath) {
    if (external_node_path_.isAbsolute(crRelPath)) {
        throw new Error(`CR path must be relative to the feature root, got absolute: ${crRelPath}`);
    }
    const absolute = external_node_path_.resolve(featurePath, crRelPath);
    if (external_node_fs_.existsSync(absolute)) {
        return absolute;
    }
    throw new Error(`CR file not found (resolved from "${crRelPath}"): ${absolute}`);
}
303695
/**
 * List the YAML files (by extension) directly inside `dir`, returned as
 * paths joined onto `dir`. A missing directory yields an empty list; any
 * other read failure is wrapped in a descriptive Error. Subdirectories are
 * skipped even when their names look like YAML files.
 *
 * Fix: this helper referenced bare `fs`/`path`, while every other helper in
 * this module uses the bundled `external_node_fs_`/`external_node_path_`
 * bindings — normalized for consistency with its siblings.
 */
function listYamlFiles(dir) {
    if (!external_node_fs_.existsSync(dir)) {
        return [];
    }
    let entries;
    try {
        entries = external_node_fs_.readdirSync(dir, { withFileTypes: true });
    }
    catch (e) {
        const msg = e instanceof Error ? e.message : String(e);
        throw new Error(`Failed to read directory "${dir}": ${msg}`);
    }
    return entries
        .filter((entry) => entry.isFile() && YAML_FILE_REGEX.test(entry.name))
        .map((entry) => external_node_path_.join(dir, entry.name));
}
303710
/**
 * Read `file` and parse it as YAML, wrapping any read/parse failure in an
 * Error that names the offending file.
 */
function loadYaml(file) {
    try {
        const raw = common_slurpFile(external_node_path_.join(file));
        return catalog_common.io.fromYaml(raw);
    }
    catch (e) {
        const reason = e instanceof Error ? e.message : String(e);
        throw new Error(`Failed to parse YAML "${file}": ${reason}`);
    }
}
303720
/**
 * Validate that a test name is safe to embed in a tmp-dir path: a non-empty
 * string, at most 128 chars, relative, without "..", and restricted to the
 * character set [A-Za-z0-9._-]. Throws on the first violated rule.
 */
function ensureSafeTmpNames(name) {
    // Ordered rules: each entry pairs a lazily-evaluated predicate with the
    // message to raise when it trips.
    const rules = [
        { bad: () => typeof name !== 'string' || !name.trim(), msg: () => 'Test "name" must be a non-empty string' },
        { bad: () => name.length > 128, msg: () => 'Test "name" is too long (max 128 characters)' },
        { bad: () => external_node_path_.isAbsolute(name), msg: () => `Test "name" must be relative, got absolute: "${name}"` },
        { bad: () => name.includes('..'), msg: () => 'Test "name" must not contain ".."' },
        { bad: () => !/^[A-Za-z0-9._-]+$/.test(name), msg: () => 'Test "name" may only contain letters, numbers, ".", "_", or "-"' },
    ];
    for (const rule of rules) {
        if (rule.bad()) {
            throw new Error(rule.msg());
        }
    }
}
303737
/**
 * Create a tmp directory at os.tmpdir()/<names...>, validating every path
 * segment with ensureSafeTmpNames first. Any previous directory at that
 * location is removed, so callers always start from a clean slate.
 * Returns the directory path.
 */
async function mkNamedTmp(...names) {
    names.forEach((segment) => ensureSafeTmpNames(segment));
    const target = external_node_path_.join(external_node_os_namespaceObject.tmpdir(), ...names);
    // Drop any leftovers from a previous run before recreating.
    await promises_namespaceObject.rm(target, { recursive: true, force: true });
    await promises_namespaceObject.mkdir(target, { recursive: true });
    return target;
}
303746
/**
 * Create a fresh, uniquely named temp directory under os.tmpdir() using the
 * given prefix and return its path.
 *
 * Fix: this helper referenced bare `fsp`/`path`/`os` while the rest of the
 * module uses the bundled `promises_namespaceObject` / `external_node_path_`
 * / `external_node_os_namespaceObject` bindings (see mkNamedTmp above) —
 * normalized for consistency with its siblings.
 */
async function mkTmp(prefix = 'feature-render-') {
    return await promises_namespaceObject.mkdtemp(external_node_path_.join(external_node_os_namespaceObject.tmpdir(), prefix));
}
303749
/**
 * Project a feature config into the expected-output shape used by render
 * tests: one { localPath, repoPath, userManaged } record per declared file
 * (localPath anchored at renderDir), plus the raw patches list.
 */
function buildExpectedOutput(config, renderDir) {
    const expectedFiles = [];
    for (const file of config.files || []) {
        expectedFiles.push({
            localPath: external_node_path_.join(renderDir, file.dest),
            repoPath: file.dest,
            userManaged: file.user_managed,
        });
    }
    return {
        files: expectedFiles,
        patches: config.patches || [],
    };
}
303760
/* ---------- Context-style API for a render temp dir ---------- */
/**
 * Create a disposable render workspace rooted in a fresh temp dir and
 * return a small file-system facade over it: read/write/list/exists/remove
 * helpers, all resolved relative to the workspace root.
 */
async function createRenderContext(prefix = 'feature-render-') {
    const dir = await mkTmp(prefix);
    const join = (...p) => path.join(dir, ...p);
    // Read a workspace file, optionally decoding it as YAML or JSON.
    const getFile = async (relPath, { yaml: asYaml = false, json: asJson = false, } = {}) => {
        const data = await fsp.readFile(join(relPath), 'utf8');
        if (asYaml) {
            return common.io.fromYaml(data);
        }
        if (asJson) {
            return JSON.parse(data);
        }
        return data;
    };
    // Write a workspace file, creating parent directories as needed.
    const setFile = async (relPath, contents) => {
        await fsp.mkdir(path.dirname(join(relPath)), { recursive: true });
        await fsp.writeFile(join(relPath), contents);
    };
    const exists = async (relPath) => {
        try {
            await fsp.access(join(relPath));
            return true;
        }
        catch {
            return false;
        }
    };
    // Convenience accessor for the render's output.json artifact.
    const getOutputJson = async () => {
        const raw = await fsp.readFile(join('output.json'), 'utf8');
        return JSON.parse(raw);
    };
    const list = async (relPath = '.') => {
        const entries = await fsp.readdir(join(relPath), {
            withFileTypes: true,
        });
        return entries.map((entry) => ({ name: entry.name, isDir: entry.isDirectory() }));
    };
    const remove = async () => {
        await fsp.rm(dir, { recursive: true, force: true });
    };
    return {
        getContextPath: () => dir,
        join,
        getFile,
        getFilePath: (relPath) => join(relPath),
        setFile,
        exists,
        getOutputJson,
        list,
        remove,
    };
}
303805
// Default export: the render-test helper surface, consumed downstream as
// features_renderer.auxiliar.
/* harmony default export */ const auxiliar = ({
    mkNamedTmp,
    loadYaml,
    buildExpectedOutput,
    loadAndValidateRenderTests,
    resolveCrPath,
});
303812
+
303208
303813
  ;// CONCATENATED MODULE: ../features_renderer/src/update_file.ts
303209
303814
 
303210
303815
 
@@ -303216,22 +303821,29 @@ function updateFileContent(featureRenderPath, filePath, content) {
303216
303821
 
303217
303822
 
303218
303823
 
303824
+
303219
303825
  /* harmony default export */ const features_renderer = ({
303220
303826
  validate: validate_validate,
303221
303827
  render: render,
303222
303828
  updateFileContent: updateFileContent,
303829
+ auxiliar: auxiliar,
303830
+ buildContext: buildContext,
303831
+ renderContent: renderContent,
303223
303832
  });
303224
303833
 
303834
;// CONCATENATED MODULE: ../features_preparer/src/logger.ts

// Re-export the shared catalog_common logger for the features_preparer
// package (replaces the package's previous per-module debug() loggers).
/* harmony default export */ const features_preparer_src_logger = (catalog_common.logger);
303837
+
303225
303838
  ;// CONCATENATED MODULE: ../features_preparer/src/renderer.ts
303226
303839
 
303227
303840
 
303228
303841
 
303229
303842
 
303230
- const renderer_messageLog = src_default()('firestartr:features_preparer:renderer');
303231
303843
/**
 * Render an extracted feature package for a given entity.
 *
 * Locates the feature's extracted tarball, computes the per-entity render
 * destination, and delegates the actual rendering to features_renderer.
 */
function renderFeature(featureName, version, owner, repo, featureOwner, renderPath = '/tmp', featureArgs = {}) {
    // Root of the extracted feature tarball for this name/version/owner/repo.
    const tarballRoot = catalog_common.features.tarballs.getFeaturesExtractPath(featureName, version, owner, repo);
    const extractPath = external_path_default().join(tarballRoot, 'packages', featureName);
    const renderedPath = catalog_common.features.features.getFeatureRenderedPathForEntity(featureOwner, featureName, renderPath);
    features_preparer_src_logger.info(`Rendering feature ${featureName} to ${renderedPath} with component ${JSON.stringify(featureOwner)}`);
    return features_renderer.render(extractPath, renderedPath, featureOwner, {}, featureArgs);
}
303237
303849
 
@@ -303270,7 +303882,6 @@ async function downloadZipBall(url, filePath) {
303270
303882
 
303271
303883
 
303272
303884
 
303273
- const installer_log = src_default()('firestartr:features_preparer:installer');
303274
303885
  async function getFeatureConfigFromRef(featureName, featureRef, featureOwner, // -> cr
303275
303886
  featureArgs = {}, repo = 'features', owner = 'prefapp') {
303276
303887
  // reference is the featureRef directly
@@ -303301,12 +303912,12 @@ async function prepareFeature(featureName, version, repo = 'features', owner = '
303301
303912
  async function downloadFeatureZip(repo, featureName, reference, owner = 'prefapp') {
303302
303913
  try {
303303
303914
  const zipballExtractPath = catalog_common.features.tarballs.getFeaturesExtractPath(featureName, reference, owner, repo, { createIfNotExists: false });
303304
- console.log(`Zipball extract path: ${zipballExtractPath}`);
303915
+ features_preparer_src_logger.debug(`Zipball extract path: ${zipballExtractPath}`);
303305
303916
  if (external_fs_.existsSync(zipballExtractPath)) {
303306
- console.log(`Zipball extract path ${zipballExtractPath} already exists, reusing it.`);
303917
+ features_preparer_src_logger.debug(`Zipball extract path ${zipballExtractPath} already exists, reusing it.`);
303307
303918
  return zipballExtractPath;
303308
303919
  }
303309
- installer_log(`Feature ${[featureName, reference, owner, repo].join('-')} has not been downloaded yet, downloading`);
303920
+ features_preparer_src_logger.info(`Feature ${[featureName, reference, owner, repo].join('-')} has not been downloaded yet, downloading`);
303310
303921
  const octokit = await github_0.getOctokitForOrg(owner);
303311
303922
  const response = await octokit.request('GET /repos/{owner}/{repo}/zipball/{reference}', {
303312
303923
  request: {
@@ -303317,28 +303928,28 @@ async function downloadFeatureZip(repo, featureName, reference, owner = 'prefapp
303317
303928
  reference,
303318
303929
  });
303319
303930
  const randomZipTmpPath = `/tmp/${catalog_common.generic.randomString(20)}.zip`;
303320
- console.log(`Downloading feature ${featureName} version ${reference} to ${randomZipTmpPath}`);
303931
+ features_preparer_src_logger.info(`Downloading feature ${featureName} version ${reference} to ${randomZipTmpPath}`);
303321
303932
  if (external_fs_.existsSync(randomZipTmpPath)) {
303322
- console.log(`Temporary zip file ${randomZipTmpPath} already exists, removing it.`);
303933
+ features_preparer_src_logger.debug(`Temporary zip file ${randomZipTmpPath} already exists, removing it.`);
303323
303934
  external_fs_.unlinkSync(randomZipTmpPath);
303324
303935
  }
303325
303936
  const randomExtractPath = `/tmp/${catalog_common.generic.randomString(20)}`;
303326
- console.log(`Extracting feature ${featureName} version ${reference} to ${randomExtractPath}`);
303937
+ features_preparer_src_logger.debug(`Extracting feature ${featureName} version ${reference} to ${randomExtractPath}`);
303327
303938
  external_fs_.rmSync(randomExtractPath, { recursive: true, force: true });
303328
303939
  await downloadZipBall(response.url, randomZipTmpPath);
303329
303940
  const zip = new (adm_zip_default())(randomZipTmpPath);
303330
303941
  const mainEntry = zip.getEntries()[0].entryName;
303331
- console.log(`Main entry in zip: ${mainEntry}`);
303332
- console.log(`Extracting zip to ${randomExtractPath}`);
303942
+ features_preparer_src_logger.debug(`Main entry in zip: ${mainEntry}`);
303943
+ features_preparer_src_logger.debug(`Extracting zip to ${randomExtractPath}`);
303333
303944
  zip.extractAllTo(randomExtractPath, true);
303334
- console.log(`Renaming main entry ${mainEntry} to ${zipballExtractPath}`);
303945
+ features_preparer_src_logger.debug(`Renaming main entry ${mainEntry} to ${zipballExtractPath}`);
303335
303946
  external_fs_.renameSync(`${randomExtractPath}/${mainEntry}`, zipballExtractPath);
303336
- console.log(`Removing temporary zip file ${randomZipTmpPath}`);
303947
+ features_preparer_src_logger.debug(`Removing temporary zip file ${randomZipTmpPath}`);
303337
303948
  external_fs_.unlinkSync(randomZipTmpPath);
303338
303949
  return zipballExtractPath;
303339
303950
  }
303340
303951
  catch (error) {
303341
- console.error(error);
303952
+ features_preparer_src_logger.error(`Error on prepare feature with tag ${reference}: ${error}`);
303342
303953
  throw new Error(`Error for feature with tag ${reference}: ${error}. GitHub response: ${error}`);
303343
303954
  }
303344
303955
  }
@@ -307224,6 +307835,7 @@ async function renderClaim(catalogScope, firestartrScope, claim, patches, previo
307224
307835
 
307225
307836
 
307226
307837
 
307838
+
307227
307839
  /*
307228
307840
  * Function called when rendering but not importing
307229
307841
  *
@@ -307239,6 +307851,7 @@ async function renderer_render(catalogScope, firestartrScope, activateReferentia
307239
307851
  const data = await loadClaimsList(claimList);
307240
307852
  const result = await renderClaims(catalogScope, firestartrScope, data);
307241
307853
  validateTfStateKeyUniqueness(result);
307854
+ validateCrSizes(result);
307242
307855
  return result;
307243
307856
  }
307244
307857
 
@@ -308927,60 +309540,9 @@ const scaffoldSubcommand = {
308927
309540
 
308928
309541
  // EXTERNAL MODULE: ../../node_modules/@kubernetes/client-node/dist/index.js
308929
309542
  var client_node_dist = __nccwpck_require__(54851);
308930
- // EXTERNAL MODULE: ../../node_modules/winston/lib/winston.js
308931
- var winston = __nccwpck_require__(66752);
308932
- var winston_default = /*#__PURE__*/__nccwpck_require__.n(winston);
308933
309543
  ;// CONCATENATED MODULE: ../operator/src/logger.ts
308934
309544
 
308935
- const validLogLevels = [
308936
- 'error',
308937
- 'warn',
308938
- 'info',
308939
- 'debug',
308940
- 'verbose',
308941
- 'silly',
308942
- ];
308943
- let logger_initiated = false;
308944
- let logger = null;
308945
- // Type guard to check if a value is a valid LogLevel
308946
- function isValidLogLevel(level) {
308947
- return (typeof level === 'string' && validLogLevels.includes(level));
308948
- }
308949
- function initLogger() {
308950
- if (logger_initiated)
308951
- return;
308952
- const logLevel = process.env.LOG_LEVEL && isValidLogLevel(process.env.LOG_LEVEL)
308953
- ? process.env.LOG_LEVEL
308954
- : 'info';
308955
- logger = winston_default().createLogger({
308956
- level: logLevel,
308957
- exitOnError: false,
308958
- format: winston.format.combine(winston.format.timestamp({ format: 'YYYY-MM-DD HH:mm:ss' }), winston.format.json()),
308959
- transports: [
308960
- new winston.transports.Console({
308961
- level: logLevel,
308962
- }),
308963
- ],
308964
- });
308965
- logger_initiated = true;
308966
- }
308967
- function doLog(level, args) {
308968
- initLogger();
308969
- const [message, metadata] = args;
308970
- // eslint-disable-next-line prefer-spread
308971
- logger[level].apply(logger, [
308972
- message + ' | ' + JSON.stringify({ ...metadata }),
308973
- ]);
308974
- }
308975
- const logger_log = {
308976
- error: (...args) => doLog('error', args),
308977
- warn: (...args) => doLog('warn', args),
308978
- info: (...args) => doLog('info', args),
308979
- debug: (...args) => doLog('debug', args),
308980
- verbose: (...args) => doLog('verbose', args),
308981
- silly: (...args) => doLog('silly', args),
308982
- };
308983
- /* harmony default export */ const src_logger = (logger_log);
309545
+ /* harmony default export */ const operator_src_logger = (catalog_common.logger);
308984
309546
 
308985
309547
  ;// CONCATENATED MODULE: ../operator/src/store.ts
308986
309548
 
@@ -308991,13 +309553,7 @@ class Store {
308991
309553
  this.kind = kind;
308992
309554
  }
308993
309555
  add(item) {
308994
- src_logger.debug('STORE_ADD_ITEM', {
308995
- metadata: {
308996
- name: item.metadata.name,
308997
- kind: this.kind,
308998
- namespace: item.metadata.namespace,
308999
- },
309000
- });
309556
+ operator_src_logger.debug(`Added item '${item.metadata.name}' of kind '${this.kind}' to the store in namespace '${item.metadata.namespace}'`);
309001
309557
  this.store[itemPath(this.kind, item)] = {
309002
309558
  item,
309003
309559
  };
@@ -309006,13 +309562,7 @@ class Store {
309006
309562
  return 'deletionTimestamp' in item.metadata;
309007
309563
  }
309008
309564
  markToDelete(item) {
309009
- src_logger.debug('STORE_MARKED_ITEM_TO_DELETE', {
309010
- metadata: {
309011
- name: item.metadata.name,
309012
- kind: this.kind,
309013
- namespace: item.metadata.namespace,
309014
- },
309015
- });
309565
+ operator_src_logger.debug(`Marked item '${item.metadata.name}' of kind '${this.kind}' for deletion in namespace '${item.metadata.namespace}'`);
309016
309566
  this.store[itemPath(this.kind, item)] = {
309017
309567
  item,
309018
309568
  markedToDelete: true,
@@ -309045,24 +309595,11 @@ class Store {
309045
309595
  item,
309046
309596
  };
309047
309597
  if (updated)
309048
- src_logger.debug('STORE_ITEM_MODIFIED', {
309049
- metadata: {
309050
- name: item.metadata.name,
309051
- kind: this.kind,
309052
- namespace: item.metadata.namespace,
309053
- patches,
309054
- },
309055
- });
309598
+ operator_src_logger.debug(`Modified item '${item.metadata.name}' of kind '${this.kind}' in namespace '${item.metadata.namespace}' with patches ${JSON.stringify(patches)}`);
309056
309599
  return updated;
309057
309600
  }
309058
309601
  remove(item) {
309059
- src_logger.debug('STORE_ITEM_REMOVED', {
309060
- metadata: {
309061
- name: item.metadata.name,
309062
- kind: this.kind,
309063
- namespace: item.metadata.namespace,
309064
- },
309065
- });
309602
+ operator_src_logger.debug(`Removed item '${item.metadata.name}' of kind '${this.kind}' from namespace '${item.metadata.namespace}'`);
309066
309603
  delete this.store[itemPath(this.kind, item)];
309067
309604
  }
309068
309605
  getItem(item) {
@@ -309109,7 +309646,7 @@ async function getItem(kind, namespace, item) {
309109
309646
  }
309110
309647
  async function getItemByItemPath(itemPath, apiGroup = catalog_common.types.controller.FirestartrApiGroup, apiVersion = 'v1') {
309111
309648
  try {
309112
- src_logger.debug('CTL_GET_ITEM', { metadata: { itemPath } });
309649
+ operator_src_logger.debug(`The ctl is getting the item at '${itemPath}'.`);
309113
309650
  const { kc, opts } = await ctl_getConnection();
309114
309651
  opts.headers['Content-Type'] = 'application/json';
309115
309652
  opts.headers['Accept'] = 'application/json';
@@ -309119,14 +309656,14 @@ async function getItemByItemPath(itemPath, apiGroup = catalog_common.types.contr
309119
309656
  const r = await fetch(url, { method: 'get', headers: opts.headers });
309120
309657
  if (!r.ok) {
309121
309658
  const err = new Error(`Error on getItemByItemPath: ${itemPath}: ${r.statusText}`);
309122
- console.log(err.stack);
309659
+ operator_src_logger.error(`Error on getItemByItemPath: ${itemPath}: ${r.statusText}`);
309123
309660
  throw err;
309124
309661
  }
309125
309662
  const jsonResponse = await r.json();
309126
309663
  return jsonResponse;
309127
309664
  }
309128
309665
  catch (e) {
309129
- console.dir(e, { depth: null });
309666
+ operator_src_logger.error(`Error on getItemByItemPath: ${e}`);
309130
309667
  throw e;
309131
309668
  }
309132
309669
  }
@@ -309146,17 +309683,15 @@ async function writeManifest(kind, namespace, item, apiSlug) {
309146
309683
  return jsonResponse;
309147
309684
  }
309148
309685
  function writeSecret(secret, namespace) {
309149
- log.debug('CTL_WRITE_SECRET', {
309150
- metadata: { namespace, name: secret.metadata.name },
309151
- });
309686
+ log.debug(`The ctl is writing the secret '${secret.metadata.name}' in namespace '${namespace}'.`);
309152
309687
  return writeManifest('secrets', namespace, secret, `api/v1/namespaces/${namespace}/secrets/${secret.metadata.name}`);
309153
309688
  }
309154
309689
  async function writeStatus(kind, namespace, item) {
309155
- src_logger.debug('CTL_WRITE_STATUS', { metadata: { item } });
309690
+ operator_src_logger.debug(`The ctl is writing the status for item '${item.kind}/${item.metadata.name}' in namespace '${item.metadata.namespace}'.`);
309156
309691
  return await writeManifest(kind, namespace, item, `apis/firestartr.dev/v1/namespaces/${namespace}/${kind}/${item.metadata.name}/status`);
309157
309692
  }
309158
309693
  function writeFinalizer(kind, namespace, item) {
309159
- log.debug('CTL_WRITE_FINALIZER', { metadata: { item } });
309694
+ log.debug(`The ctl is writing the status for item '${item.kind}/${item.metadata.name}' in namespace '${item.metadata.namespace}'.`);
309160
309695
  return writeManifest(kind, namespace, item, `apis/firestartr.dev/v1/namespaces/${namespace}/${kind}/${item.metadata.name}/metadata/finalizers`);
309161
309696
  }
309162
309697
  async function listItems(kind, namespace, kc, opts) {
@@ -309171,7 +309706,7 @@ async function listItems(kind, namespace, kc, opts) {
309171
309706
  return await r.json();
309172
309707
  }
309173
309708
  catch (err) {
309174
- console.dir(err);
309709
+ log.error(`On listItems: ${err}`);
309175
309710
  throw err;
309176
309711
  }
309177
309712
  }
@@ -309198,15 +309733,10 @@ async function* observeList(kind, namespace, revision, kc, opts) {
309198
309733
  }
309199
309734
  catch (err) {
309200
309735
  if (err instanceof TypeError) {
309201
- log.error('CTL_OBSERVE_LIST_ERROR_CHUNKS', {
309202
- metadata: { namespace, kind, revision, error: err },
309203
- });
309736
+ log.error(`The ctl encountered an error while listing chunks for '${kind}' with revision '${revision}' in namespace '${namespace}': '${err}'.`);
309204
309737
  }
309205
309738
  else {
309206
- log.error('CTL_OBSERVE_LIST_UNKNOWN_ERROR_CHUNKS', {
309207
- metadata: { namespace, kind, revision, error: err },
309208
- });
309209
- console.error(err);
309739
+ log.error(`The ctl encountered an unknown error while listing chunks for '${kind}' with revision '${revision}' in namespace '${namespace}': '${err}'.`);
309210
309740
  }
309211
309741
  }
309212
309742
  }
@@ -309229,7 +309759,7 @@ async function ctl_getConnection() {
309229
309759
  return { kc, opts };
309230
309760
  }
309231
309761
  catch (err) {
309232
- console.dir(err, { depth: null });
309762
+ operator_src_logger.error(`getConnection: ${err}`);
309233
309763
  throw err;
309234
309764
  }
309235
309765
  }
@@ -309288,9 +309818,7 @@ async function deleteSecret(secretName, namespace) {
309288
309818
  }
309289
309819
  catch (e) {
309290
309820
  if (e && e.code === 404) {
309291
- src_logger.error('CTL_DELETE_SECRET_NOT_FOUND', {
309292
- metadata: { secretName, namespace },
309293
- });
309821
+ operator_src_logger.error(`The ctl failed to delete the secret '${secretName}' in namespace '${namespace}' because it was not found.`);
309294
309822
  return null;
309295
309823
  }
309296
309824
  else {
@@ -309324,9 +309852,7 @@ async function getSecret(namespace, secretName) {
309324
309852
  }
309325
309853
  catch (e) {
309326
309854
  if (e.response && e.response.statusCode === 404) {
309327
- src_logger.error('CTL_SECRET_NOT_FOUND', {
309328
- metadata: { secretName, namespace },
309329
- });
309855
+ operator_src_logger.error(`The ctl could not find the secret '${secretName}' in namespace '${namespace}'.`);
309330
309856
  return null;
309331
309857
  }
309332
309858
  else {
@@ -309357,13 +309883,7 @@ async function getTFResult(namespace, item) {
309357
309883
  * @param {any} item - Object to check if has been renamed
309358
309884
  */
309359
309885
  async function checkIfRenamed(namespace, item) {
309360
- log.debug('CTL_CHECK_IF_RENAMED', {
309361
- metadata: {
309362
- kind: item.kind,
309363
- name: item.metadata.name,
309364
- namespace,
309365
- },
309366
- });
309886
+ log.debug(`The ctl is checking if item '${item.kind}/${item.metadata.name}' in namespace '${namespace}' has been renamed.`);
309367
309887
  const oldName = item.metadata?.labels?.[common.types.controller.FirestartrLabelOldName];
309368
309888
  // If the item does not have firestartr.dev/old-name label, it has not been renamed
309369
309889
  if (!oldName)
@@ -309379,9 +309899,7 @@ async function checkIfRenamed(namespace, item) {
309379
309899
  });
309380
309900
  if (!r.ok) {
309381
309901
  if (r.status === 404) {
309382
- log.debug('CTL_CHECK_IF_RENAMED_OLDNAME_NOT_FOUND', {
309383
- metadata: { kind: item.kind, name: item.metadata.name, namespace },
309384
- });
309902
+ log.debug(`The ctl is checking for a rename of item '${item.kind}/${item.metadata.name}' in namespace '${namespace}', but the old item name was not found.`);
309385
309903
  return false;
309386
309904
  }
309387
309905
  }
@@ -309389,21 +309907,17 @@ async function checkIfRenamed(namespace, item) {
309389
309907
  return true;
309390
309908
  }
309391
309909
  catch (err) {
309392
- console.log(err);
309910
+ log.debug(err);
309393
309911
  return false;
309394
309912
  }
309395
309913
  }
309396
309914
  async function upsertFinalizer(kind, namespace, item, finalizer) {
309397
309915
  if ('finalizers' in item.metadata &&
309398
309916
  item.metadata.finalizers.includes(finalizer)) {
309399
- src_logger.debug('CTL_UPSERT_FINALIZER_ALREADY_SET', {
309400
- metadata: { finalizer, kind, name: item.metadata.name, namespace },
309401
- });
309917
+ operator_src_logger.debug(`The ctl tried to upsert the finalizer '${finalizer}' for '${kind}/${item.metadata.name}' in namespace '${namespace}', but it was already set.`);
309402
309918
  return;
309403
309919
  }
309404
- src_logger.debug('CTL_UPSERT_FINALIZER_SETTING', {
309405
- metadata: { finalizer, kind, name: item.metadata.name, namespace },
309406
- });
309920
+ operator_src_logger.debug(`The ctl is setting the finalizer '${finalizer}' for '${kind}/${item.metadata.name}' in namespace '${namespace}'.`);
309407
309921
  const { kc, opts } = await ctl_getConnection();
309408
309922
  const url = `${kc.getCurrentCluster().server}/apis/firestartr.dev/v1/namespaces/${namespace}/${kind}/${item.metadata.name}`;
309409
309923
  opts.headers['Content-Type'] = 'application/json-patch+json';
@@ -309444,14 +309958,7 @@ async function upsertFinalizer(kind, namespace, item, finalizer) {
309444
309958
  async function unsetFinalizer(kind, namespace, item, finalizer) {
309445
309959
  const { kc, opts } = await ctl_getConnection();
309446
309960
  const name = typeof item === 'string' ? item : item.metadata.name;
309447
- src_logger.debug('CTL_REMOVE_FINALIZER', {
309448
- metadata: {
309449
- finalizer,
309450
- kind,
309451
- name,
309452
- namespace,
309453
- },
309454
- });
309961
+ operator_src_logger.debug(`The ctl is removing the finalizer '${finalizer}' from '${kind}/${name}' in namespace '${namespace}'.`);
309455
309962
  const url = `${kc.getCurrentCluster().server}/apis/firestartr.dev/v1/namespaces/${namespace}/${kind}/${name}`;
309456
309963
  opts.headers['Content-Type'] = 'application/json-patch+json';
309457
309964
  opts.headers['Accept'] = '*';
@@ -309513,8 +310020,7 @@ async function writePlanInGithubPR(prUrl, planText) {
309513
310020
  await github_0.pulls.commentInPR(message, +pr_number, repo, owner);
309514
310021
  }
309515
310022
  catch (err) {
309516
- console.error(err);
309517
- console.log('Cannot write plan in PR');
310023
+ operator_src_logger.error(`writePlanInGithubPR: Cannot write plan in PR: ${err}`);
309518
310024
  }
309519
310025
  }
309520
310026
  async function addApplyCommitStatus(cr, state, targetURL = '', description = '', context = '') {
@@ -309522,15 +310028,7 @@ async function addApplyCommitStatus(cr, state, targetURL = '', description = '',
309522
310028
  await addCommitStatusToPrMergeCommit(cr.metadata.annotations['firestartr.dev/last-state-pr'], state, targetURL, description, context);
309523
310029
  }
309524
310030
  catch (e) {
309525
- src_logger.error('CTL_ADD_APPLY_COMMIT_STATUS_ERROR', {
309526
- metadata: {
309527
- state,
309528
- targetURL,
309529
- description,
309530
- cr_metadata: cr.metadata,
309531
- error: e,
309532
- },
309533
- });
310031
+ log.error(`The ctl encountered an error while adding commit status for custom resource '${cr.metadata.name}' in namespace '${cr.metadata.namespace}'. State: '${state}'. Target URL: '${targetURL}'. Description: '${description}'. Error: '${e}'.`);
309534
310032
  }
309535
310033
  }
309536
310034
  async function addDestroyCommitStatus(cr, state, description = '', context = '') {
@@ -309539,50 +310037,33 @@ async function addDestroyCommitStatus(cr, state, description = '', context = '')
309539
310037
  await addCommitStatusToPrMergeCommit(prUrl, state, '', description, context);
309540
310038
  }
309541
310039
  catch (e) {
309542
- src_logger.error('CTL_ADD_DESTROY_COMMIT_STATUS_ERROR', {
309543
- metadata: {
309544
- state,
309545
- description,
309546
- cr_metadata: cr.metadata,
309547
- error: e,
309548
- },
309549
- });
310040
+ operator_src_logger.error(`The ctl encountered an error while adding the destroy commit status for custom resource '${cr.metadata.name}' in namespace '${cr.metadata.namespace}'. State: '${state}'. Description: '${description}'. Error: '${e}'.`);
309550
310041
  }
309551
310042
  }
309552
310043
  async function addPlanStatusCheck(prUrl, summary, status = 'in_progress', isFailure = false) {
309553
310044
  try {
309554
- src_logger.debug('CTL_ADD_PLAN_STATUS_CHECK_SUMMARY_LENGTH', {
309555
- metadata: { length: summary.length },
309556
- });
310045
+ operator_src_logger.debug(`The ctl is checking the length of the plan summary, which is '${summary.length}'.`);
309557
310046
  if (summary.length > MAX_CHARS_OUPUT_PLAN) {
309558
310047
  const mustDrop = summary.length - MAX_CHARS_OUPUT_PLAN;
309559
310048
  summary = summary.substring(mustDrop);
309560
- src_logger.debug('CTL_ADD_PLAN_STATUS_CHECK_SUMMARY_TOO_LENGTHY', {
309561
- metadata: { mustDrop, length: summary.length },
309562
- });
310049
+ operator_src_logger.debug(`The ctl found the plan summary too lengthy (length: '${summary.length}'). The summary must drop because '${mustDrop}'.`);
309563
310050
  }
309564
310051
  await ctl_addStatusCheck({ summary, title: 'Terraform Plan Results' }, isFailure, 'terraform_plan', prUrl, status);
309565
310052
  }
309566
310053
  catch (e) {
309567
- src_logger.error('CTL_ADD_PLAN_STATUS_CHECK_ERROR', {
309568
- metadata: { prUrl, status, isFailure, error: e },
309569
- });
310054
+ operator_src_logger.error(`The ctl encountered an error while adding plan status for PR '${prUrl}' with status '${status}'. Is Failure: '${isFailure}'. Error: '${e}'.`);
309570
310055
  }
309571
310056
  }
309572
310057
  async function ctl_addStatusCheck(output, isFailure, name, prAnnotationValue, status) {
309573
310058
  const { owner, repo, prNumber } = catalog_common.generic.getOwnerRepoPrNumberFromAnnotationValue(prAnnotationValue);
309574
310059
  const branchSha = await github_0.pulls.getPrLastCommitSHA(prNumber, repo, owner);
309575
- src_logger.info('CTL_ADD_STATUS_CHECK', {
309576
- metadata: { owner, repo, branchSha, prAnnotationValue, name },
309577
- });
310060
+ operator_src_logger.info(`The ctl is adding a status check for '${owner}/${repo}' on branch '${branchSha}' with PR annotation value '${prAnnotationValue}' and name '${name}'.`);
309578
310061
  await github_0.repo.addStatusCheck(output, isFailure, branchSha, name, status, repo, owner);
309579
310062
  }
309580
310063
  async function addCommitStatusToPrMergeCommit(prAnnotationValue, state, targetURL, description, context) {
309581
310064
  const { owner, repo, prNumber } = catalog_common.generic.getOwnerRepoPrNumberFromAnnotationValue(prAnnotationValue);
309582
310065
  const branchSha = await github_0.pulls.getPrMergeCommitSHA(prNumber, repo, owner);
309583
- src_logger.info('CTL_ADD_COMMIT_STATUS', {
309584
- metadata: { owner, repo, branchSha, state, targetURL },
309585
- });
310066
+ operator_src_logger.info(`The ctl is adding a commit status for '${owner}/${repo}' on branch '${branchSha}'. State: '${state}'. Target URL: '${targetURL}'.`);
309586
310067
  await github_0.repo.addCommitStatus(state, branchSha, repo, owner, targetURL, description, context);
309587
310068
  }
309588
310069
  async function getLastStatePrInfo(cr) {
@@ -309650,62 +310131,42 @@ async function observe(plural, namespace, onAdd, onChange, onDelete, _onRename)
309650
310131
  informer.on('add', (obj) => {
309651
310132
  store.add(obj);
309652
310133
  if (store.hasDeletionTimestamp(obj)) {
309653
- src_logger.info('REFLECTOR_ITEM_MARKED_TO_DELETION', {
309654
- metadata: { kind: obj.kind, name: obj.metadata.name },
309655
- });
310134
+ operator_src_logger.info(`Reflector has marked item '${obj.kind}/${obj.metadata.name}' for deletion.`);
309656
310135
  store.markToDelete(obj);
309657
310136
  onDelete(obj);
309658
310137
  }
309659
310138
  else {
309660
- src_logger.info('REFLECTOR_ITEM_ADDED', {
309661
- metadata: { kind: obj.kind, name: obj.metadata.name },
309662
- });
310139
+ operator_src_logger.info(`Reflector has added item '${obj.kind}/${obj.metadata.name}'.`);
309663
310140
  onAdd(obj);
309664
310141
  }
309665
310142
  });
309666
310143
  informer.on('update', (obj) => {
309667
- src_logger.info('REFLECTOR_ITEM_UPDATED', {
309668
- metadata: {
309669
- kind: obj.kind,
309670
- name: obj.metadata.name,
309671
- resourceVersion: obj.metadata.resourceVersion,
309672
- },
309673
- });
310144
+ operator_src_logger.info(`Reflector has updated item '${obj.kind}/${obj.metadata.name}' to a new resource version: '${obj.metadata.resourceVersion}'.`);
309674
310145
  if (!store.getItem(obj).markedToDelete &&
309675
310146
  store.hasDeletionTimestamp(obj) &&
309676
310147
  (store.hasBeenMarkedToDelete(obj) || store.modified(obj))) {
309677
- src_logger.info('REFLECTOR_ITEM_UPDATED_MARKED_TO_DELETION', {
309678
- metadata: { kind: obj.kind, name: obj.metadata.name },
309679
- });
310148
+ operator_src_logger.info(`Reflector has updated item '${obj.kind}/${obj.metadata.name}' and marked it for deletion.`);
309680
310149
  store.markToDelete(obj);
309681
310150
  onDelete(obj);
309682
310151
  }
309683
310152
  else if (store.modified(obj)) {
309684
- src_logger.info('REFLECTOR_ITEM_UPDATED_AND_MODIFIED', {
309685
- metadata: { kind: obj.kind, name: obj.metadata.name },
309686
- });
310153
+ operator_src_logger.info(`Reflector has updated and modified item '${obj.kind}/${obj.metadata.name}'.`);
309687
310154
  onChange(obj);
309688
310155
  }
309689
310156
  });
309690
310157
  informer.on('delete', (obj) => {
309691
310158
  // deleted from the etcd
309692
- src_logger.info('REFLECTOR_ITEM_DELETED', {
309693
- metadata: { kind: obj.kind, name: obj.metadata.name },
309694
- });
310159
+ operator_src_logger.info(`Reflector has deleted item '${obj.kind}/${obj.metadata.name}' from the etcd.`);
309695
310160
  store.remove(obj);
309696
310161
  });
309697
310162
  informer.on('error', (err) => {
309698
- src_logger.error('REFLECTOR_ITEM_ERROR', {
309699
- metadata: { error: err, plural, namespace },
309700
- });
310163
+ operator_src_logger.error(`An error occurred in the reflector for '${plural}' in namespace '${namespace}': '${err}'.`);
309701
310164
  setTimeout(async () => {
309702
310165
  try {
309703
310166
  await informer.start();
309704
310167
  }
309705
310168
  catch (err) {
309706
- src_logger.error('REFLECTOR_INFORMER_START_ERROR', {
309707
- metadata: { error: err, plural, namespace },
309708
- });
310169
+ operator_src_logger.error(`Failed to start the reflector informer for '${plural}' in namespace '${namespace}': '${err}'.`);
309709
310170
  }
309710
310171
  }, 5000);
309711
310172
  });
@@ -309725,13 +310186,13 @@ async function needsProvisioningOnCreate(cr) {
309725
310186
  const fCrLog = (cr) => `The item ${cr.kind}: ${cr.metadata.name}`;
309726
310187
  // NO STATUS
309727
310188
  if (!('status' in cr) || !('conditions' in cr.status)) {
309728
- src_logger.debug('STATUS_NO_STATUS_NOR_CONDITION', { metadata: { cr } });
310189
+ operator_src_logger.debug(`The custom resource '${cr.kind}/${cr.metadata.name}' is missing a status and any conditions.`);
309729
310190
  return true;
309730
310191
  }
309731
310192
  // ERROR
309732
310193
  const errCond = getConditionByType(cr.status.conditions, 'ERROR');
309733
310194
  if (errCond && errCond.status === 'True') {
309734
- src_logger.debug('STATUS_ERROR_SKIP_PROVISION', { metadata: { cr } });
310195
+ operator_src_logger.debug(`Skipping the provisioning process for custom resource '${cr.kind}/${cr.metadata.name}' due to a status error.`);
309735
310196
  return false;
309736
310197
  }
309737
310198
  // PROVISIONED
@@ -309739,7 +310200,7 @@ async function needsProvisioningOnCreate(cr) {
309739
310200
  if (provCond &&
309740
310201
  provCond.status === 'True' &&
309741
310202
  provCond.observedGeneration >= cr.metadata.generation) {
309742
- src_logger.debug('STATUS_ALREADY_PROVISIONED', { metadata: { cr } });
310203
+ operator_src_logger.debug(`The custom resource '${cr.kind}/${cr.metadata.name}' is already provisioned; skipping the process.`);
309743
310204
  return false;
309744
310205
  }
309745
310206
  // DELETED
@@ -309747,29 +310208,20 @@ async function needsProvisioningOnCreate(cr) {
309747
310208
  if (delCond &&
309748
310209
  delCond.status === 'True' &&
309749
310210
  delCond.observedGeneration >= cr.metadata.generation) {
309750
- src_logger.debug('STATUS_ALREADY_DELETED', { metadata: { cr } });
310211
+ operator_src_logger.debug(`The custom resource '${cr.kind}/${cr.metadata.name}' has already been deleted; no action is required.`);
309751
310212
  return false;
309752
310213
  }
309753
310214
  // PROVISIONING
309754
310215
  const provisioningCondition = getConditionByType(cr.status.conditions, 'PROVISIONING');
309755
310216
  if (provisioningCondition && provisioningCondition.status === 'True') {
309756
- src_logger.debug('STATUS_IN_PROVISIONING_REPROVISIONING', { metadata: { cr } });
310217
+ operator_src_logger.debug(`The custom resource '${cr.kind}/${cr.metadata.name}' is currently in a provisioning or reprovisioning state.`);
309757
310218
  return true;
309758
310219
  }
309759
- src_logger.debug('STATUS_NOT_HANDLED_STATE_SKIP_PROVISIONING', { metadata: { cr } });
310220
+ operator_src_logger.debug(`Skipping the provisioning process for custom resource '${cr.kind}/${cr.metadata.name}' because its current state is not handled.`);
309760
310221
  return false;
309761
310222
  }
309762
310223
  async function updateTransition(itemPath, reason, type, statusValue, message = '', updateStatusOnly = false) {
309763
- src_logger.info('STATUS_UPDATE_TRANSITION_FOR_ITEM', {
309764
- metadata: {
309765
- itemPath,
309766
- reason,
309767
- type,
309768
- statusValue,
309769
- message,
309770
- updateStatusOnly,
309771
- },
309772
- });
310224
+ operator_src_logger.info(`The item at '${itemPath}' transitioned to a new status of '${statusValue}' (type: '${type}'). The reason for the change is '${reason}' with the message: '${message}'. This was a status-only update: '${updateStatusOnly}'.`);
309773
310225
  const k8sItem = await getItemByItemPath(itemPath);
309774
310226
  if (!('status' in k8sItem))
309775
310227
  k8sItem.status = {};
@@ -309852,7 +310304,7 @@ async function syncer(enqueue) {
309852
310304
  void loop(enqueue);
309853
310305
  return {
309854
310306
  addItem(itemPath) {
309855
- src_logger.info('SYNC_ADD_ITEM', { metadata: { itemPath } });
310307
+ operator_src_logger.info(`Added item of path '${itemPath}' for synchronization`);
309856
310308
  void itemIsSyncable(itemPath).then((itemSyncInfo) => {
309857
310309
  if (!itemSyncInfo.syncable) {
309858
310310
  return;
@@ -309864,7 +310316,7 @@ async function syncer(enqueue) {
309864
310316
  }, helperCalculateRevisionTime(itemSyncInfo.period)),
309865
310317
  needsRevision: false,
309866
310318
  };
309867
- src_logger.info('Configured syncing for item %s %s', itemPath, syncWatchers[itemPath]);
310319
+ operator_src_logger.info(`Configured synchronization for item at path '${itemPath}'`);
309868
310320
  });
309869
310321
  },
309870
310322
  updateItem(itemPath) {
@@ -309872,13 +310324,13 @@ async function syncer(enqueue) {
309872
310324
  // log('Item %s not found, ignoring...', itemPath)
309873
310325
  // return
309874
310326
  //}
309875
- src_logger.debug('SYNC_UPDATE_ITEM', { metadata: { itemPath } });
310327
+ operator_src_logger.debug(`Updated item of path '${itemPath}' during synchronization`);
309876
310328
  void itemIsSyncable(itemPath).then((itemSyncInfo) => {
309877
310329
  if (!itemSyncInfo.syncable) {
309878
310330
  if (syncWatchers[itemPath]) {
309879
310331
  clearInterval(syncWatchers[itemPath].lastRevision);
309880
310332
  delete syncWatchers[itemPath];
309881
- src_logger.info('SYNC_REMOVE_FOR_ITEM', { metadata: { itemPath } });
310333
+ operator_src_logger.info(`Removed item of path '${itemPath}' from synchronization`);
309882
310334
  }
309883
310335
  }
309884
310336
  else {
@@ -309892,26 +310344,19 @@ async function syncer(enqueue) {
309892
310344
  }, helperCalculateRevisionTime(itemSyncInfo.period)),
309893
310345
  needsRevision: false,
309894
310346
  };
309895
- src_logger.debug('SYNC_CONFIGURED_FOR_ITEM', {
309896
- metadata: {
309897
- itemPath,
309898
- watcher: syncWatchers[itemPath],
309899
- },
309900
- });
310347
+ operator_src_logger.debug(`Configured synchronization for item at path '${itemPath}' with watcher '${syncWatchers[itemPath]}'`);
309901
310348
  }
309902
310349
  });
309903
310350
  },
309904
310351
  deleteItem(itemPath) {
309905
310352
  if (!syncWatchers[itemPath]) {
309906
- src_logger.debug('SYNC_DELETE_ITEM_NOT_FOUND_IGNORE', {
309907
- metadata: { itemPath },
309908
- });
310353
+ operator_src_logger.debug(`Ignored deletion attempt for item at path '${itemPath}' as it was not found during synchronization`);
309909
310354
  return;
309910
310355
  }
309911
- src_logger.debug('SYNC_DELETE_ITEM', { metadata: { itemPath } });
310356
+ operator_src_logger.debug(`Deleted item of path '${itemPath}' during synchronization`);
309912
310357
  clearInterval(syncWatchers[itemPath].lastRevision);
309913
310358
  delete syncWatchers[itemPath];
309914
- src_logger.debug('SYNC_DELETE_ITEM_DELETED', { metadata: { itemPath } });
310359
+ operator_src_logger.debug(`Successfully deleted item at path '${itemPath}' during synchronization`);
309915
310360
  },
309916
310361
  };
309917
310362
  }
@@ -309983,13 +310428,7 @@ async function initRetry(enqueue) {
309983
310428
  function retry(itemPath) {
309984
310429
  if (retryWatchers[itemPath]) {
309985
310430
  retryWatchers[itemPath].retryCounter++;
309986
- src_logger.debug('RETRY_FAILED', {
309987
- metadata: {
309988
- itemPath,
309989
- remainRetries: MAXRETRY - retryWatchers[itemPath].retryCounter,
309990
- nextRetry: NEXT_RETRY_SECS * retryWatchers[itemPath].retryCounter,
309991
- },
309992
- });
310431
+ operator_src_logger.debug(`Failed to process item '${itemPath}'. Retrying in '${NEXT_RETRY_SECS * retryWatchers[itemPath].retryCounter}' seconds. Remaining retries: '${MAXRETRY - retryWatchers[itemPath].retryCounter}'.`);
309993
310432
  retryWatchers[itemPath].retry = false;
309994
310433
  retryWatchers[itemPath].nextRetry = setTimeout(() => {
309995
310434
  if (itemPath in retryWatchers)
@@ -310040,12 +310479,7 @@ async function getItemIfNeededRetry(watcher) {
310040
310479
  }
310041
310480
  catch (e) {
310042
310481
  if (e.message && e.message.includes('Error on getItemByItemPath')) {
310043
- src_logger.debug('RETRY_ERROR_ITEM_NOT_FOUND', {
310044
- metadata: {
310045
- message: 'item not found, removed from the retry process',
310046
- itemPath: watcher.itemPath,
310047
- },
310048
- });
310482
+ operator_src_logger.debug(`Item '${watcher.itemPath}' not found, so it has been removed from the retry process.`);
310049
310483
  removeFromRetry(watcher.itemPath);
310050
310484
  return null;
310051
310485
  }
@@ -310085,9 +310519,7 @@ async function resolve(cr, getItemByItemPath, getSecret, namespace = 'default')
310085
310519
  async function resolveSecretRef(namespace, crDependency, getSecret) {
310086
310520
  let secretName = `${crDependency['kind']}-${crDependency['metadata']['name']}-outputs`.toLowerCase();
310087
310521
  if (crDependency.kind === 'FirestartrProviderConfig') {
310088
- src_logger.debug('RESOLVER_SKIP_SECRET_RESOLUTION_FOR', {
310089
- metadata: { kind: 'FirestartrProviderConfig', namespace, crDependency },
310090
- });
310522
+ operator_src_logger.debug(`The resolver is skipping secret resolution for '${crDependency.kind}/${crDependency.metadata.name}' of kind 'FirestartrProviderConfig' in namespace '${namespace}'.`);
310091
310523
  return undefined;
310092
310524
  }
310093
310525
  if (crDependency.kind === 'ExternalSecret') {
@@ -310095,9 +310527,7 @@ async function resolveSecretRef(namespace, crDependency, getSecret) {
310095
310527
  }
310096
310528
  const secret = await getSecret(namespace, secretName);
310097
310529
  if (!secret) {
310098
- src_logger.error('RESOLVER_SECRET_NOT_SOLVABLE', {
310099
- metadata: { secretName, crDependency, namespace },
310100
- });
310530
+ operator_src_logger.error(`The resolver could not find the secret '${secretName}' required by custom resource dependency '${crDependency}' in namespace '${namespace}'.`);
310101
310531
  console.error(`Could not resolve secret ${secretName}`);
310102
310532
  }
310103
310533
  return secret;
@@ -310233,9 +310663,7 @@ const kindsWithFinalizer = [
310233
310663
  */
310234
310664
  async function observeKind(pluralKind, namespace, queue, compute) {
310235
310665
  const lastWorkItems = {};
310236
- src_logger.info('INFORMER_OBSERVE_START', {
310237
- metadata: { kind: pluralKind, namespace },
310238
- });
310666
+ operator_src_logger.info(`The informer has started observing the '${pluralKind}' resource in namespace '${namespace}'.`);
310239
310667
  // onSync
310240
310668
  const enqueueCallback = (event) => {
310241
310669
  return async (item) => {
@@ -310252,13 +310680,7 @@ async function observeKind(pluralKind, namespace, queue, compute) {
310252
310680
  await observe(pluralKind, namespace,
310253
310681
  // on add
310254
310682
  async (item) => {
310255
- src_logger.info('INFORMER_ON_ITEM_ADDED', {
310256
- metadata: {
310257
- kind: pluralKind,
310258
- namespace,
310259
- name: item.metadata.name,
310260
- },
310261
- });
310683
+ operator_src_logger.info(`The informer has detected a new item, '${item.metadata.name}', for '${pluralKind}' in namespace '${namespace}'.`);
310262
310684
  await handleUpsertFinalizer(pluralKind, namespace, item);
310263
310685
  const workItem = await inform(pluralKind, item, 'onAdd', getLastWorkItem(pluralKind, lastWorkItems, item));
310264
310686
  syncCtl.addItem(informer_itemPath(pluralKind, item));
@@ -310269,13 +310691,7 @@ async function observeKind(pluralKind, namespace, queue, compute) {
310269
310691
  },
310270
310692
  // on modify
310271
310693
  async (item) => {
310272
- src_logger.info('INFORMER_ON_ITEM_MODIFIED', {
310273
- metadata: {
310274
- kind: pluralKind,
310275
- namespace,
310276
- name: item.metadata.name,
310277
- },
310278
- });
310694
+ operator_src_logger.info(`The informer has detected that item '${item.metadata.name}' for '${pluralKind}' in namespace '${namespace}' was modified.`);
310279
310695
  const workItem = await inform(pluralKind, item, 'onUpdate', getLastWorkItem(pluralKind, lastWorkItems, item));
310280
310696
  if (workItem) {
310281
310697
  setLastWorkItem(pluralKind, lastWorkItems, item, workItem);
@@ -310284,13 +310700,7 @@ async function observeKind(pluralKind, namespace, queue, compute) {
310284
310700
  },
310285
310701
  // on delete
310286
310702
  async (item) => {
310287
- src_logger.info('INFORMER_ON_ITEM_DELETED', {
310288
- metadata: {
310289
- kind: pluralKind,
310290
- namespace,
310291
- name: item.metadata.name,
310292
- },
310293
- });
310703
+ operator_src_logger.info(`The informer has detected that item '${item.metadata.name}' for '${pluralKind}' in namespace '${namespace}' was deleted.`);
310294
310704
  const workItem = await inform(pluralKind, item, 'onMarkedToDeletion', getLastWorkItem(pluralKind, lastWorkItems, item));
310295
310705
  if (workItem) {
310296
310706
  setLastWorkItem(pluralKind, lastWorkItems, item, workItem);
@@ -310300,17 +310710,11 @@ async function observeKind(pluralKind, namespace, queue, compute) {
310300
310710
  },
310301
310711
  // on rename
310302
310712
  async (item) => {
310303
- src_logger.info('INFORMER_ON_ITEM_RENAMED', {
310304
- metadata: {
310305
- kind: pluralKind,
310306
- namespace,
310307
- name: item.metadata.name,
310308
- },
310309
- });
310713
+ operator_src_logger.info(`The informer has detected that an item for '${pluralKind}' in namespace '${namespace}' has been renamed to '${item.metadata.name}'.`);
310310
310714
  const workItem = await inform(pluralKind, item, 'onRename', getLastWorkItem(pluralKind, lastWorkItems, item));
310311
310715
  // Add the renamed item to the sync queue
310312
310716
  syncCtl.addItem(informer_itemPath(pluralKind, item));
310313
- src_logger.debug('INFORMER_RENAMING_ITEM', { metadata: { workItem } });
310717
+ operator_src_logger.debug(`The informer is renaming item '${workItem.item.metadata.name}' of kind '${workItem.item.kind}' due to a change in its name.`);
310314
310718
  if (workItem) {
310315
310719
  const oldName = workItem.item.metadata.labels[catalog_common.types.controller.FirestartrLabelOldName];
310316
310720
  await handleUnsetFinalizer(pluralKind, namespace, item);
@@ -310371,7 +310775,7 @@ function enqueue(pluralKind, workItem, queue, compute, syncCtl, retryCtl) {
310371
310775
  syncCtl.updateItem(informer_itemPath(pluralKind, item));
310372
310776
  }
310373
310777
  else {
310374
- src_logger.debug('INFORMER_NOT_SPEC_OPERATION', { metadata: { operation } });
310778
+ operator_src_logger.debug(`The informer received an item with an operation type of '${operation}', which is not a specific operation.`);
310375
310779
  }
310376
310780
  };
310377
310781
  queue(workItem);
@@ -310414,9 +310818,7 @@ async function inform(pluralKind, item, op, lastWorkItem = null) {
310414
310818
  return workItem;
310415
310819
  case 'onRename':
310416
310820
  if (await needsProvisioningOnCreate(item)) {
310417
- src_logger.debug('INFORMER_ON_RENAME_NEEDS_PROVISION_ON_CREATE', {
310418
- metadata: { item },
310419
- });
310821
+ operator_src_logger.debug(`The informer is triggering a new provisioning process for the renamed item '${item.kind}/${item.metadata.name}'.`);
310420
310822
  workItem = {
310421
310823
  operation: OperationType.RENAMED,
310422
310824
  item,
@@ -310688,15 +311090,7 @@ let INIT = false;
310688
311090
  * @param {WorkItem} workItem - WorkItem to process
310689
311091
  */
310690
311092
  async function processItem(workItem) {
310691
- src_logger.info('PROCESSOR_NEW_WORKITEM', {
310692
- metadata: {
310693
- operation: workItem.operation,
310694
- workStatus: workItem.workStatus,
310695
- kind: workItem.item.kind,
310696
- name: workItem.item.metadata.name,
310697
- namespace: workItem.item.metadata.namespace,
310698
- },
310699
- });
311093
+ operator_src_logger.info(`The processor received a new work item for '${workItem.operation}' operation on '${workItem.item.kind}/${workItem.item.metadata.name}' in namespace '${workItem.item.metadata.namespace}'. Current work status is '${workItem.workStatus}'.`);
310700
311094
  queue.push(workItem);
310701
311095
  if (!INIT) {
310702
311096
  processItem_loop().catch((err) => {
@@ -310717,15 +311111,7 @@ async function processItem_loop() {
310717
311111
  const logMessage = `${new Date().toISOString()} : Processing OPERATION: ${w.operation} ITEM: ${w.item.kind}/${w.item.metadata.name}`;
310718
311112
  catalog_common.io.writeLogFile('process_item', logMessage);
310719
311113
  const timeout = createTimeout(w);
310720
- src_logger.info('PROCESSOR_PROCESSING_WORKITEM', {
310721
- metadata: {
310722
- operation: w.operation,
310723
- workStatus: w.workStatus,
310724
- kind: w.item.kind,
310725
- name: w.item.metadata.name,
310726
- namespace: w.item.metadata.namespace,
310727
- },
310728
- });
311114
+ operator_src_logger.info(`The processor is currently handling a '${w.operation}' operation for item '${w.item.kind}/${w.item.metadata.name}' in namespace '${w.item.metadata.namespace}'. The current work status is '${w.workStatus}'.`);
310729
311115
  await runWorkItem(w);
310730
311116
  clearTimeout(timeout);
310731
311117
  }
@@ -310741,15 +311127,7 @@ function createTimeout(w) {
310741
311127
  return setTimeout(() => {
310742
311128
  //throw new Error('Timeout on workitem ' + w);
310743
311129
  console.error('Timeout on workitem %O', w);
310744
- src_logger.error('PROCESSOR_TIMEOUT_ON_WORKITEM', {
310745
- metadata: {
310746
- operation: w.operation,
310747
- workStatus: w.workStatus,
310748
- kind: w.item.kind,
310749
- name: w.item.metadata.name,
310750
- namespace: w.item.metadata.namespace,
310751
- },
310752
- });
311130
+ operator_src_logger.error(`The processor timed out while handling a '${w.operation}' operation for item '${w.item.kind}/${w.item.metadata.name}' in namespace '${w.item.metadata.namespace}'. The current work status is '${w.workStatus}'.`);
310753
311131
  process.exit(1);
310754
311132
  }, TIMEOUTS[w.operation] * 1000);
310755
311133
  }
@@ -310775,7 +311153,7 @@ function processItem_wait(t = 2000) {
310775
311153
  return new Promise((ok) => setTimeout(ok, t));
310776
311154
  }
310777
311155
  async function runWorkItem(workItem) {
310778
- src_logger.debug('PROCESSOR_RUNNING_WORK_ITEM', { metadata: { workItem } });
311156
+ operator_src_logger.debug(`The processor is now running the '${workItem.operation}' operation for item '${workItem.item.kind}/${workItem.item.metadata.name}' in namespace '${workItem.item.metadata.namespace}'.`);
310779
311157
  if (!workItem.getItem || !workItem.process || !workItem.operation)
310780
311158
  return;
310781
311159
  try {
@@ -310787,33 +311165,17 @@ async function runWorkItem(workItem) {
310787
311165
  await updateTransition(workItem.handler.itemPath(), condition.reason, condition.type, condition.status, condition.message, condition.updateStatusOnly || false);
310788
311166
  }
310789
311167
  workItem.workStatus = WorkStatus.FINISHED;
310790
- src_logger.debug('PROCESSOR_REMAIN_ITEMS_IN_QUEUE', {
310791
- metadata: { remainingItems: queue.length },
310792
- });
311168
+ operator_src_logger.debug(`The processor has '${queue.length}' items remaining in the queue.`);
310793
311169
  }
310794
311170
  catch (e) {
310795
311171
  if (e instanceof Error &&
310796
311172
  e.message.includes('Error on getItemByItemPath')) {
310797
- src_logger.debug('PROCESSOR_ERROR_ITEM_NOT_FOUND', {
310798
- metadata: {
310799
- workItem,
310800
- message: 'item was not found, removing work item from queue',
310801
- },
310802
- });
311173
+ operator_src_logger.debug(`Item '${workItem.item.kind}/${workItem.item.metadata.name}' was not found, so its work item is being removed from the processor queue.`);
310803
311174
  workItem.workStatus = WorkStatus.FINISHED;
310804
311175
  return;
310805
311176
  }
310806
311177
  else {
310807
- src_logger.error('PROCESSOR_ERROR_PROCESSING_WORKITEM', {
310808
- metadata: {
310809
- operation: workItem.operation,
310810
- workStatus: workItem.workStatus,
310811
- kind: workItem.item.kind,
310812
- name: workItem.item.metadata.name,
310813
- namespace: workItem.item.metadata.namespace,
310814
- error: e,
310815
- },
310816
- });
311178
+ operator_src_logger.error(`An error occurred while the processor was handling the '${workItem.operation}' operation for item '${workItem.item.kind}/${workItem.item.metadata.name}' in namespace '${workItem.item.metadata.namespace}'. Current work status is '${workItem.workStatus}'. The error was: '${e}'.`);
310817
311179
  console.error(e);
310818
311180
  }
310819
311181
  return;
@@ -310825,11 +311187,7 @@ async function runWorkItem(workItem) {
310825
311187
  */
310826
311188
  async function workItemGarbageCollector(queue) {
310827
311189
  while (1) {
310828
- src_logger.debug('PROCESS_ITEM_GARBAGE_COLLECTOR_RUN', {
310829
- metadata: {
310830
- workItemsFound: queue.length,
310831
- },
310832
- });
311190
+ operator_src_logger.debug(`The garbage collector processed '${queue.length}' work items.`);
310833
311191
  for (const [index, wi] of queue.entries()) {
310834
311192
  if (wi.workStatus === WorkStatus.FINISHED) {
310835
311193
  // Because the queue is a constant, we cannot reassign it, instead we
@@ -310838,11 +311196,7 @@ async function workItemGarbageCollector(queue) {
310838
311196
  queue.splice(index, 1);
310839
311197
  }
310840
311198
  }
310841
- src_logger.debug('PROCESS_ITEM_GARBAGE_COLLECTOR_FINISHED', {
310842
- metadata: {
310843
- workItemsLeft: queue.length,
310844
- },
310845
- });
311199
+ operator_src_logger.debug(`The garbage collector finished its run, leaving '${queue.length}' work items in the queue.`);
310846
311200
  await processItem_wait(10 * 1000);
310847
311201
  }
310848
311202
  }
@@ -310854,11 +311208,14 @@ if (process.env.GARBAGE_QUEUE_COLLECTOR) {
310854
311208
  var cdktf_lib = __nccwpck_require__(95933);
310855
311209
  // EXTERNAL MODULE: ../../node_modules/@cdktf/provider-github/lib/provider/index.js
310856
311210
  var lib_provider = __nccwpck_require__(95107);
310857
- ;// CONCATENATED MODULE: ../provisioner/src/entities/base/Entity.ts
311211
+ ;// CONCATENATED MODULE: ../provisioner/src/logger.ts
310858
311212
 
311213
+ /* harmony default export */ const provisioner_src_logger = (catalog_common.logger);
311214
+
311215
+ ;// CONCATENATED MODULE: ../provisioner/src/entities/base/Entity.ts
310859
311216
 
310860
- const Entity_log = src_default()('firestartr:provisioner:entity:base');
310861
311217
  const EXTERNAL_NAME_ANNOTATION = 'firestartr.dev/external-name';
311218
+
310862
311219
  class Metadata {
310863
311220
  constructor(metadata) {
310864
311221
  this._metadata = metadata;
@@ -310905,11 +311262,13 @@ class Entity {
310905
311262
  }
310906
311263
  resolveRef(ref, propertyRef) {
310907
311264
  if (!this.deps) {
310908
- throw `resolveRef:
311265
+ const ErrorMessage = `resolveRef:
310909
311266
 
310910
311267
  Entity with kind ${this.kind} ${this.metadata.name}
310911
311268
 
310912
311269
  does not have any dependencies`;
311270
+ provisioner_src_logger.error(ErrorMessage);
311271
+ throw new Error(ErrorMessage);
310913
311272
  }
310914
311273
  const { kind, name, needsSecret } = ref;
310915
311274
  if (!needsSecret) {
@@ -310918,22 +311277,26 @@ class Entity {
310918
311277
  }
310919
311278
  else {
310920
311279
  if (!propertyRef) {
310921
- throw `resolveRef:
311280
+ const ErrorMessage = `resolveRef:
310922
311281
 
310923
311282
  Entity with kind ${this.kind} ${this.metadata.name}
310924
311283
 
310925
311284
  needs a propertyRef to resolve the secret`;
311285
+ provisioner_src_logger.error(ErrorMessage);
311286
+ throw new Error(ErrorMessage);
310926
311287
  }
310927
311288
  return Buffer.from(this.deps[`${kind}-${name}`].secret.data[propertyRef], 'base64').toString('utf8');
310928
311289
  }
310929
311290
  }
310930
311291
  resolveSecretRef(ref) {
310931
311292
  if (!this.deps) {
310932
- throw `resolveSecretRef:
311293
+ const ErrorMessage = `resolveSecretRef:
310933
311294
 
310934
311295
  Entity with kind ${this.kind} ${this.metadata.name}
310935
311296
 
310936
311297
  does not have any dependencies`;
311298
+ provisioner_src_logger.error(ErrorMessage);
311299
+ throw new Error(ErrorMessage);
310937
311300
  }
310938
311301
  const { name, key } = ref;
310939
311302
  return Buffer.from(this.deps[`Secret-${name}`].cr.data[key], 'base64').toString('utf8');
@@ -310941,11 +311304,13 @@ class Entity {
310941
311304
  resolveOutputs(scope) {
310942
311305
  if (this.spec.writeConnectionSecretToRef) {
310943
311306
  if (!this.mainResource) {
310944
- throw `resolveOutputs:
311307
+ const ErrorMessage = `resolveOutputs:
310945
311308
 
310946
311309
  Entity with kind ${this.kind} ${this.metadata.name}
310947
311310
 
310948
311311
  does not have a mainResource`;
311312
+ provisioner_src_logger.error(ErrorMessage);
311313
+ throw new Error(ErrorMessage);
310949
311314
  }
310950
311315
  /**
310951
311316
  * We don't currently support writing outputs to modules
@@ -310955,13 +311320,15 @@ class Entity {
310955
311320
  const keys = this.getKeysFrom(this.mainResource);
310956
311321
  const outputs = this.spec.writeConnectionSecretToRef.outputs;
310957
311322
  for (const o of outputs) {
310958
- Entity_log('OUTPUT %s', o.key);
311323
+ provisioner_src_logger.debug('OUTPUT %s', o.key);
310959
311324
  if (!keys.includes(o.key)) {
310960
- throw `resolveOutputs:
311325
+ const ErrorMessage = `resolveOutputs:
310961
311326
 
310962
311327
  Entity with kind ${this.kind} ${this.metadata.name}
310963
311328
 
310964
311329
  does not have the output ${o.key}`;
311330
+ provisioner_src_logger.error(ErrorMessage);
311331
+ throw new Error(ErrorMessage);
310965
311332
  }
310966
311333
  new cdktf_lib.TerraformOutput(scope, o.key, {
310967
311334
  value: this.mainResource.getAnyMapAttribute(this.camelToSnake(o.key)),
@@ -310994,7 +311361,6 @@ var repository_file = __nccwpck_require__(79507);
310994
311361
  ;// CONCATENATED MODULE: ../provisioner/src/entities/firestartrgithubrepository/helpers/CodeownersHelper.ts
310995
311362
 
310996
311363
 
310997
- const CodeownersHelper_messageLog = src_default()('firestartr:provisioner:entities:component:helpers:codeownerscreator');
310998
311364
  function provisionCodeowners(scope, repo, branchDefault, fsGithubRepository) {
310999
311365
  const config = {
311000
311366
  dependsOn: [repo, branchDefault],
@@ -311005,7 +311371,7 @@ function provisionCodeowners(scope, repo, branchDefault, fsGithubRepository) {
311005
311371
  overwriteOnCreate: true,
311006
311372
  repository: repo.name,
311007
311373
  };
311008
- CodeownersHelper_messageLog(`Content of the codeowners: ${fsGithubRepository.spec.repo.codeowners}`);
311374
+ provisioner_src_logger.debug(`Content of the codeowners: ${fsGithubRepository.spec.repo.codeowners}`);
311009
311375
  const tfStateKey = `_${fsGithubRepository.getTfStateKey()}-codeowners`;
311010
311376
  new repository_file/* RepositoryFile */.h(scope, tfStateKey, config);
311011
311377
  }
@@ -311018,9 +311384,8 @@ var repository_collaborator = __nccwpck_require__(33786);
311018
311384
 
311019
311385
 
311020
311386
 
311021
- const RepositoryTeamsHelper_messageLog = src_default()('firestartr:provisioner:entities:component:helpers:repositoryteamshelper');
311022
311387
  function provisionPermissions(scope, repo, fsGithubRepository) {
311023
- RepositoryTeamsHelper_messageLog(`provisionRepositoryTeams with name ${fsGithubRepository.metadata.name} in org ${fsGithubRepository.spec.org}`);
311388
+ provisioner_src_logger.info(`provisionRepositoryTeams with name ${fsGithubRepository.metadata.name} in org ${fsGithubRepository.spec.org}`);
311024
311389
  for (const permission of fsGithubRepository.spec.permissions) {
311025
311390
  if ('ref' in permission) {
311026
311391
  const tfStateKey = `_${fsGithubRepository.getTfStateKey()}-${permission.ref.kind}-${permission.ref.name}-tr`;
@@ -311066,9 +311431,8 @@ var branch_protection_v3 = __nccwpck_require__(31706);
311066
311431
 
311067
311432
 
311068
311433
 
311069
- const RepositoryHelper_messageLog = src_default()('firestartr:provisioner:entities:component:helpers:repositoryhelper');
311070
311434
  function provisionRepository(scope, fsGithubRepository) {
311071
- RepositoryHelper_messageLog(`provisionRepository with name ${fsGithubRepository.metadata.name} in org ${fsGithubRepository.spec.org}`);
311435
+ provisioner_src_logger.info(`provisionRepository with name ${fsGithubRepository.metadata.name} in org ${fsGithubRepository.spec.org}`);
311072
311436
  const config = {
311073
311437
  name: fsGithubRepository.metadata.name,
311074
311438
  description: fsGithubRepository.spec.repo.description,
@@ -311105,7 +311469,7 @@ function provisionRepository(scope, fsGithubRepository) {
311105
311469
  return repo;
311106
311470
  }
311107
311471
  function provisionBranchProtections(scope, repo, fsGithubRepository) {
311108
- RepositoryHelper_messageLog(`provisionBranchProtections with name ${fsGithubRepository.metadata.name} in org ${fsGithubRepository.spec.org}`);
311472
+ provisioner_src_logger.info(`provisionBranchProtections with name ${fsGithubRepository.metadata.name} in org ${fsGithubRepository.spec.org}`);
311109
311473
  for (const branchProtection of fsGithubRepository.spec.branchProtections) {
311110
311474
  const tfStateKey = `_${fsGithubRepository.getTfStateKey()}-${branchProtection.pattern}-bp`;
311111
311475
  const statusChecks = {
@@ -311130,8 +311494,6 @@ function provisionBranchProtections(scope, repo, fsGithubRepository) {
311130
311494
 
311131
311495
  ;// CONCATENATED MODULE: ../provisioner/src/config/config.ts
311132
311496
 
311133
-
311134
- const config_messageLog = src_default()('firestartr:provisioner:config');
311135
311497
  /**
311136
311498
  * @description Valid plans for the account
311137
311499
  * @type {Set<string>}
@@ -311238,13 +311600,12 @@ class FirestartrGithubRepository_FirestartrGithubRepository extends Entity {
311238
311600
 
311239
311601
 
311240
311602
 
311241
- const provisioner_messageLog = src_default()('firestartr:provisioner:features:provisioner');
311242
311603
  function provisionFeatureFiles(scope, feature) {
311243
- provisioner_messageLog(`Provisioning feature ${feature.spec.type} for ${feature.spec.repositoryTarget.name}`);
311244
- provisioner_messageLog('Feature output json: %O', feature);
311604
+ provisioner_src_logger.info(`Provisioning feature ${feature.spec.type} for ${feature.spec.repositoryTarget.name}`);
311605
+ provisioner_src_logger.debug('Feature output json: %O', feature);
311245
311606
  if (feature.spec.files) {
311246
311607
  for (const file of feature.spec.files) {
311247
- provisioner_messageLog('Provisioning file %O', file);
311608
+ provisioner_src_logger.debug('Provisioning file %O', file);
311248
311609
  const lifecycleArg = file.userManaged
311249
311610
  ? { ignoreChanges: ['content'] }
311250
311611
  : {};
@@ -311279,8 +311640,6 @@ class FirestartrGithubRepositoryFeature_FirestartrGithubRepositoryFeature extend
311279
311640
  var lib_membership = __nccwpck_require__(27501);
311280
311641
  ;// CONCATENATED MODULE: ../provisioner/src/entities/firestartrgithubmembership/helpers/MembershipHelper.ts
311281
311642
 
311282
-
311283
- const MembershipHelper_messageLog = src_default()('firestartr:provisioner:modules:artifacts:userartifact');
311284
311643
  function provisionMembership(scope, fsGithubMembership) {
311285
311644
  const tfStateKey = `_${fsGithubMembership.getTfStateKey()}`;
311286
311645
  const membership = new lib_membership/* Membership */.E(scope, tfStateKey, {
@@ -311295,8 +311654,6 @@ function provisionMembership(scope, fsGithubMembership) {
311295
311654
  var team_membership = __nccwpck_require__(93268);
311296
311655
  ;// CONCATENATED MODULE: ../provisioner/src/entities/firestartrgithubmembership/helpers/MembershipAllGroupHelper.ts
311297
311656
 
311298
-
311299
- const MembershipAllGroupHelper_messageLog = src_default()('firestartr:provisioner:modules:artifacts:membership:all-group-helper');
311300
311657
  function provisionAllGroupMembershipRelation(scope, fsGithubMembership) {
311301
311658
  const tfStateKey = `_${fsGithubMembership.getTfStateKey()}`;
311302
311659
  const config = {
@@ -311327,11 +311684,10 @@ class FirestartrGithubMembership_FirestartrGithubMembership extends Entity {
311327
311684
  var lib_team = __nccwpck_require__(57889);
311328
311685
  ;// CONCATENATED MODULE: ../provisioner/src/entities/firestartrgithubgroup/helpers/TeamsHelper.ts
311329
311686
 
311330
-
311331
311687
  // import { TeamConfigAux } from '../auxiliars/TeamConfigAux';
311332
- const TeamsHelper_messageLog = src_default()('firestartr:provisioner:entities:component:helpers:repositoryhelper');
311688
+
311333
311689
  function provisionGroup(scope, fsGithubGroup) {
311334
- TeamsHelper_messageLog(`provisionGroup with name ${fsGithubGroup.metadata.name} in org ${fsGithubGroup.spec.org}`);
311690
+ provisioner_src_logger.info(`provisionGroup with name ${fsGithubGroup.metadata.name} in org ${fsGithubGroup.spec.org}`);
311335
311691
  const config = {
311336
311692
  name: fsGithubGroup.metadata.name,
311337
311693
  description: fsGithubGroup.spec.description,
@@ -311349,11 +311705,10 @@ function provisionGroup(scope, fsGithubGroup) {
311349
311705
  ;// CONCATENATED MODULE: ../provisioner/src/entities/firestartrgithubgroup/helpers/TeamMembersHelper.ts
311350
311706
 
311351
311707
 
311352
- const TeamMembersHelper_messageLog = src_default()('firestartr:provisioner:entities:component:helpers:teamsmembershiphelper');
311353
311708
  function provisionMembers(scope, team, fsGithubGroup) {
311354
- TeamMembersHelper_messageLog(`provisionMembers of group ${fsGithubGroup.metadata.name} in org ${fsGithubGroup.spec.org}`);
311709
+ provisioner_src_logger.info(`provisionMembers of group ${fsGithubGroup.metadata.name} in org ${fsGithubGroup.spec.org}`);
311355
311710
  for (const member of fsGithubGroup.spec.members) {
311356
- TeamMembersHelper_messageLog(`Provisioning user ${member.ref.name} for group ${fsGithubGroup.metadata.name}`);
311711
+ provisioner_src_logger.info(`Provisioning user ${member.ref.name} for group ${fsGithubGroup.metadata.name}`);
311357
311712
  const tfStateKey = `_${fsGithubGroup.getTfStateKey()}-${member.ref.kind}-${member.ref.name}-tr`;
311358
311713
  if (member.ref.kind === 'FirestartrGithubMembership') {
311359
311714
  const username = fsGithubGroup.resolveRef(member.ref);
@@ -311388,8 +311743,6 @@ class FirestartrGithubGroup_FirestartrGithubGroup extends Entity {
311388
311743
  var organization_webhook = __nccwpck_require__(80516);
311389
311744
  ;// CONCATENATED MODULE: ../provisioner/src/entities/firestartrgithuborgwebhook/helpers/OrgWebhookHelper.ts
311390
311745
 
311391
-
311392
- const OrgWebhookHelper_messageLog = src_default()('firestartr:provisioner:modules:artifacts:orgwebhook');
311393
311746
  function provisionOrgWebhook(scope, fsGithubOrgWebhook) {
311394
311747
  const tfStateKey = `_${fsGithubOrgWebhook.getTfStateKey()}`;
311395
311748
  const webhookConfig = {
@@ -311655,7 +312008,6 @@ var FirestartrTerraformProvider;
311655
312008
 
311656
312009
 
311657
312010
 
311658
- const GithubStack_messageLog = src_default()('firestartr:provisioner:stacks:githubstack');
311659
312011
  class GithubStack extends BaseStack {
311660
312012
  async provisionEntity(isImport, entity, deps, tfStatePath, orgConfig) {
311661
312013
  try {
@@ -311671,7 +312023,7 @@ class GithubStack extends BaseStack {
311671
312023
  }
311672
312024
  }
311673
312025
  catch (err) {
311674
- GithubStack_messageLog('Error: provisionEntity: %s', err);
312026
+ provisioner_src_logger.error('Error: provisionEntity: %s', err);
311675
312027
  throw err;
311676
312028
  }
311677
312029
  }
@@ -311692,7 +312044,6 @@ class GithubStack extends BaseStack {
311692
312044
 
311693
312045
 
311694
312046
 
311695
- const TerraformModuleStack_messageLog = src_default()('firestartr:provisioner:stacks:terraformmodulestack');
311696
312047
  class TerraformModuleStack extends BaseStack {
311697
312048
  async provisionEntity(isImport, entity, deps, tfStatePath, orgConfig) {
311698
312049
  try {
@@ -311705,7 +312056,7 @@ class TerraformModuleStack extends BaseStack {
311705
312056
  }
311706
312057
  }
311707
312058
  catch (err) {
311708
- TerraformModuleStack_messageLog('Error: provisionEntity: %s', err);
312059
+ provisioner_src_logger.error('Error: provisionEntity: %s', err);
311709
312060
  throw err;
311710
312061
  }
311711
312062
  }
@@ -311761,9 +312112,10 @@ function __calculateTFStatePath(entity) {
311761
312112
  ;// CONCATENATED MODULE: ../provisioner/src/cdktf.ts
311762
312113
 
311763
312114
 
311764
- async function runCDKTF(entityPath, action, depsPath) {
312115
+ async function runCDKTF(entityPath, action, depsPath, stream) {
311765
312116
  return new Promise((ok, ko) => {
311766
312117
  const cdktfProcess = (0,external_child_process_.spawn)('cdktf', [action, '--log-level', 'DEBUG', '--auto-approve'], {
312118
+ stdio: ['inherit', 'pipe', 'pipe'],
311767
312119
  cwd: process.env.IS_DEV_LOCAL_ENVIRONMENT
311768
312120
  ? '/library/packages/provisioner'
311769
312121
  : '/library/provisioner',
@@ -311791,10 +312143,14 @@ async function runCDKTF(entityPath, action, depsPath) {
311791
312143
  const logparsed = log.toString();
311792
312144
  if (!logparsed.includes('Synthesizing')) {
311793
312145
  output += catalog_common.io.stripAnsi(logparsed);
312146
+ if (stream)
312147
+ stream.write(catalog_common.io.stripAnsi(logparsed));
311794
312148
  }
311795
312149
  });
311796
312150
  cdktfProcess.stderr.on('data', (log) => {
311797
312151
  output += catalog_common.io.stripAnsi(log.toString());
312152
+ if (stream)
312153
+ stream.write(catalog_common.io.stripAnsi(log.toString()));
311798
312154
  });
311799
312155
  cdktfProcess.on('exit', async (code) => {
311800
312156
  if (code !== 0) {
@@ -311812,7 +312168,6 @@ async function runCDKTF(entityPath, action, depsPath) {
311812
312168
 
311813
312169
 
311814
312170
 
311815
- const installer_messageLog = src_default()('firestartr:provisioner:features:installer');
311816
312171
  async function installer_installFeaturesForComponent(component, store) {
311817
312172
  const componentFeatures = component.spec?.provisioner?.features || '[]';
311818
312173
  const componentFeaturesToInstall = componentFeatures.filter((feature) => {
@@ -311821,7 +312176,7 @@ async function installer_installFeaturesForComponent(component, store) {
311821
312176
  });
311822
312177
  if (componentFeaturesToInstall.length > 0) {
311823
312178
  for (const feature of componentFeaturesToInstall) {
311824
- installer_messageLog('Installing feature %s for component %s', feature.name, component.metadata.name);
312179
+ log.info('Installing feature %s for component %s', feature.name, component.metadata.name);
311825
312180
  // Get feature config
311826
312181
  const featureConfig = common.features.features.getFeatureRenderedConfigForComponent(component, feature.name);
311827
312182
  // prepare files
@@ -311834,7 +312189,7 @@ async function installer_installFeaturesForComponent(component, store) {
311834
312189
  }
311835
312190
  }
311836
312191
  else {
311837
- installer_messageLog(`No features to install for component ${component.metadata.name}`);
312192
+ log.error(`No features to install for component ${component.metadata.name}`);
311838
312193
  }
311839
312194
  return store;
311840
312195
  }
@@ -311861,7 +312216,7 @@ async function getFileContentFromGithubIfExists(path, repositoryName, owner) {
311861
312216
  }
311862
312217
  catch (e) {
311863
312218
  if (e.status === 404) {
311864
- installer_messageLog(`File ${path} not found in ${repositoryName}`);
312219
+ log.debug(`File ${path} not found in ${repositoryName}`);
311865
312220
  return false;
311866
312221
  }
311867
312222
  throw e;
@@ -311886,7 +312241,6 @@ function isFreshInstallation(featureName, component) {
311886
312241
 
311887
312242
 
311888
312243
 
311889
- const preparer_messageLog = src_default()('firestartr:provisioner:features:installer');
311890
312244
  async function preparer_prepareFeaturesForComponent(component, store) {
311891
312245
  // those are the features to maintain
311892
312246
  let componentFeatures = component.spec?.provisioner?.features || [];
@@ -311901,7 +312255,7 @@ async function preparer_prepareFeaturesForComponent(component, store) {
311901
312255
  if (componentFeatures.length > 0) {
311902
312256
  const entityPath = dumpArtifactYaml(component);
311903
312257
  for (const feature of componentFeatures) {
311904
- preparer_messageLog('Installing feature %s for component %s', feature.name, component.metadata.name);
312258
+ log.info('Installing feature %s for component %s', feature.name, component.metadata.name);
311905
312259
  await featuresPreparer.getFeatureConfig(feature.name, feature.version, entityPath);
311906
312260
  // Get feature config
311907
312261
  const featureConfig = common.features.features.getFeatureRenderedConfigForComponent(component, feature.name);
@@ -311925,17 +312279,17 @@ const external_node_readline_namespaceObject = __WEBPACK_EXTERNAL_createRequire(
311925
312279
 
311926
312280
 
311927
312281
 
311928
- const terraform_messageLog = src_default()('firestartr:provisioner:terraform');
311929
- async function runTerraform(entity, command) {
312282
+ async function runTerraform(entity, command, stream) {
311930
312283
  let entityID = `${entity.kind.toLowerCase()}--${entity['spec']['firestartr']['tfStateKey']}`;
311931
312284
  if (entity.kind === 'FirestartrGithubRepositoryFeature')
311932
312285
  entityID = `${entity.kind.toLowerCase()}--${entity.metadata.name}`;
311933
312286
  const workDir = external_path_.join(process.env.IS_DEV_LOCAL_ENVIRONMENT
311934
312287
  ? '/library/packages/provisioner'
311935
312288
  : '/library/provisioner', 'cdktf.out', 'stacks', entityID);
311936
- terraform_messageLog(`Running terraform with command ${command} in ${workDir}`);
312289
+ provisioner_src_logger.info(`Running terraform with command ${command} in ${workDir}`);
311937
312290
  return new Promise((ok, ko) => {
311938
312291
  const terraformProcess = (0,external_child_process_.spawn)('terraform', [...command], {
312292
+ stdio: ['inherit', 'pipe', 'pipe'],
311939
312293
  cwd: workDir,
311940
312294
  env: {
311941
312295
  PATH: process.env.PATH,
@@ -311953,17 +312307,18 @@ async function runTerraform(entity, command) {
311953
312307
  terraformProcess.stdout.on('data', (log) => {
311954
312308
  const line = catalog_common.io.stripAnsi(log.toString());
311955
312309
  output += line;
311956
- console.log(line);
312310
+ if (stream)
312311
+ stream.write(line);
311957
312312
  });
311958
312313
  terraformProcess.stderr.on('data', (log) => {
311959
312314
  const line = catalog_common.io.stripAnsi(log.toString());
311960
312315
  output += line;
311961
- console.log(line);
312316
+ if (stream)
312317
+ stream.write(line);
311962
312318
  });
311963
312319
  terraformProcess.on('exit', async (code) => {
311964
312320
  console.log(`child process exited with code ${code}`);
311965
312321
  if (code !== 0) {
311966
- console.log(output);
311967
312322
  ko(output);
311968
312323
  }
311969
312324
  else {
@@ -311972,13 +312327,13 @@ async function runTerraform(entity, command) {
311972
312327
  });
311973
312328
  });
311974
312329
  }
311975
- function terraformInit(entity) {
311976
- return runTerraform(entity, ['init', '-no-color']);
312330
+ function terraformInit(entity, stream) {
312331
+ return runTerraform(entity, ['init', '-no-color'], stream);
311977
312332
  }
311978
- function terraformPlan(entity) {
311979
- return runTerraform(entity, ['plan', '-no-color']);
312333
+ function terraformPlan(entity, stream) {
312334
+ return runTerraform(entity, ['plan', '-no-color'], stream);
311980
312335
  }
311981
- async function terraformApply(entity, isImport = false, skipPlan = false) {
312336
+ async function terraformApply(entity, isImport = false, skipPlan = false, stream) {
311982
312337
  let line = false;
311983
312338
  if (isImport && !skipPlan) {
311984
312339
  console.log(`
@@ -311997,15 +312352,15 @@ Type 'yes' to continue:`);
311997
312352
  });
311998
312353
  }
311999
312354
  if (line === 'yes' || skipPlan) {
312000
- return runTerraform(entity, ['apply', '-no-color', '-auto-approve']);
312355
+ return runTerraform(entity, ['apply', '-no-color', '-auto-approve'], stream);
312001
312356
  }
312002
312357
  else {
312003
312358
  console.log(`🚀 Skipping apply for entity ${entity.kind} ${entity.metadata.name}`);
312004
312359
  return Promise.resolve('');
312005
312360
  }
312006
312361
  }
312007
- function terraformDestroy(entity) {
312008
- return runTerraform(entity, ['destroy', '-no-color', '-auto-approve']);
312362
+ function terraformDestroy(entity, stream) {
312363
+ return runTerraform(entity, ['destroy', '-no-color', '-auto-approve'], stream);
312009
312364
  }
312010
312365
 
312011
312366
  ;// CONCATENATED MODULE: ../provisioner/src/features/uninstaller.ts
@@ -312014,12 +312369,11 @@ function terraformDestroy(entity) {
312014
312369
 
312015
312370
 
312016
312371
 
312017
- const uninstaller_messageLog = src_default()('firestartr:provisioner:features:uninstaller');
312018
312372
  async function untrackManagedFiles(feature, deps) {
312019
312373
  if (!feature.spec.files || feature.spec.files.length < 1)
312020
312374
  return;
312021
- uninstaller_messageLog('Removing managed files from the Terraform State');
312022
- uninstaller_messageLog('Synthing the project...');
312375
+ provisioner_src_logger.debug('Removing managed files from the Terraform State');
312376
+ provisioner_src_logger.debug('Synthing the project...');
312023
312377
  const randomFilenameFeature = `${catalog_common.generic.randomString(20)}.yaml`;
312024
312378
  const randomFilenameDeps = `${catalog_common.generic.randomString(20)}_deps.yaml`;
312025
312379
  catalog_common.io.writeYamlFile(randomFilenameFeature, feature, '/tmp');
@@ -312027,7 +312381,7 @@ async function untrackManagedFiles(feature, deps) {
312027
312381
  await runCDKTF(external_path_.join('/tmp', randomFilenameFeature), 'synth', external_path_.join('/tmp', randomFilenameDeps));
312028
312382
  await runTerraform(feature, ['init']);
312029
312383
  for (const file of feature.spec.files.filter((file) => file.userManaged === true)) {
312030
- uninstaller_messageLog(`Removing from the state file ${file.path}`);
312384
+ provisioner_src_logger.debug(`Removing from the state file ${file.path}`);
312031
312385
  // Terraform replaces / with -- and . with - in the state file names, so we do the same to get the state file name
312032
312386
  const stateFileName = `${feature.spec.type}-${file.path}`
312033
312387
  .replace(/\//g, '--')
@@ -312100,14 +312454,46 @@ function getNextStatus(status) {
312100
312454
 
312101
312455
 
312102
312456
 
312457
+
312458
+
312103
312459
  class Resource {
312104
312460
  setLogger(fn) {
312105
312461
  this.logFn = fn;
312106
312462
  }
312463
+ setSynthStreamLogs(callbacks) {
312464
+ this.synthStreamCallbacks = callbacks;
312465
+ }
312466
+ setTFStreamLogs(callbacks) {
312467
+ this.tfStreamCallbacks = callbacks;
312468
+ }
312469
+ async onSyncStreaming() {
312470
+ if (!this.logStream) {
312471
+ this.logStream = new external_stream_.PassThrough();
312472
+ }
312473
+ if (this.synthStreamCallbacks) {
312474
+ const callbacks = await this.synthStreamCallbacks.prepare();
312475
+ this.setLogStream(callbacks.fnData, callbacks.fnEnd);
312476
+ }
312477
+ }
312478
+ async onTFStreaming() {
312479
+ if (!this.logStream) {
312480
+ this.logStream = new external_stream_.PassThrough();
312481
+ }
312482
+ if (this.tfStreamCallbacks) {
312483
+ const callbacks = await this.tfStreamCallbacks.prepare();
312484
+ this.setLogStream(callbacks.fnData, callbacks.fnEnd);
312485
+ }
312486
+ }
312487
+ setLogStream(fnData, fnEnd, reopen = true) {
312488
+ if (reopen || !this.logStream)
312489
+ this.logStream = new external_stream_.PassThrough();
312490
+ this.logStream.on('data', (data) => fnData(data.toString()));
312491
+ this.logStream.on('end', () => fnEnd());
312492
+ }
312107
312493
  constructor(mainCR, operation, deps = []) {
312108
312494
  this.data = {};
312109
312495
  this.output = '';
312110
- this.logFn = (msg) => console.log(msg);
312496
+ this.logFn = (msg) => provisioner_src_logger.debug(msg);
312111
312497
  this.set('main_artifact', mainCR);
312112
312498
  this.set('operation', operation);
312113
312499
  this.set('deps', deps);
@@ -312117,36 +312503,46 @@ class Resource {
312117
312503
  await this.synth();
312118
312504
  await this.runTerraform();
312119
312505
  await this.postprocess();
312506
+ if (this.logStream) {
312507
+ this.logStream.end();
312508
+ this.logStream = null;
312509
+ }
312120
312510
  }
312121
312511
  artifact() {
312122
312512
  return this.get('main_artifact');
312123
312513
  }
312124
312514
  async synth() {
312515
+ await this.onSyncStreaming();
312125
312516
  const randomFilenameArtifact = `${catalog_common.generic.randomString(20)}.yaml`;
312126
312517
  const randomFilenameDeps = `${catalog_common.generic.randomString(20)}_deps.yaml`;
312127
312518
  catalog_common.io.writeYamlFile(randomFilenameArtifact, this.get('main_artifact'), '/tmp');
312128
312519
  catalog_common.io.writeYamlFile(randomFilenameDeps, this.get('deps'), '/tmp');
312129
- await runCDKTF(external_path_.join('/tmp', randomFilenameArtifact), 'synth', external_path_.join('/tmp', randomFilenameDeps));
312520
+ await runCDKTF(external_path_.join('/tmp', randomFilenameArtifact), 'synth', external_path_.join('/tmp', randomFilenameDeps), this.logStream);
312521
+ if (this.logStream) {
312522
+ this.logStream.end();
312523
+ this.logStream = null;
312524
+ }
312130
312525
  }
312131
312526
  log(msg) {
312132
312527
  this.logFn(msg);
312133
312528
  }
312134
312529
  async runTerraform() {
312530
+ await this.onTFStreaming();
312135
312531
  let output = '';
312136
- output += await terraformInit(this.get('main_artifact'));
312137
- output += await terraformPlan(this.get('main_artifact'));
312532
+ output += await terraformInit(this.get('main_artifact'), this.logStream);
312533
+ output += await terraformPlan(this.get('main_artifact'), this.logStream);
312138
312534
  if (this.get('operation') === 'CREATE' ||
312139
312535
  this.get('operation') === 'UPDATE') {
312140
- output += await terraformApply(this.get('main_artifact'), false, true);
312536
+ output += await terraformApply(this.get('main_artifact'), false, true, this.logStream);
312141
312537
  }
312142
312538
  else if (this.get('operation') === 'DELETE') {
312143
- output += await terraformDestroy(this.get('main_artifact'));
312539
+ output += await terraformDestroy(this.get('main_artifact'), this.logStream);
312144
312540
  }
312145
312541
  else if (this.get('operation') === 'IMPORT') {
312146
- output += await terraformApply(this.get('main_artifact'), true, false);
312542
+ output += await terraformApply(this.get('main_artifact'), true, false, this.logStream);
312147
312543
  }
312148
312544
  else if (this.get('operation') === 'IMPORT_SKIP_PLAN') {
312149
- output += await terraformApply(this.get('main_artifact'), true, true);
312545
+ output += await terraformApply(this.get('main_artifact'), true, true, this.logStream);
312150
312546
  }
312151
312547
  else {
312152
312548
  throw new Error(`unknown operation: ${this.get('operation')}`);
@@ -312171,7 +312567,6 @@ class Resource {
312171
312567
 
312172
312568
 
312173
312569
 
312174
- const github_feature_log = src_default()('firestartr:provisioner:github_repository_feature');
312175
312570
  class github_feature_FirestartrGithubRepositoryFeature extends Resource {
312176
312571
  static kind() {
312177
312572
  return 'FirestartrGithubRepositoryFeature';
@@ -312179,19 +312574,19 @@ class github_feature_FirestartrGithubRepositoryFeature extends Resource {
312179
312574
  async preprocess() {
312180
312575
  switch (this.get('operation')) {
312181
312576
  case 'CREATE':
312182
- github_feature_log('CREATED');
312577
+ provisioner_src_logger.debug('Creating FirestartrGithubRepositoryFeature');
312183
312578
  await this._updateManagedFiles();
312184
312579
  break;
312185
312580
  case 'UPDATE':
312186
- github_feature_log('UPDATED');
312581
+ provisioner_src_logger.debug('Updating FirestartrGithubRepositoryFeature');
312187
312582
  await this._updateManagedFiles();
312188
312583
  break;
312189
312584
  case 'DELETE':
312190
- github_feature_log('DELETE');
312585
+ provisioner_src_logger.debug('Deleting FirestartrGithubRepositoryFeature');
312191
312586
  await untrackManagedFiles(this.get('main_artifact'), this.get('deps'));
312192
312587
  break;
312193
312588
  default:
312194
- github_feature_log(`UNKNOWN: ${this.get('operation')}`);
312589
+ provisioner_src_logger.debug(`Unknown operation '${this.get('operation')}' for FirestartrGithubRepositoryFeature`);
312195
312590
  }
312196
312591
  }
312197
312592
  async _updateManagedFiles() {
@@ -312205,7 +312600,7 @@ class github_feature_FirestartrGithubRepositoryFeature extends Resource {
312205
312600
  file.content = newContent;
312206
312601
  }
312207
312602
  catch (e) {
312208
- github_feature_log(`File ${file.path} not found in repo ${repoInfo.ref.name} on branch ${repoInfo.branch}. File content not updated`);
312603
+ provisioner_src_logger.error(`File ${file.path} not found in repo ${repoInfo.ref.name} on branch ${repoInfo.branch}. File content not updated`);
312209
312604
  }
312210
312605
  }
312211
312606
  }
@@ -312285,7 +312680,6 @@ async function provisionRegularBranch(repo, branchName, sourceBranch, org) {
312285
312680
 
312286
312681
 
312287
312682
 
312288
- const github_repository_log = src_default()('firestartr:provisioner:github_repository');
312289
312683
  class github_repository_FirestartrGithubRepository extends Resource {
312290
312684
  static kind() {
312291
312685
  return 'FirestartrGithubRepository';
@@ -312293,22 +312687,23 @@ class github_repository_FirestartrGithubRepository extends Resource {
312293
312687
  async preprocess() {
312294
312688
  switch (this.get('operation')) {
312295
312689
  case 'CREATE':
312296
- github_repository_log('CREATE');
312690
+ provisioner_src_logger.debug('Creating FirestartrGithubRepository');
312297
312691
  break;
312298
312692
  case 'UPDATE':
312299
- github_repository_log('UPDATED');
312693
+ provisioner_src_logger.debug('Updating FirestartrGithubRepository');
312300
312694
  break;
312301
312695
  case 'DELETE':
312302
- github_repository_log('DELETED');
312696
+ provisioner_src_logger.debug('Deleted FirestartrGithubRepository');
312303
312697
  break;
312304
312698
  case 'IMPORT':
312305
- github_repository_log('IMPORT');
312699
+ provisioner_src_logger.debug('Importing FirestartrGithubRepository');
312306
312700
  break;
312307
312701
  case 'IMPORT_SKIP_PLAN':
312308
- github_repository_log('IMPORT_SKIP_PLAN');
312702
+ provisioner_src_logger.debug('Import skipping plan for FirestartrGithubRepository');
312309
312703
  break;
312310
312704
  default:
312311
- github_repository_log('UNKNOWN');
312705
+ provisioner_src_logger.debug('Unknown operation for FirestartrGithubRepository ' +
312706
+ this.get('operation'));
312312
312707
  }
312313
312708
  }
312314
312709
  async postprocess() {
@@ -312316,20 +312711,20 @@ class github_repository_FirestartrGithubRepository extends Resource {
312316
312711
  switch (this.get('operation')) {
312317
312712
  case 'CREATE':
312318
312713
  case 'UPDATE':
312319
- github_repository_log('CREATE & UPDATE');
312714
+ provisioner_src_logger.debug(`Created and updated FirestartrGithubRepository ${cr.metadata.name}`);
312320
312715
  await provisionAdditionalBranches(cr);
312321
312716
  break;
312322
312717
  case 'DELETE':
312323
- github_repository_log('DELETED');
312718
+ provisioner_src_logger.debug(`Deleted FirestartrGithubRepository ${cr.metadata.name}`);
312324
312719
  break;
312325
312720
  case 'IMPORT':
312326
- github_repository_log('IMPORT');
312721
+ provisioner_src_logger.debug(`Imported FirestartrGithubRepository ${cr.metadata.name}`);
312327
312722
  break;
312328
312723
  case 'IMPORT_SKIP_PLAN':
312329
- github_repository_log('IMPORT_SKIP_PLAN');
312724
+ provisioner_src_logger.debug(`Imported skipped plan FirestartrGithubRepository ${cr.metadata.name}`);
312330
312725
  break;
312331
312726
  default:
312332
- github_repository_log('UNKNOWN');
312727
+ provisioner_src_logger.debug(`Finished for unknown operation ${this.get('operation')} for FirestartrGithubRepository`);
312333
312728
  }
312334
312729
  }
312335
312730
  }
@@ -312337,7 +312732,6 @@ class github_repository_FirestartrGithubRepository extends Resource {
312337
312732
  ;// CONCATENATED MODULE: ../provisioner/src/resources/github_membership/index.ts
312338
312733
 
312339
312734
 
312340
- const github_membership_log = src_default()('firestartr:provisioner:github_membership');
312341
312735
  class github_membership_FirestartrGithubMembership extends Resource {
312342
312736
  static kind() {
312343
312737
  return 'FirestartrGithubMembership';
@@ -312345,22 +312739,23 @@ class github_membership_FirestartrGithubMembership extends Resource {
312345
312739
  async preprocess() {
312346
312740
  switch (this.get('operation')) {
312347
312741
  case 'CREATE':
312348
- github_membership_log('CREATE');
312742
+ provisioner_src_logger.debug('Creating FirestartrGithubMembership');
312349
312743
  break;
312350
312744
  case 'UPDATE':
312351
- github_membership_log('UPDATED');
312745
+ provisioner_src_logger.debug('Updating FirestartrGithubMembership');
312352
312746
  break;
312353
312747
  case 'DELETE':
312354
- github_membership_log('DELETED');
312748
+ provisioner_src_logger.debug('Deleted FirestartrGithubMembership');
312355
312749
  break;
312356
312750
  case 'IMPORT':
312357
- github_membership_log('IMPORT');
312751
+ provisioner_src_logger.debug('Importing FirestartrGithubMembership');
312358
312752
  break;
312359
312753
  case 'IMPORT_SKIP_PLAN':
312360
- github_membership_log('IMPORT_SKIP_PLAN');
312754
+ provisioner_src_logger.debug('Import skipping plan for FirestartrGithubMembership');
312361
312755
  break;
312362
312756
  default:
312363
- github_membership_log('UNKNOWN');
312757
+ provisioner_src_logger.debug('Unknown operation for FirestartrGithubMembership ' +
312758
+ this.get('operation'));
312364
312759
  }
312365
312760
  }
312366
312761
  }
@@ -312368,7 +312763,6 @@ class github_membership_FirestartrGithubMembership extends Resource {
312368
312763
  ;// CONCATENATED MODULE: ../provisioner/src/resources/github_group/index.ts
312369
312764
 
312370
312765
 
312371
- const github_group_log = src_default()('firestartr:provisioner:github_group');
312372
312766
  class github_group_FirestartrGithubGroup extends Resource {
312373
312767
  static kind() {
312374
312768
  return 'FirestartrGithubGroup';
@@ -312376,22 +312770,23 @@ class github_group_FirestartrGithubGroup extends Resource {
312376
312770
  async preprocess() {
312377
312771
  switch (this.get('operation')) {
312378
312772
  case 'CREATE':
312379
- github_group_log('CREATE');
312773
+ provisioner_src_logger.debug('Creating FirestartrGithubGroup');
312380
312774
  break;
312381
312775
  case 'UPDATE':
312382
- github_group_log('UPDATED');
312776
+ provisioner_src_logger.debug('Updating FirestartrGithubGroup');
312383
312777
  break;
312384
312778
  case 'DELETE':
312385
- github_group_log('DELETED');
312779
+ provisioner_src_logger.debug('Deleted FirestartrGithubGroup');
312386
312780
  break;
312387
312781
  case 'IMPORT':
312388
- github_group_log('IMPORT');
312782
+ provisioner_src_logger.debug('Importing FirestartrGithubGroup');
312389
312783
  break;
312390
312784
  case 'IMPORT_SKIP_PLAN':
312391
- github_group_log('IMPORT_SKIP_PLAN');
312785
+ provisioner_src_logger.debug('Import skipping plan for FirestartrGithubGroup');
312392
312786
  break;
312393
312787
  default:
312394
- github_group_log('UNKNOWN');
312788
+ provisioner_src_logger.debug('Unknown operation for FirestartrGithubGroup ' +
312789
+ this.get('operation'));
312395
312790
  }
312396
312791
  }
312397
312792
  }
@@ -312399,14 +312794,13 @@ class github_group_FirestartrGithubGroup extends Resource {
312399
312794
  ;// CONCATENATED MODULE: ../provisioner/src/resources/terraform_module/index.ts
312400
312795
 
312401
312796
 
312402
- const terraform_module_log = src_default()('firestartr:provisioner:terraform_module');
312403
312797
  class FirestartrTerraformModule extends Resource {
312404
312798
  static kind() {
312405
312799
  return 'FirestartrTerraformModule';
312406
312800
  }
312407
312801
  async preprocess() {
312408
312802
  const operation = this.get('operation');
312409
- terraform_module_log(operation);
312803
+ provisioner_src_logger.debug(`Running operation '${operation}' for FirestartrTerraformModule`);
312410
312804
  switch (operation) {
312411
312805
  case 'CREATE':
312412
312806
  break;
@@ -312427,7 +312821,6 @@ class FirestartrTerraformModule extends Resource {
312427
312821
  ;// CONCATENATED MODULE: ../provisioner/src/resources/github_orgWebhook/index.ts
312428
312822
 
312429
312823
 
312430
- const github_orgWebhook_log = src_default()('firestartr:provisioner:github_orgWebhook');
312431
312824
  class github_orgWebhook_FirestartrGithubOrgWebhook extends Resource {
312432
312825
  static kind() {
312433
312826
  return 'FirestartrGithubOrgWebhook';
@@ -312435,22 +312828,23 @@ class github_orgWebhook_FirestartrGithubOrgWebhook extends Resource {
312435
312828
  async preprocess() {
312436
312829
  switch (this.get('operation')) {
312437
312830
  case 'CREATE':
312438
- github_orgWebhook_log('CREATE');
312831
+ provisioner_src_logger.debug('Creating FirestartrGithubOrgWebhook');
312439
312832
  break;
312440
312833
  case 'UPDATE':
312441
- github_orgWebhook_log('UPDATED');
312834
+ provisioner_src_logger.debug('Updating FirestartrGithubOrgWebhook');
312442
312835
  break;
312443
312836
  case 'DELETE':
312444
- github_orgWebhook_log('DELETED');
312837
+ provisioner_src_logger.debug('Deleted FirestartrGithubOrgWebhook');
312445
312838
  break;
312446
312839
  case 'IMPORT':
312447
- github_orgWebhook_log('IMPORT');
312840
+ provisioner_src_logger.debug('Importing FirestartrGithubOrgWebhook');
312448
312841
  break;
312449
312842
  case 'IMPORT_SKIP_PLAN':
312450
- github_orgWebhook_log('IMPORT_SKIP_PLAN');
312843
+ provisioner_src_logger.debug('Import skipping plan for FirestartrGithubOrgWebhook');
312451
312844
  break;
312452
312845
  default:
312453
- github_orgWebhook_log('UNKNOWN');
312846
+ provisioner_src_logger.debug('Unknown operation for FirestartrGithubOrgWebhook ' +
312847
+ this.get('operation'));
312454
312848
  }
312455
312849
  }
312456
312850
  }
@@ -312487,6 +312881,12 @@ async function runProvisioner(data, opts) {
312487
312881
  ? 'DELETE'
312488
312882
  : 'UNKNOWN';
312489
312883
  const resource = createInstanceOf(mainCr, operation, deps);
312884
+ if ('logStreamCallbacksCDKTF' in opts) {
312885
+ resource.setSynthStreamLogs(opts['logStreamCallbacksCDKTF']);
312886
+ }
312887
+ if ('logStreamCallbacksTF' in opts) {
312888
+ resource.setTFStreamLogs(opts['logStreamCallbacksTF']);
312889
+ }
312490
312890
  await resource.run();
312491
312891
  return resource;
312492
312892
  }
@@ -312505,7 +312905,6 @@ function createInstanceOf(entity, op, deps) {
312505
312905
 
312506
312906
 
312507
312907
 
312508
- const provisioner_messageLog_0 = src_default()('firestartr:provisioner:main');
312509
312908
  async function deploy(app) {
312510
312909
  const entity = catalog_common.io.fromYaml(external_fs_.readFileSync(catalog_common.environment.getFromEnvironment(catalog_common.types.envVars.cdktfEntityPath), 'utf8'));
312511
312910
  const deps = catalog_common.io.fromYaml(external_fs_.readFileSync(catalog_common.environment.getFromEnvironment(catalog_common.types.envVars.cdktfDepsPath), 'utf8'));
@@ -312514,7 +312913,7 @@ async function deploy(app) {
312514
312913
  : false;
312515
312914
  catalog_common.io.removeFile(catalog_common.environment.getFromEnvironment(catalog_common.types.envVars.cdktfEntityPath));
312516
312915
  catalog_common.io.removeFile(catalog_common.environment.getFromEnvironment(catalog_common.types.envVars.cdktfDepsPath));
312517
- provisioner_messageLog_0('Entity to provision: %O', entity);
312916
+ provisioner_src_logger.info(`Entity to provision: ${entity}`);
312518
312917
  const orgConfig = {
312519
312918
  bucket: catalog_common.environment.getFromEnvironment(catalog_common.types.envVars.s3Bucket),
312520
312919
  dynamodbTable: catalog_common.environment.getFromEnvironment(catalog_common.types.envVars.s3Lock),
@@ -312532,7 +312931,7 @@ async function deploy(app) {
312532
312931
  app.synth();
312533
312932
  }
312534
312933
  catch (e) {
312535
- void provisioner_messageLog_0('Error: deploy: %s', e);
312934
+ provisioner_src_logger.error('Error: deploy: %s', e);
312536
312935
  throw e;
312537
312936
  }
312538
312937
  }
@@ -312554,26 +312953,20 @@ if (process.env.RUN_PROVISIONER) {
312554
312953
  async function tryPublishApply(item, planOutput, kind) {
312555
312954
  try {
312556
312955
  if (!('firestartr.dev/last-state-pr' in item.metadata.annotations)) {
312557
- src_logger.debug('USER_FEEDBACK_PUBLISH_APPLY_NO_LAST_STATE', {
312558
- metadata: { name: item.metadata.name, kind },
312559
- });
312956
+ operator_src_logger.debug(`The user feedback for the '${kind}/${item.metadata.name}' apply operation could not be published because the last state was not found.`);
312560
312957
  return;
312561
312958
  }
312562
312959
  await publishApply(item, planOutput, kind);
312563
312960
  }
312564
312961
  catch (e) {
312565
- src_logger.error('USER_FEEDBACK_PUBLISH_APPLY_ERROR', {
312566
- metadata: { name: item.metadata.name, kind, error: e },
312567
- });
312962
+ operator_src_logger.error(`The user feedback for the '${kind}/${item.metadata.name}' apply operation failed to publish due to an error: '${e}'.`);
312568
312963
  }
312569
312964
  }
312570
312965
  async function tryPublishDestroy(item, destroyOutput) {
312571
312966
  let lastPr = null;
312572
312967
  try {
312573
312968
  const { repo, org } = extractPrInfo(item);
312574
- src_logger.debug('USER_FEEDBACK_PUBLISH_DESTROY', {
312575
- metadata: { item, repo, org },
312576
- });
312969
+ operator_src_logger.debug(`The user feedback for the '${item.kind}/${item.metadata.name}' destroy operation is being published for repository '${repo}' in organization '${org}'.`);
312577
312970
  lastPr = await github_0.pulls.filterPrBy({
312578
312971
  title: `hydrate: ${item.metadata.name}`,
312579
312972
  state: 'closed',
@@ -312584,9 +312977,7 @@ async function tryPublishDestroy(item, destroyOutput) {
312584
312977
  maxRetries: 3,
312585
312978
  });
312586
312979
  if (!lastPr) {
312587
- src_logger.debug('USER_FEEDBACK_PUBLISH_DESTROY_NO_LAST_STATE', {
312588
- metadata: { item },
312589
- });
312980
+ operator_src_logger.debug(`The user feedback for the '${item.kind}/${item.metadata.name}' destroy operation could not be published because the last state was not found.`);
312590
312981
  return;
312591
312982
  }
312592
312983
  const dividedOutput = github_0.pulls.divideCommentIntoChunks(destroyOutput, 250);
@@ -312604,20 +312995,14 @@ async function tryPublishDestroy(item, destroyOutput) {
312604
312995
  ${commentContent}
312605
312996
  \`\`\`
312606
312997
  </details>`;
312607
- src_logger.debug('USER_FEEDBACK_PUBLISH_COMMENT', {
312608
- metadata: { lastPr: lastPr.number, repo, org, item },
312609
- });
312998
+ operator_src_logger.debug(`The user feedback for item '${item.kind}/${item.metadata.name}' is being published as a comment on pull request '${lastPr.number}' for repository '${repo}' in organization '${org}'.`);
312610
312999
  await github_0.pulls.commentInPR(comment, lastPr.number, repo, org);
312611
- src_logger.debug('USER_FEEDBACK_PUBLISH_DESTROY_COMMENT', {
312612
- metadata: { lastPr: lastPr.number, item },
312613
- });
313000
+ operator_src_logger.debug(`The user feedback for the '${item.kind}/${item.metadata.name}' destroy operation is being published as a comment on pull request '${lastPr.number}'.`);
312614
313001
  currentCommentNo += 1;
312615
313002
  }
312616
313003
  }
312617
313004
  catch (e) {
312618
- src_logger.error('USER_FEEDBACK_PUBLISH_ERROR', {
312619
- metadata: { lastPr: lastPr.number, item, error: e },
312620
- });
313005
+ operator_src_logger.error(`An error occurred while publishing user feedback for item '${item.kind}/${item.metadata.name}' on pull request '${lastPr.number}': '${e}'.`);
312621
313006
  }
312622
313007
  }
312623
313008
  async function publishApply(item, applyOutput, kind) {
@@ -312654,9 +313039,7 @@ function tryCreateErrorSummary(title, errorMsg) {
312654
313039
  return summaryText;
312655
313040
  }
312656
313041
  catch (e) {
312657
- src_logger.error('USER_FEEDBACK_GETTING_ERROR_SUMMARY', {
312658
- metadata: { error: e, title, errorMsg },
312659
- });
313042
+ operator_src_logger.error(`An error occurred while getting the error summary for '${title}'. The error was '${e}', with the message: '${errorMsg}'.`);
312660
313043
  return `Error when getting error summary: ${e}`;
312661
313044
  }
312662
313045
  }
@@ -312678,9 +313061,7 @@ async function tryPublishError(item, reason, message) {
312678
313061
  await publishError(item, reason, message);
312679
313062
  }
312680
313063
  catch (e) {
312681
- src_logger.error('USER_FEEDBACK_TRY_PUBLISH_ERROR', {
312682
- metadata: { item, error: e, reason },
312683
- });
313064
+ operator_src_logger.error(`The user feedback for item '${item.kind}/${item.metadata.name}' failed to publish due to an error: '${e}'. Reason: '${reason}'.`);
312684
313065
  }
312685
313066
  }
312686
313067
  async function publishError(item, reason, message) {
@@ -312718,6 +313099,53 @@ ${commentContent}
312718
313099
  }
312719
313100
  }
312720
313101
 
313102
+ ;// CONCATENATED MODULE: ../operator/src/user-feedback-ops/gh-checkrun.ts
313103
+
313104
+ async function GHCheckRun(cmd, item) {
313105
+ const prInfo = gh_checkrun_extractPrInfo(item);
313106
+ if (!prInfo.prNumber) {
313107
+ throw new Error('TFCheckRun: prNumber not retrievable');
313108
+ }
313109
+ const checkRun = await github_0.feedback.createCheckRun(prInfo.org, prInfo.repo, helperCreateCheckRunName(cmd, item), {
313110
+ //Number(pr_number),
313111
+ pullNumber: Number(prInfo.prNumber),
313112
+ includeCheckRunComment: true,
313113
+ checkRunComment: `The Github ${item.kind} is being processed (cmd=${cmd}). Details: `,
313114
+ });
313115
+ checkRun.mdOptionsDetails({
313116
+ quotes: 'terraform',
313117
+ });
313118
+ checkRun.update('Initiating', 'queued');
313119
+ return {
313120
+ fnData: (d) => {
313121
+ checkRun.update(d.toString(), 'in_progress');
313122
+ },
313123
+ fnEnd: () => {
313124
+ checkRun.close('OK', true);
313125
+ },
313126
+ fnOnError: (err) => {
313127
+ checkRun.close('KO', false);
313128
+ },
313129
+ };
313130
+ }
313131
+ function helperCreateCheckRunName(cmd, item) {
313132
+ return `Github Provisioner / ${item.kind} - ${cmd}`;
313133
+ }
313134
+ function gh_checkrun_extractPrInfo(item) {
313135
+ const prInfo = item.metadata.annotations['firestartr.dev/last-state-pr'];
313136
+ const prNumber = prInfo.split('#')[1];
313137
+ if (!prNumber)
313138
+ throw new Error('No PR number found in CR');
313139
+ const orgRepo = prInfo.split('#')[0];
313140
+ const org = orgRepo.split('/')[0];
313141
+ if (!org)
313142
+ throw new Error('No org found in CR');
313143
+ const repo = orgRepo.split('/')[1];
313144
+ if (!repo)
313145
+ throw new Error('No repo found in CR');
313146
+ return { prNumber, repo, org };
313147
+ }
313148
+
312721
313149
  ;// CONCATENATED MODULE: ../operator/cdktf.ts
312722
313150
 
312723
313151
 
@@ -312727,8 +313155,8 @@ ${commentContent}
312727
313155
 
312728
313156
 
312729
313157
 
312730
- const cdktf_log = src_default()('firestartr:operator:cdktf');
312731
313158
  function processOperation(item, op, handler) {
313159
+ operator_src_logger.info(`Processing operation ${op} on ${item.kind}/${item.metadata?.name}`);
312732
313160
  try {
312733
313161
  switch (op) {
312734
313162
  case OperationType.UPDATED:
@@ -312750,7 +313178,7 @@ function processOperation(item, op, handler) {
312750
313178
  }
312751
313179
  }
312752
313180
  catch (e) {
312753
- cdktf_log(`Operation ${op} failed: ${e}`);
313181
+ operator_src_logger.error(`Operation ${op} failed: ${e}`);
312754
313182
  throw e;
312755
313183
  }
312756
313184
  }
@@ -312801,6 +313229,9 @@ async function* sync(item, op, handler) {
312801
313229
  };
312802
313230
  }
312803
313231
  async function* markedToDeletion(item, op, handler) {
313232
+ // here we store the current callbacks that
313233
+ // are being used (synth|tf-apply...)
313234
+ let checkRunCtl;
312804
313235
  try {
312805
313236
  void cleanTerraformState();
312806
313237
  const type = 'DELETING';
@@ -312825,15 +313256,38 @@ async function* markedToDeletion(item, op, handler) {
312825
313256
  status: 'True',
312826
313257
  message: 'Destroying process started',
312827
313258
  };
312828
- if (item.metadata.annotations['firestartr.dev/last-state-pr'] || false) {
312829
- await addDestroyCommitStatus(item, 'pending', 'Performing destroy operation...', `Terraform Destroy ${item.metadata.name}`);
312830
- }
312831
313259
  const deps = await handler.resolveReferences();
313260
+ const annotation = 'firestartr.dev/last-state-pr';
313261
+ const statePr = item?.metadata?.annotations?.[annotation];
313262
+ const hasStatePr = typeof statePr === 'string' && statePr.trim().length > 0;
313263
+ if (!hasStatePr) {
313264
+ operator_src_logger.warn(`CR ${item?.kind ?? 'UnknownKind'}/${item?.metadata?.name ?? 'unknown'} ` +
313265
+ `has no "${annotation}" annotation; skipping GitHub Check Runs (synth, terraform apply).`);
313266
+ }
313267
+ else {
313268
+ operator_src_logger.debug(`CR ${item.kind}/${item.metadata.name} uses "${annotation}" = ${statePr}`);
313269
+ }
312832
313270
  const destroyOutput = await provisioner.runProvisioner({
312833
313271
  mainCr: item,
312834
313272
  deps,
312835
313273
  }, {
312836
313274
  delete: true,
313275
+ ...(hasStatePr
313276
+ ? {
313277
+ logStreamCallbacksCDKTF: {
313278
+ prepare: async () => {
313279
+ checkRunCtl = await GHCheckRun('synth', item);
313280
+ return checkRunCtl;
313281
+ },
313282
+ },
313283
+ logStreamCallbacksTF: {
313284
+ prepare: async () => {
313285
+ checkRunCtl = await GHCheckRun('terraform destroy', item);
313286
+ return checkRunCtl;
313287
+ },
313288
+ },
313289
+ }
313290
+ : {}),
312837
313291
  });
312838
313292
  const output = destroyOutput.output;
312839
313293
  await tryPublishDestroy(item, output);
@@ -312859,10 +313313,11 @@ async function* markedToDeletion(item, op, handler) {
312859
313313
  status: 'True',
312860
313314
  message: e.toString(),
312861
313315
  };
313316
+ // if there is a current checkRun working
313317
+ // we close it with an error
313318
+ if (checkRunCtl)
313319
+ checkRunCtl.fnOnError(e);
312862
313320
  await handler.writeTerraformOutputInTfResult(item, e);
312863
- if (item.metadata.annotations['firestartr.dev/last-state-pr'] || false) {
312864
- await addDestroyCommitStatus(item, 'failure', 'Destroy operation failed', `Terraform Destroy ${item.metadata.name}`);
312865
- }
312866
313321
  void handler.error();
312867
313322
  }
312868
313323
  }
@@ -312881,6 +313336,9 @@ async function* nothing(item, op, handler) {
312881
313336
  * @param handler -
312882
313337
  */
312883
313338
  async function* doApply(item, op, handler) {
313339
+ // here we store the current callbacks that
313340
+ // are being used (synth|tf-apply...)
313341
+ let checkRunCtl;
312884
313342
  try {
312885
313343
  cleanTerraformState();
312886
313344
  yield {
@@ -312922,16 +313380,41 @@ async function* doApply(item, op, handler) {
312922
313380
  opts['create'] = true;
312923
313381
  }
312924
313382
  const deps = await handler.resolveReferences();
312925
- cdktf_log('Item %s has the following dependencies: %O', item.metadata.name, deps);
312926
- if (item.metadata.annotations['firestartr.dev/last-state-pr'] || false) {
312927
- await addApplyCommitStatus(item, 'pending', catalog_common.generic.getPrLinkFromAnnotationValue(item.metadata.annotations['firestartr.dev/last-state-pr']), 'Performing apply operation...', `Terraform Apply ${item.metadata.name}`);
313383
+ operator_src_logger.info(`Item ${item.metadata.name} has the following dependencies: ${deps}`);
313384
+ const annotation = 'firestartr.dev/last-state-pr';
313385
+ const statePr = item?.metadata?.annotations?.[annotation];
313386
+ const hasStatePr = typeof statePr === 'string' && statePr.trim().length > 0;
313387
+ if (!hasStatePr) {
313388
+ operator_src_logger.warn(`CR ${item?.kind ?? 'UnknownKind'}/${item?.metadata?.name ?? 'unknown'} ` +
313389
+ `has no "${annotation}" annotation; skipping GitHub Check Runs (synth, terraform apply).`);
313390
+ }
313391
+ else {
313392
+ operator_src_logger.debug(`CR ${item.kind}/${item.metadata.name} uses "${annotation}" = ${statePr}`);
312928
313393
  }
312929
313394
  const applyOutput = await provisioner.runProvisioner({
312930
313395
  mainCr: item,
312931
313396
  deps,
312932
- }, opts);
313397
+ }, {
313398
+ ...opts,
313399
+ ...(hasStatePr
313400
+ ? {
313401
+ logStreamCallbacksCDKTF: {
313402
+ prepare: async () => {
313403
+ checkRunCtl = await GHCheckRun('synth', item);
313404
+ return checkRunCtl;
313405
+ },
313406
+ },
313407
+ logStreamCallbacksTF: {
313408
+ prepare: async () => {
313409
+ checkRunCtl = await GHCheckRun('terraform apply', item);
313410
+ return checkRunCtl;
313411
+ },
313412
+ },
313413
+ }
313414
+ : {}),
313415
+ });
312933
313416
  await tryPublishApply(item, applyOutput?.data?.output, item.kind);
312934
- const terraformOutputJson = await provisioner.runTerraform(item, ['output', '-json']);
313417
+ const terraformOutputJson = await provisioner.runTerraform(item, ['output', '-json'], null);
312935
313418
  if (!terraformOutputJson) {
312936
313419
  throw new Error(`Terraform output is empty for ${item.kind}/${item.metadata.name}`);
312937
313420
  }
@@ -312959,9 +313442,6 @@ async function* doApply(item, op, handler) {
312959
313442
  message: 'doApply',
312960
313443
  };
312961
313444
  await handler.writeTerraformOutputInTfResult(item, output);
312962
- if (item.metadata.annotations['firestartr.dev/last-state-pr'] || false) {
312963
- await addApplyCommitStatus(item, 'success', catalog_common.generic.getPrLinkFromAnnotationValue(item.metadata.annotations['firestartr.dev/last-state-pr']), 'Apply operation completed', `Terraform Apply ${item.metadata.name}`);
312964
- }
312965
313445
  handler.success();
312966
313446
  }
312967
313447
  catch (e) {
@@ -312973,7 +313453,11 @@ async function* doApply(item, op, handler) {
312973
313453
  error = e;
312974
313454
  }
312975
313455
  await tryPublishApply(item, error, item.kind);
312976
- cdktf_log('Error applying item %s: %O', item.metadata.name, error);
313456
+ // if there is a current checkRun working
313457
+ // we close it with an error
313458
+ if (checkRunCtl)
313459
+ checkRunCtl.fnOnError(error);
313460
+ operator_src_logger.error(`Error applying item ${item.metadata.name}: ${error}`);
312977
313461
  yield {
312978
313462
  item,
312979
313463
  reason: op,
@@ -312995,9 +313479,6 @@ async function* doApply(item, op, handler) {
312995
313479
  status: 'False',
312996
313480
  message: error.toString(),
312997
313481
  };
312998
- if (item.metadata.annotations['firestartr.dev/last-state-pr'] || false) {
312999
- await addApplyCommitStatus(item, 'failure', catalog_common.generic.getPrLinkFromAnnotationValue(item.metadata.annotations['firestartr.dev/last-state-pr']), 'Apply operation failed', `Terraform Apply ${item.metadata.name}`);
313000
- }
313001
313482
  handler.error();
313002
313483
  if (error) {
313003
313484
  await handler.writeTerraformOutputInTfResult(item, error);
@@ -313114,57 +313595,68 @@ class TFPlanItemVersion extends TFPlanItem {
313114
313595
  }
313115
313596
  }
313116
313597
 
313598
+ ;// CONCATENATED MODULE: ../terraform_provisioner/src/logger.ts
313599
+
313600
+ /* harmony default export */ const terraform_provisioner_src_logger = (catalog_common.logger);
313601
+
313117
313602
  ;// CONCATENATED MODULE: ../terraform_provisioner/src/utils.ts
313118
313603
 
313119
313604
 
313120
- //import Debug from "debug"
313121
313605
 
313122
- //const infolog: Debug.Debugger = Debug('firestartr:operator:cmd:terraform')
313606
+
313123
313607
  async function utils_validate(path, secrets) {
313124
313608
  return await tfExec(path, ['validate'], secrets);
313125
313609
  }
313126
- async function init(path, secrets) {
313127
- return await tfExec(path, ['init'], secrets);
313610
+ async function init(path, secrets, stream) {
313611
+ return await tfExec(path, ['init'], secrets, ['-input=false'], stream);
313128
313612
  }
313129
- async function initFromModule(path, source, secrets) {
313130
- return tfExec(path, ['init', `-from-module=${source}`], secrets, []);
313613
+ async function initFromModule(path, source, secrets, stream) {
313614
+ return tfExec(path, ['init', `-from-module=${source}`], secrets, [], stream);
313131
313615
  }
313132
- async function plan(path, secrets, format, args = ['plan']) {
313133
- const plan = await tfExec(path, args.concat(format === 'json' ? ['-json'] : []), secrets);
313616
+ async function plan(path, secrets, format, args = ['plan'], stream) {
313617
+ terraform_provisioner_src_logger.info(`Running terraform plan with ${format} in path ${path}`);
313618
+ const plan = await tfExec(path, args.concat(format === 'json' ? ['-json'] : []), secrets, ['-input=false'], stream);
313134
313619
  if (format === 'json') {
313135
313620
  const tfPlan = planGet(plan);
313136
313621
  return tfPlan;
313137
313622
  }
313138
313623
  return plan;
313139
313624
  }
313140
- async function apply(path, secrets) {
313141
- return await tfExec(path, ['apply', '-auto-approve'], secrets);
313625
+ async function apply(path, secrets, stream) {
313626
+ terraform_provisioner_src_logger.debug(`Running terraform apply in path ${path}`);
313627
+ return await tfExec(path, ['apply', '-auto-approve'], secrets, ['-input=false'], stream);
313142
313628
  }
313143
- async function destroy(path, secrets) {
313144
- return await tfExec(path, ['destroy', '-auto-approve'], secrets);
313629
+ async function destroy(path, secrets, stream) {
313630
+ terraform_provisioner_src_logger.debug(`Running terraform destroy in path ${path}`);
313631
+ return await tfExec(path, ['destroy', '-auto-approve'], secrets, ['-input=false'], stream);
313145
313632
  }
313146
313633
  async function output(path, secrets) {
313634
+ terraform_provisioner_src_logger.debug(`Running terraform output in path ${path}`);
313147
313635
  return await tfExec(path, ['output', '-json'], secrets, []);
313148
313636
  }
313149
- async function tfExec(path, args, secrets, extraArgs = ['-input=false']) {
313150
- // Format to TF_VAR variables -> https://developer.hashicorp.com/terraform/cli/config/environment-variables#tf_var_name
313151
- for (const secret of secrets) {
313152
- process.env[`${secret.key}`] = secret.value;
313153
- }
313154
- process.env['TF_PLUGIN_CACHE_DIR'] = '/home/terraform-plugins-cache';
313637
+ async function tfExec(path, args, secrets, extraArgs = ['-input=false'], stream) {
313638
+ terraform_provisioner_src_logger.info(`Spawning terraform process ['terraform ${args.concat(extraArgs).join(' ')}'] in path '${path}'`);
313155
313639
  return new Promise((ok, ko) => {
313156
- const tfProcess = (0,external_child_process_.spawn)('terraform', args.concat(extraArgs), { cwd: path });
313157
- tfProcess.stdout.pipe(process.stdout);
313158
- tfProcess.stderr.pipe(process.stderr);
313640
+ const tfProcess = (0,external_child_process_.spawn)('terraform', args.concat(extraArgs), {
313641
+ cwd: path,
313642
+ stdio: ['inherit', 'pipe', 'pipe'],
313643
+ env: envBuilder(secrets),
313644
+ });
313159
313645
  let output = '';
313160
313646
  let flagStdoutEnd = false;
313161
313647
  let flagStderrEnd = false;
313162
313648
  let outputErrors = '';
313163
313649
  tfProcess.stdout.on('data', (log) => {
313164
- output += catalog_common.io.stripAnsi(log.toString());
313650
+ const line = catalog_common.io.stripAnsi(log.toString());
313651
+ output += line;
313652
+ if (stream)
313653
+ stream.write(line);
313165
313654
  });
313166
313655
  tfProcess.stderr.on('data', (log) => {
313167
- outputErrors += catalog_common.io.stripAnsi(log.toString());
313656
+ const line = catalog_common.io.stripAnsi(log.toString());
313657
+ outputErrors += line;
313658
+ if (stream)
313659
+ stream.write(line);
313168
313660
  });
313169
313661
  tfProcess.stdout.on('end', () => {
313170
313662
  flagStdoutEnd = true;
@@ -313179,9 +313671,12 @@ async function tfExec(path, args, secrets, extraArgs = ['-input=false']) {
313179
313671
  await catalog_common.generic.sleep(500);
313180
313672
  }
313181
313673
  if (code !== 0) {
313182
- ko(output + outputErrors);
313674
+ terraform_provisioner_src_logger.error(`Terraform output ${path}: ${output + outputErrors}`);
313675
+ terraform_provisioner_src_logger.error(`Terraform output ${path}: ${[output, outputErrors].join('')}`);
313676
+ ko([output, outputErrors].join(''));
313183
313677
  }
313184
313678
  else {
313679
+ terraform_provisioner_src_logger.info(`Terraform output ${path}: ${output}`);
313185
313680
  ok(output);
313186
313681
  }
313187
313682
  });
@@ -313195,7 +313690,9 @@ async function configureGit(ghToken) {
313195
313690
  'url."https://' + ghToken + '@github.com".insteadOf',
313196
313691
  'https://github.com',
313197
313692
  ];
313198
- const gitProcess = spawn('git', options);
313693
+ const gitProcess = spawn('git', options, {
313694
+ stdio: ['inherit', 'pipe', 'pipe'],
313695
+ });
313199
313696
  let output = '';
313200
313697
  gitProcess.on('data', (log) => {
313201
313698
  output += common.io.stripAnsi(log.toString());
@@ -313216,6 +313713,18 @@ async function configureGit(ghToken) {
313216
313713
  });
313217
313714
  });
313218
313715
  }
313716
/**
 * Builds the environment object handed to the spawned `terraform` process.
 *
 * Only a small allow-list of variables is copied from the parent process,
 * plus one entry per provided secret, plus a fixed plugin-cache directory.
 *
 * @param {Array<{key: string, value: string}>} secrets - secret entries to expose.
 * @returns {Object<string, string>} environment map for child_process.spawn.
 */
function envBuilder(secrets) {
    const passthroughKeys = ['PATH', 'ORG', 'KUBERNETES_SERVICE_PORT', 'KUBERNETES_SERVICE_HOST'];
    const env = {};
    for (const key of passthroughKeys) {
        env[key] = process.env[key];
    }
    // Secret entries become plain environment variables keyed by secret.key.
    // NOTE(review): the linked TF_VAR docs suggest a TF_VAR_ prefix
    // (https://developer.hashicorp.com/terraform/cli/config/environment-variables#tf_var_name)
    // but no prefix is added here — presumably callers supply already-prefixed
    // keys; confirm against the secret producers.
    secrets.forEach((secret) => {
        env[secret.key] = secret.value;
    });
    terraform_provisioner_src_logger.debug(`Environment variables: ${Object.keys(env).join(', ')}`);
    return { ...env, TF_PLUGIN_CACHE_DIR: '/home/terraform-plugins-cache' };
}
313219
313728
 
313220
313729
  ;// CONCATENATED MODULE: ../terraform_provisioner/src/writer.ts
313221
313730
 
@@ -313550,6 +314059,7 @@ function fCheckString(keys, refs) {
313550
314059
 
313551
314060
 
313552
314061
 
314062
+
313553
314063
  class project_tf_TFProjectManager {
313554
314064
  constructor(ctx) {
313555
314065
  this.tfOutput = '';
@@ -313558,6 +314068,14 @@ class project_tf_TFProjectManager {
313558
314068
  this.tfVarsJsonWriter = new WriterTfVarsJson(ctx.values, ctx.references);
313559
314069
  this.secrets = ctx.secrets;
313560
314070
  }
314071
+ setStreamCallbacks(fnData, fnEnd, reopen = true) {
314072
+ if (reopen || !this.stream)
314073
+ this.stream = new external_stream_.PassThrough();
314074
+ this.stream.on('data', (data) => {
314075
+ fnData(data.toString());
314076
+ });
314077
+ this.stream.on('end', fnEnd);
314078
+ }
313561
314079
  getOutput() {
313562
314080
  return this.tfOutput;
313563
314081
  }
@@ -313568,10 +314086,10 @@ class project_tf_TFProjectManager {
313568
314086
  this.tfVarsJsonWriter.writeToTerraformProject(external_path_.join(this.projectPath, 'terraform.tfvars.json'));
313569
314087
  }
313570
314088
  async __init() {
313571
- this.tfOutput += await init(this.projectPath, this.secrets);
314089
+ this.tfOutput += await init(this.projectPath, this.secrets, this.stream);
313572
314090
  }
313573
314091
  async __initFromModule() {
313574
- this.tfOutput += await init(this.projectPath, this.secrets);
314092
+ this.tfOutput += await init(this.projectPath, this.secrets, this.stream);
313575
314093
  }
313576
314094
  async validate() {
313577
314095
  await this.__init();
@@ -313581,24 +314099,27 @@ class project_tf_TFProjectManager {
313581
314099
  await this.__init();
313582
314100
  if (format === 'json')
313583
314101
  this.tfOutput = null;
313584
- this.tfOutput = await plan(this.projectPath, this.secrets, format);
314102
+ this.tfOutput = await plan(this.projectPath, this.secrets, format, ['plan'], this.stream);
314103
+ if (this.stream)
314104
+ this.stream.end();
313585
314105
  }
313586
314106
  async planDestroy(format) {
313587
314107
  await this.__init();
313588
314108
  if (format === 'json')
313589
314109
  this.tfOutput = null;
313590
- this.tfOutput = await plan(this.projectPath, this.secrets, format, [
313591
- 'plan',
313592
- '-destroy',
313593
- ]);
314110
+ this.tfOutput = await plan(this.projectPath, this.secrets, format, ['plan', '-destroy'], this.stream);
313594
314111
  }
313595
314112
  async apply() {
313596
314113
  await this.__init();
313597
- this.tfOutput += await apply(this.projectPath, this.secrets);
314114
+ this.tfOutput += await apply(this.projectPath, this.secrets, this.stream);
314115
+ if (this.stream)
314116
+ this.stream.end();
313598
314117
  }
313599
314118
  async destroy() {
313600
314119
  await this.__init();
313601
- this.tfOutput += await destroy(this.projectPath, this.secrets);
314120
+ this.tfOutput += await destroy(this.projectPath, this.secrets, this.stream);
314121
+ if (this.stream)
314122
+ this.stream.end();
313602
314123
  }
313603
314124
  async output() {
313604
314125
  await this.__init();
@@ -313692,6 +314213,7 @@ var lib_ajv_default = /*#__PURE__*/__nccwpck_require__.n(lib_ajv);
313692
314213
 
313693
314214
 
313694
314215
 
314216
+
313695
314217
  class TFProjectManagerRemote {
313696
314218
  constructor(ctx) {
313697
314219
  this.tfOutput = '';
@@ -313704,6 +314226,14 @@ class TFProjectManagerRemote {
313704
314226
  getOutput() {
313705
314227
  return this.tfOutput;
313706
314228
  }
314229
+ setStreamCallbacks(fnData, fnEnd, reopen = true) {
314230
+ if (reopen || !this.stream)
314231
+ this.stream = new external_stream_.PassThrough();
314232
+ this.stream.on('data', (data) => {
314233
+ fnData(data.toString());
314234
+ });
314235
+ this.stream.on('end', fnEnd);
314236
+ }
313707
314237
  async build() {
313708
314238
  external_fs_.rmSync(this.projectPath, { recursive: true, force: true });
313709
314239
  await this.__configGit();
@@ -313735,19 +314265,25 @@ insteadOf = https://github.com`);
313735
314265
  async plan(format) {
313736
314266
  await this.__init();
313737
314267
  if (format === 'json') {
313738
- this.tfOutput = await plan(this.projectPath, this.secrets, format);
314268
+ this.tfOutput = await plan(this.projectPath, this.secrets, format, ['plan'], this.stream);
313739
314269
  }
313740
314270
  else {
313741
- this.tfOutput += await plan(this.projectPath, this.secrets, format);
314271
+ this.tfOutput += await plan(this.projectPath, this.secrets, format, ['plan'], this.stream);
313742
314272
  }
314273
+ if (this.stream)
314274
+ this.stream.end();
313743
314275
  }
313744
314276
  async apply() {
313745
314277
  await this.__init();
313746
- this.tfOutput += await apply(this.projectPath, this.secrets);
314278
+ this.tfOutput += await apply(this.projectPath, this.secrets, this.stream);
314279
+ if (this.stream)
314280
+ this.stream.end();
313747
314281
  }
313748
314282
  async destroy() {
313749
314283
  await this.__init();
313750
- this.tfOutput += await destroy(this.projectPath, this.secrets);
314284
+ this.tfOutput += await destroy(this.projectPath, this.secrets, this.stream);
314285
+ if (this.stream)
314286
+ this.stream.end();
313751
314287
  }
313752
314288
  async planDestroy(format) {
313753
314289
  await this.__init();
@@ -313776,6 +314312,7 @@ insteadOf = https://github.com`);
313776
314312
 
313777
314313
 
313778
314314
 
314315
+
313779
314316
  const terraform_provisioner_ajv = new (lib_ajv_default())();
313780
314317
  const terraform_provisioner_validate = terraform_provisioner_ajv.compile(terraform_provisioner_src_schema);
313781
314318
  function validateContext(context) {
@@ -313793,7 +314330,8 @@ async function run() {
313793
314330
  await execCommand(command, tfProject);
313794
314331
  }
313795
314332
  // Programatic API
313796
- async function runTerraformProvisioner(context, command = 'init') {
314333
+ async function runTerraformProvisioner(context, command = 'init', streaming) {
314334
+ terraform_provisioner_src_logger.info(`Running command ${command} on a ${context.type} project`);
313797
314335
  validateContext(context);
313798
314336
  let tfProject = {};
313799
314337
  if (context.type === 'Inline') {
@@ -313802,10 +314340,14 @@ async function runTerraformProvisioner(context, command = 'init') {
313802
314340
  else if (context.type === 'Remote') {
313803
314341
  tfProject = new TFProjectManagerRemote(context);
313804
314342
  }
314343
+ if (streaming) {
314344
+ tfProject.setStreamCallbacks(streaming.fnData, streaming.fnEnd);
314345
+ }
313805
314346
  const output = await execCommand(command, tfProject);
313806
314347
  return output;
313807
314348
  }
313808
314349
  async function execCommand(command, tfProject) {
314350
+ terraform_provisioner_src_logger.info(`Executing command ${command} on ${tfProject.projectPath}`);
313809
314351
  await tfProject.build();
313810
314352
  switch (command) {
313811
314353
  case 'init':
@@ -313914,6 +314456,52 @@ async function* errorPolicyCompatibility(syncPolicy, generalPolicy, item, op) {
313914
314456
  await tryPublishError(item, op, message);
313915
314457
  }
313916
314458
 
314459
+ ;// CONCATENATED MODULE: ../operator/src/user-feedback-ops/tf-checkrun.ts
314460
+
314461
/**
 * Creates a GitHub check run for a TFWorkspace operation and returns the
 * streaming callbacks used to feed terraform output into it.
 *
 * The PR coordinates (org/repo#number) are read from the item's
 * `firestartr.dev/last-state-pr` annotation.
 *
 * @param {string} cmd - terraform command being executed (e.g. 'apply').
 * @param {object} item - custom resource carrying the PR annotation.
 * @returns {Promise<{fnData: Function, fnEnd: Function, fnOnError: Function}>}
 *          callbacks: fnData streams progress, fnEnd closes as success,
 *          fnOnError closes as failure.
 * @throws {Error} when the PR number cannot be extracted from the item.
 */
async function TFCheckRun(cmd, item) {
    const { org, repo, prNumber } = tf_checkrun_extractPrInfo(item);
    if (!prNumber) {
        throw new Error('TFCheckRun: prNumber not retrievable');
    }
    const checkRun = await github_0.feedback.createCheckRun(org, repo, tf_checkrun_helperCreateCheckRunName(cmd), {
        pullNumber: Number(prNumber),
        includeCheckRunComment: true,
        checkRunComment: `The TFWorkspace is being processed (cmd=${cmd}). Details: `,
    });
    // Render the streamed details inside a ```terraform fenced block.
    checkRun.mdOptionsDetails({ quotes: 'terraform' });
    checkRun.update('Initiating', 'queued');
    return {
        fnData: (d) => {
            checkRun.update(d.toString(), 'in_progress');
        },
        fnEnd: () => {
            checkRun.close('OK', true);
        },
        fnOnError: () => {
            // The error itself is reported elsewhere; here we only flip the
            // check run to a failed conclusion.
            checkRun.close('KO', false);
        },
    };
}
314488
+ function tf_checkrun_helperCreateCheckRunName(cmd) {
314489
+ return `TFWorkspace - ${cmd}`;
314490
+ }
314491
+ function tf_checkrun_extractPrInfo(item) {
314492
+ const prInfo = item.metadata.annotations['firestartr.dev/last-state-pr'];
314493
+ const prNumber = prInfo.split('#')[1];
314494
+ if (!prNumber)
314495
+ throw new Error('No PR number found in CR');
314496
+ const org = prInfo.split('#')[0].split('/')[0];
314497
+ if (!org)
314498
+ throw new Error('No org found in CR');
314499
+ const repo = prInfo.split('#')[0].split('/')[1];
314500
+ if (!repo)
314501
+ throw new Error('No repo found in CR');
314502
+ return { prNumber, repo, org };
314503
+ }
314504
+
313917
314505
  ;// CONCATENATED MODULE: ../operator/src/tfworkspaces/process-operation.ts
313918
314506
 
313919
314507
 
@@ -313924,6 +314512,7 @@ async function* errorPolicyCompatibility(syncPolicy, generalPolicy, item, op) {
313924
314512
 
313925
314513
 
313926
314514
 
314515
+
313927
314516
  const TF_PROJECTS_PATH = '/tmp/tfworkspaces';
313928
314517
  function process_operation_processOperation(item, op, handler) {
313929
314518
  try {
@@ -313957,7 +314546,7 @@ function process_operation_processOperation(item, op, handler) {
313957
314546
  }
313958
314547
  }
313959
314548
  catch (e) {
313960
- src_logger.error('TERRAFORM_PROCESSOR_OP_ERROR', { metadata: { op, error: e } });
314549
+ operator_src_logger.error(`The Terraform processor encountered an error during operation '${op}': '${e}'.`);
313961
314550
  throw e;
313962
314551
  }
313963
314552
  }
@@ -313990,9 +314579,7 @@ async function* doPlanJSONFormat(item, op, handler) {
313990
314579
  message: 'Planning process started',
313991
314580
  };
313992
314581
  const deps = await handler.resolveReferences();
313993
- src_logger.info('TERRAFORM_PROCESSOR_PLAN_ASSESS_DEPS', {
313994
- metadata: { item, deps },
313995
- });
314582
+ operator_src_logger.info(`The Terraform processor is planning to assess dependencies for item '${item.kind}/${item.metadata.name}' with dependencies: '${deps}'.`);
313996
314583
  const context = buildProvisionerContext(item, deps);
313997
314584
  let planType = 'plan-json';
313998
314585
  if ('deletionTimestamp' in item.metadata) {
@@ -314055,9 +314642,7 @@ async function* doPlanJSONFormat(item, op, handler) {
314055
314642
  }
314056
314643
  catch (e) {
314057
314644
  console.error(e);
314058
- src_logger.error('TERRAFORM_PROCESSOR_PLAN_OBSERVE_ERROR', {
314059
- metadata: { item, error: e },
314060
- });
314645
+ operator_src_logger.error(`The Terraform processor encountered an error while observing the plan for item '${item.kind}/${item.metadata.name}': '${e}'.`);
314061
314646
  yield {
314062
314647
  item,
314063
314648
  reason: op,
@@ -314143,9 +314728,7 @@ async function* process_operation_sync(item, op, handler, syncPolicy, generalPol
314143
314728
  message: 'Sync process started',
314144
314729
  };
314145
314730
  if (!syncPolicy) {
314146
- src_logger.debug('TERRAFORM_PROCESSOR_NO_SYNC_POLICY_ONLY_OBSERVE', {
314147
- metadata: { op, item },
314148
- });
314731
+ operator_src_logger.debug(`The Terraform processor is only observing item '${item.kind}/${item.metadata.name}' because no sync policy was found for operation '${op}'.`);
314149
314732
  yield* doPlanJSONFormat(item, op, handler);
314150
314733
  return;
314151
314734
  }
@@ -314164,9 +314747,7 @@ async function* process_operation_sync(item, op, handler, syncPolicy, generalPol
314164
314747
  break;
314165
314748
  }
314166
314749
  default: {
314167
- src_logger.debug('TERRAFORM_PROCESSOR_POLICY_NOT_SUPPORTED', {
314168
- metadata: { syncPolicy, item },
314169
- });
314750
+ operator_src_logger.debug(`The Terraform processor detected a sync policy '${syncPolicy}' for item '${item.kind}/${item.metadata.name}' that is not supported.`);
314170
314751
  yield* doPlanJSONFormat(item, op, handler);
314171
314752
  break;
314172
314753
  }
@@ -314291,6 +314872,7 @@ async function* process_operation_nothing(item, op, handler) {
314291
314872
  * @param handler -
314292
314873
  */
314293
314874
  async function* process_operation_doApply(item, op, handler) {
314875
+ const checkRunCtl = await TFCheckRun('apply', item);
314294
314876
  try {
314295
314877
  yield {
314296
314878
  item,
@@ -314337,14 +314919,9 @@ async function* process_operation_doApply(item, op, handler) {
314337
314919
  message: 'Provisioning process started',
314338
314920
  };
314339
314921
  const deps = await handler.resolveReferences();
314340
- src_logger.info('TERRAFORM_PROCESSOR_APPLY_ASSESS_DEPS', {
314341
- metadata: { item, deps },
314342
- });
314922
+ operator_src_logger.info(`The Terraform processor is applying and assessing dependencies for item '${item.kind}/${item.metadata.name}' with dependencies: '${deps}'.`);
314343
314923
  const context = buildProvisionerContext(item, deps);
314344
- if (item.metadata.annotations['firestartr.dev/last-state-pr'] || false) {
314345
- await addApplyCommitStatus(item, 'pending', catalog_common.generic.getPrLinkFromAnnotationValue(item.metadata.annotations['firestartr.dev/last-state-pr']), 'Performing apply operation...', `Terraform Apply ${item.metadata.name}`);
314346
- }
314347
- const applyOutput = await runTerraformProvisioner(context, 'apply');
314924
+ const applyOutput = await runTerraformProvisioner(context, 'apply', checkRunCtl);
314348
314925
  await tryPublishApply(item, applyOutput, 'TFWorkspace');
314349
314926
  const terraformOutputJson = await runTerraformProvisioner(context, 'output');
314350
314927
  if (!terraformOutputJson) {
@@ -314376,17 +314953,13 @@ async function* process_operation_doApply(item, op, handler) {
314376
314953
  message: 'doApply',
314377
314954
  };
314378
314955
  await handler.writeTerraformOutputInTfResult(item, output);
314379
- if (item.metadata.annotations['firestartr.dev/last-state-pr'] || false) {
314380
- await addApplyCommitStatus(item, 'success', catalog_common.generic.getPrLinkFromAnnotationValue(item.metadata.annotations['firestartr.dev/last-state-pr']), 'Apply operation completed', `Terraform Apply ${item.metadata.name}`);
314381
- }
314382
314956
  handler.success();
314383
314957
  }
314384
314958
  catch (e) {
314959
+ checkRunCtl.fnOnError(e);
314385
314960
  console.error(e);
314386
314961
  await tryPublishApply(item, e, 'TFWorkspace');
314387
- src_logger.error('TERRAFORM_PROCESSOR_APPLY_ERROR', {
314388
- metadata: { item, op, error: e },
314389
- });
314962
+ operator_src_logger.error(`The Terraform processor encountered an error during operation '${op}' for item '${item.kind}/${item.metadata.name}': '${e}'.`);
314390
314963
  yield {
314391
314964
  item,
314392
314965
  reason: op,
@@ -314408,9 +314981,6 @@ async function* process_operation_doApply(item, op, handler) {
314408
314981
  status: 'False',
314409
314982
  message: JSON.stringify(e),
314410
314983
  };
314411
- if (item.metadata.annotations['firestartr.dev/last-state-pr'] || false) {
314412
- await addApplyCommitStatus(item, 'failure', catalog_common.generic.getPrLinkFromAnnotationValue(item.metadata.annotations['firestartr.dev/last-state-pr']), 'Apply operation failed', `Terraform Apply ${item.metadata.name}`);
314413
- }
314414
314984
  handler.error();
314415
314985
  if (e) {
314416
314986
  await handler.writeTerraformOutputInTfResult(item, e);
@@ -314715,30 +315285,22 @@ async function tryAcquireOrRenewLease(namespace, leaseDurationSeconds) {
314715
315285
  const name = 'firestartr-lease';
314716
315286
  const currentPod = await getCurrentPod(namespace);
314717
315287
  try {
314718
- src_logger.debug('LEADER_ELECTION_TRYING_ACQUIRE_LEASE', {
314719
- metadata: { name, namespace },
314720
- });
315288
+ operator_src_logger.debug(`Attempting to acquire the leader election lease for '${name}' in namespace '${namespace}'.`);
314721
315289
  const lease = await k8sApi.readNamespacedLease(name, namespace);
314722
315290
  const weAreTheLeader = lease.body.metadata.ownerReferences[0].uid === currentPod.metadata.uid;
314723
315291
  if (!weAreTheLeader) {
314724
- src_logger.debug('LEADER_ELECTION_LEASE_ACQUIRED_BY_ANOTHER_POD', {
314725
- metadata: { name, namespace },
314726
- });
315292
+ operator_src_logger.debug(`Another pod has acquired the leader election lease for '${name}' in namespace '${namespace}'.`);
314727
315293
  throw new LeaseAcquisitionError('Lease already acquired by another pod');
314728
315294
  }
314729
315295
  lease.body.spec.acquireTime = new client_node_dist.V1MicroTime();
314730
315296
  lease.body.spec.renewTime = new client_node_dist.V1MicroTime();
314731
315297
  lease.body.spec.leaseDurationSeconds = 30;
314732
- src_logger.debug('LEADER_ELECTION_LEASE_RENEWING', {
314733
- metadata: { name, namespace },
314734
- });
315298
+ operator_src_logger.debug(`Renewing the leader election lease for '${name}' in namespace '${namespace}'.`);
314735
315299
  await k8sApi.replaceNamespacedLease(name, namespace, lease.body);
314736
315300
  }
314737
315301
  catch (err) {
314738
315302
  if (err.response && err.response.statusCode === 404) {
314739
- src_logger.debug('LEADER_ELECTION_LEASE_NOT_FOUND_CREATING', {
314740
- metadata: { name, namespace },
314741
- });
315303
+ operator_src_logger.debug(`The leader election lease for '${name}' in namespace '${namespace}' was not found. Creating a new one.`);
314742
315304
  const lease = {
314743
315305
  apiVersion: 'coordination.k8s.io/v1',
314744
315306
  kind: 'Lease',
@@ -314761,16 +315323,12 @@ async function tryAcquireOrRenewLease(namespace, leaseDurationSeconds) {
314761
315323
  },
314762
315324
  };
314763
315325
  await k8sApi.createNamespacedLease(namespace, lease);
314764
- src_logger.debug('LEADER_ELECTION_LEASE_CREATED', {
314765
- metadata: { name, namespace },
314766
- });
315326
+ operator_src_logger.debug(`A new leader election lease has been created for '${name}' in namespace '${namespace}'.`);
314767
315327
  }
314768
315328
  else {
314769
315329
  if (err.response)
314770
315330
  console.log(err.response);
314771
- src_logger.debug('LEADER_ELECTION_LEASE_RENEWAL_ERROR', {
314772
- metadata: { name, namespace, error: err },
314773
- });
315331
+ operator_src_logger.debug(`An error occurred while renewing the leader election lease for '${name}' in namespace '${namespace}': '${err}'.`);
314774
315332
  throw err;
314775
315333
  }
314776
315334
  }
@@ -314781,9 +315339,7 @@ async function tryAcquireOrRenewLease(namespace, leaseDurationSeconds) {
314781
315339
  async function acquireLease(namespace, cb, interval = 10000) {
314782
315340
  try {
314783
315341
  await tryAcquireOrRenewLease(namespace, interval / 1000);
314784
- src_logger.debug('LEADER_ELECTION_LEASE_ACQUIRED_EXEC_CALLBACK', {
314785
- metadata: { namespace },
314786
- });
315342
+ operator_src_logger.debug(`Successfully acquired the leader election lease in namespace '${namespace}'. Executing the callback.`);
314787
315343
  cb();
314788
315344
  }
314789
315345
  catch (err) {
@@ -314791,9 +315347,7 @@ async function acquireLease(namespace, cb, interval = 10000) {
314791
315347
  if (err instanceof LeaseAcquisitionError) {
314792
315348
  console.error(`Failed to acquire Lease, retrying in ${interval / 1000} seconds`);
314793
315349
  }
314794
- src_logger.silly('LEADER_ELECTION_LEASE_ACQUIRED_FAILED_RETRY', {
314795
- metadata: { retryIn: interval / 1000 },
314796
- });
315350
+ operator_src_logger.silly(`Failed to acquire the leader election lease; will retry in '${interval / 1000}' seconds.`);
314797
315351
  await setTimeout(() => acquireLease(namespace, cb), interval);
314798
315352
  }
314799
315353
  }
@@ -314822,7 +315376,7 @@ function processOperationPlan(item, op, handler) {
314822
315376
  }
314823
315377
  }
314824
315378
  catch (e) {
314825
- src_logger.error('TFWORKSPACE_PROCESSOR_PLAN_ERROR', {
315379
+ operator_src_logger.error('TFWORKSPACE_PROCESSOR_PLAN_ERROR', {
314826
315380
  metadata: { item, error: e, op },
314827
315381
  });
314828
315382
  throw e;
@@ -314873,7 +315427,7 @@ async function* doPlanPlainTextFormat(item, op, handler, action) {
314873
315427
  message: 'Planning process started',
314874
315428
  };
314875
315429
  const deps = await handler.resolveReferences();
314876
- src_logger.info('TFWORKSPACE_PROCESSOR_PLAN_ASSESS_DEPS', {
315430
+ operator_src_logger.info('TFWORKSPACE_PROCESSOR_PLAN_ASSESS_DEPS', {
314877
315431
  metadata: { item, deps },
314878
315432
  });
314879
315433
  const context = processOperationPlan_buildProvisionerContext(item, deps);
@@ -314909,7 +315463,7 @@ async function* doPlanPlainTextFormat(item, op, handler, action) {
314909
315463
  }
314910
315464
  catch (e) {
314911
315465
  await processOperationPlan_publishPlan(item, JSON.stringify(e));
314912
- src_logger.error('TFWORKSPACE_PROCESSOR_PLAN_OBSERVING_ERROR', {
315466
+ operator_src_logger.error('TFWORKSPACE_PROCESSOR_PLAN_OBSERVING_ERROR', {
314913
315467
  metadata: { item, error: e },
314914
315468
  });
314915
315469
  yield {
@@ -314967,7 +315521,7 @@ async function* processOperationPlan_doPlanJSONFormat(item, op, handler, action)
314967
315521
  message: 'Planning process started',
314968
315522
  };
314969
315523
  const deps = await handler.resolveReferences();
314970
- src_logger.info('TFWORKSPACE_PROCESSOR_PLAN_DO_PLAN_ASSESS_DEPS', {
315524
+ operator_src_logger.info('TFWORKSPACE_PROCESSOR_PLAN_DO_PLAN_ASSESS_DEPS', {
314971
315525
  metadata: { item, deps },
314972
315526
  });
314973
315527
  const context = processOperationPlan_buildProvisionerContext(item, deps);
@@ -315027,7 +315581,7 @@ async function* processOperationPlan_doPlanJSONFormat(item, op, handler, action)
315027
315581
  }
315028
315582
  catch (e) {
315029
315583
  console.error(e);
315030
- src_logger.error('TFWORKSPACE_PROCESSOR_PLAN_DO_PLAN_ERROR', {
315584
+ operator_src_logger.error('TFWORKSPACE_PROCESSOR_PLAN_DO_PLAN_ERROR', {
315031
315585
  metadata: { item, error: e },
315032
315586
  });
315033
315587
  yield {
@@ -315394,42 +315948,30 @@ async function ctx_buildContext(claim, namespace, command) {
315394
315948
  let cr = null;
315395
315949
  let deps = null;
315396
315950
  compute['resolveDeps'] = async () => {
315397
- src_logger.debug('TFWORKSPACE_RESOLVE_DEPS_FOR_CLAIM', {
315398
- metadata: { name: claim.name },
315399
- });
315951
+ operator_src_logger.debug(`The Terraform workspace is resolving dependencies for the claim '${claim.name}'.`);
315400
315952
  // First, we bring the previous CR, if any, to get the tfStateKey
315401
- src_logger.debug('TFWORKSPACE_RESOLVE_GET_PREVIOUS_CR', {
315402
- metadata: { name: claim.name },
315403
- });
315953
+ operator_src_logger.debug(`The Terraform workspace is resolving and getting the previous custom resource for claim '${claim.name}'.`);
315404
315954
  previousCR = await getCRfromClaimRef(claim.kind, claim.name, namespace);
315405
315955
  let tfStateKey = null;
315406
315956
  if (previousCR) {
315407
- src_logger.debug('TFWORKSPACE_RESOLVE_PREVIOUS_CR_FOUND', {
315408
- metadata: { name: claim.name },
315409
- });
315957
+ operator_src_logger.debug(`The Terraform workspace found a previous custom resource for claim '${claim.name}'.`);
315410
315958
  tfStateKey = previousCR.spec.firestartr.tfStateKey;
315411
315959
  }
315412
315960
  else
315413
- src_logger.debug('TFWORKSPACE_RESOLVE_PREVIOUS_CR_NOT_FOUND', {
315414
- metadata: { name: claim.name },
315415
- });
315961
+ operator_src_logger.debug(`The Terraform workspace did not find a previous custom resource for claim '${claim.name}'.`);
315416
315962
  // Then we render the claim passing a function to resolve the refs in the k8s API
315417
- src_logger.debug('TFWORKSPACE_RESOLVE_START_RENDERING', {
315418
- metadata: { name: claim.name },
315419
- });
315963
+ operator_src_logger.debug(`The Terraform workspace is starting the rendering process for claim '${claim.name}'.`);
315420
315964
  cr = await cdk8s_renderer.renderTfWorkspace(claim, tfStateKey, getTFWorkspaceRefs, namespace);
315421
315965
  cr['metadata']['namespace'] = namespace;
315422
- src_logger.debug('TFWORKSPACE_RESOLVE_CR_RENDERED', { metadata: { cr } });
315966
+ operator_src_logger.debug(`The Terraform workspace has finished rendering the custom resource '${cr.kind}/${cr.metadata.name}' in namespace '${cr.metadata.namespace}'.`);
315423
315967
  // Finally, we resolve the deps in the rendered CR
315424
315968
  deps = await resolve(cr, getItemByItemPath, getSecret, namespace);
315425
- src_logger.debug('TFWORKSPACE_RESOLVE_DEPS_RESOLVED', {
315426
- metadata: { name: claim.name },
315427
- });
315969
+ operator_src_logger.debug(`The Terraform workspace has finished resolving all dependencies for claim '${claim.name}'.`);
315428
315970
  };
315429
315971
  compute['dryRunExec'] = async () => {
315430
315972
  // We assume that if there is no previous CR, we are creating a new one
315431
315973
  // This will be preceeded by the resolveDeps function
315432
- src_logger.debug('TFWORKSPACE_DRY_RUN_VALIDATING_CR', { metadata: { cr } });
315974
+ operator_src_logger.debug(`The Terraform workspace is dry-running the validation for custom resource '${cr.kind}/${cr.metadata.name}' in namespace '${cr.metadata.namespace}'.`);
315433
315975
  if (!previousCR) {
315434
315976
  await createDryRun(cr, namespace);
315435
315977
  }
@@ -315437,17 +315979,15 @@ async function ctx_buildContext(claim, namespace, command) {
315437
315979
  cr.metadata.resourceVersion = previousCR.metadata.resourceVersion;
315438
315980
  await updateDryRun(cr, namespace);
315439
315981
  }
315440
- src_logger.debug('TFWORKSPACE_DRY_RUN_VALIDATED_CR', { metadata: { cr } });
315982
+ operator_src_logger.debug(`The Terraform workspace has finished validating the custom resource '${cr.kind}/${cr.metadata.name}' in namespace '${cr.metadata.namespace}'.`);
315441
315983
  };
315442
315984
  compute['runProvision'] = async () => {
315443
- src_logger.debug('TFWORKSPACE_RUN_PROVISION_INIT_TERRAFORM', {
315985
+ operator_src_logger.debug('TFWORKSPACE_RUN_PROVISION_INIT_TERRAFORM', {
315444
315986
  metadata: { cr, command },
315445
315987
  });
315446
315988
  const data = await buildProvisionerContext(cr, deps);
315447
315989
  const result = await runTerraformProvisioner(data, command);
315448
- src_logger.debug('TFWORKSPACE_RUN_PROVISION_FINISHED_TERRAFORM', {
315449
- metadata: { cr, command },
315450
- });
315990
+ operator_src_logger.debug(`The Terraform workspace has finished the '${command}' command for provisioning custom resource '${cr.kind}/${cr.metadata.name}' in namespace '${cr.metadata.namespace}'.`);
315451
315991
  return result;
315452
315992
  };
315453
315993
  return new Ctx({}, compute);
@@ -315600,27 +316140,28 @@ var sdk_metrics_build_src = __nccwpck_require__(84016);
315600
316140
  ;// CONCATENATED MODULE: ../operator/src/metrics/CRStates.ts
315601
316141
 
315602
316142
 
316143
+
315603
316144
  const INTERVAL_IN_SEGS = 60;
315604
316145
  class CRStateMetrics {
315605
316146
  constructor(kind, namespace, meter) {
315606
316147
  this.kind = kind;
315607
- this.provisionedGauge = meter.createGauge(`firestartr_${this.kind}_provisioned_total`, {
315608
- description: `Total number of ${this.kind} in PROVISIONED state`,
316148
+ this.provisionedGauge = meter.createGauge('firestartr_provisioned_total', {
316149
+ description: 'Total number of CRs in PROVISIONED state',
315609
316150
  });
315610
- this.provisioningGauge = meter.createGauge(`firestartr_${this.kind}_provisioning_total`, {
315611
- description: `Total number of ${this.kind} in PROVISIONING state`,
316151
+ this.provisioningGauge = meter.createGauge('firestartr_provisioning_total', {
316152
+ description: 'Total number of CRs in PROVISIONING state',
315612
316153
  });
315613
- this.outOfSyncGauge = meter.createGauge(`firestartr_${this.kind}_out_of_sync_total`, {
315614
- description: `Total number of ${this.kind} in OUT_OF_SYNC state`,
316154
+ this.outOfSyncGauge = meter.createGauge('firestartr_out_of_sync_total', {
316155
+ description: 'Total number of CRs in OUT_OF_SYNC state',
315615
316156
  });
315616
- this.errorGauge = meter.createGauge(`firestartr_${this.kind}_error_total`, {
315617
- description: `Total number of ${this.kind} in ERROR state`,
316157
+ this.errorGauge = meter.createGauge('firestartr_error_total', {
316158
+ description: 'Total number of CRs in ERROR state',
315618
316159
  });
315619
- this.planningGauge = meter.createGauge(`firestartr_${this.kind}_planning_total`, {
315620
- description: `Total number of ${this.kind} in PLANNING state`,
316160
+ this.planningGauge = meter.createGauge('firestartr_planning_total', {
316161
+ description: 'Total number of CRs in PLANNING state',
315621
316162
  });
315622
- this.deletedGauge = meter.createGauge(`firestartr_${this.kind}_deleted_total`, {
315623
- description: `Total number of ${this.kind} in DELETED state`,
316163
+ this.deletedGauge = meter.createGauge('firestartr_deleted_total', {
316164
+ description: 'Total number of CRs in DELETED state',
315624
316165
  });
315625
316166
  this.namespace = namespace;
315626
316167
  }
@@ -315677,19 +316218,33 @@ class CRStateMetrics {
315677
316218
  }
315678
316219
  this.provisionedGauge.record(provisionedCount, {
315679
316220
  namespace: this.namespace,
316221
+ kind: this.kind,
315680
316222
  });
315681
316223
  this.provisioningGauge.record(provisioningCount, {
315682
316224
  namespace: this.namespace,
316225
+ kind: this.kind,
316226
+ });
316227
+ this.planningGauge.record(planningCount, {
316228
+ namespace: this.namespace,
316229
+ kind: this.kind,
316230
+ });
316231
+ this.deletedGauge.record(deletedCount, {
316232
+ namespace: this.namespace,
316233
+ kind: this.kind,
316234
+ });
316235
+ this.outOfSyncGauge.record(outOfSyncCount, {
316236
+ namespace: this.namespace,
316237
+ kind: this.kind,
316238
+ });
316239
+ this.errorGauge.record(errorCount, {
316240
+ namespace: this.namespace,
316241
+ kind: this.kind,
315683
316242
  });
315684
- this.planningGauge.record(planningCount, { namespace: this.namespace });
315685
- this.deletedGauge.record(deletedCount, { namespace: this.namespace });
315686
- this.outOfSyncGauge.record(outOfSyncCount, { namespace: this.namespace });
315687
- this.errorGauge.record(errorCount, { namespace: this.namespace });
315688
316243
  }
315689
316244
  catch (err) {
315690
- console.log(err);
316245
+ console.log(`CRStateMetrics: update ${err}`);
315691
316246
  this.onUpdate = false;
315692
- throw new Error(`CRStateMetrics: update ${err}`);
316247
+ operator_src_logger.error('CR_METRICS_UPDATE', { error: err });
315693
316248
  }
315694
316249
  this.onUpdate = false;
315695
316250
  }
@@ -315783,7 +316338,7 @@ async function startCRStates(meter, kindList, namespace) {
315783
316338
 
315784
316339
  const deploymentName = catalog_common.environment.getFromEnvironment(catalog_common.types.envVars.operatorDeploymentName) || 'firestartr-firestartr-controller';
315785
316340
  const DEFAULT_OPERATOR_DEPLOY = (/* unused pure expression or super */ null && (deploymentName));
315786
- async function tfPlanner(claimFilePath, claim, namespace, debug, jobTtl, cmd = 'plan') {
316341
+ async function tfPlanner(claimFilePath, claim, namespace, debug, jobTtl = 300, cmd = 'plan') {
315787
316342
  const { kc } = await getConnection();
315788
316343
  const k8sApi = kc.makeApiClient(client.AppsV1Api);
315789
316344
  const batchV1Api = kc.makeApiClient(client.BatchV1Api);
@@ -315803,10 +316358,12 @@ async function tfPlanner(claimFilePath, claim, namespace, debug, jobTtl, cmd = '
315803
316358
  ? '/library/scripts/run.sh'
315804
316359
  : '/library/run.sh';
315805
316360
  job.spec = new client.V1JobSpec();
315806
- if (jobTtl)
315807
- job.spec.ttlSecondsAfterFinished = jobTtl;
316361
+ job.spec.ttlSecondsAfterFinished = jobTtl;
315808
316362
  job.spec.template = controllerDeploy.body.spec
315809
316363
  .template;
316364
+ // set activeDeadlineSeconds to force terminate jobs that exceed this time
316365
+ // see https://kubernetes.io/docs/concepts/workloads/controllers/job/#job-termination-and-cleanup
316366
+ job.spec.activeDeadlineSeconds = 3600;
315810
316367
  job.spec.template.spec.containers[0].command = [
315811
316368
  'sh',
315812
316369
  '-c',
@@ -315818,9 +316375,12 @@ async function tfPlanner(claimFilePath, claim, namespace, debug, jobTtl, cmd = '
315818
316375
  }
315819
316376
  job.spec.template.spec.restartPolicy = 'Never';
315820
316377
  job.metadata = metadata;
316378
+ // we exclude logs to be sent to datadog
316379
+ job.spec.template.metadata.annotations = {
316380
+ 'ad.datadoghq.com/logs_exclude': 'true',
316381
+ };
315821
316382
  await batchV1Api.createNamespacedJob(namespace, job);
315822
316383
  await copyClaimAndGetLogs(namespace, job.metadata.name, claimFilePath);
315823
- await batchV1Api.deleteNamespacedJob(job.metadata.name, namespace);
315824
316384
  }
315825
316385
  async function copyClaimAndGetLogs(namespace, jobName, sourcePath) {
315826
316386
  const { kc } = await getConnection();
@@ -315974,7 +316534,7 @@ function runOperator(opts) {
315974
316534
  importModeActive = importMode;
315975
316535
  if (importModeSkipPlan)
315976
316536
  importModeSkipPlanActive = importModeSkipPlan;
315977
- src_logger.info('START_OPERATOR', { ...opts });
316537
+ operator_src_logger.info(`started the operator with options ${JSON.stringify(opts)}`);
315978
316538
  const run = ignoreLease
315979
316539
  ? (_namespace, cb) => cb()
315980
316540
  : acquireLease;
@@ -315994,7 +316554,7 @@ function runOperator(opts) {
315994
316554
  .catch((e) => {
315995
316555
  console.log('exit catch kind', kind);
315996
316556
  console.error(e);
315997
- src_logger.error('CRASHED', { kind, error: e });
316557
+ operator_src_logger.error('CRASHED', { kind, error: e });
315998
316558
  })
315999
316559
  .finally(() => {
316000
316560
  console.log('kind', kind);
@@ -316026,7 +316586,7 @@ function getProvisionImplementation(plural) {
316026
316586
  }
316027
316587
  if (!implementation)
316028
316588
  throw new Error(`No implementation found for ${plural}`);
316029
- src_logger.info('GOT_PROVISION_IMPL', { kind: plural });
316589
+ operator_src_logger.info(`Retrieved the provision implementation for the kind '${plural}'`);
316030
316590
  return implementation;
316031
316591
  }
316032
316592