@firestartr/cli 1.47.0 → 1.48.1-snapshot-0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (27) hide show
  1. package/build/index.js +1312 -766
  2. package/build/packages/catalog_common/index.d.ts +8 -0
  3. package/build/packages/catalog_common/src/io/write.d.ts +2 -2
  4. package/build/packages/catalog_common/src/logger/index.d.ts +2 -0
  5. package/build/packages/catalog_common/src/logger/logger.d.ts +9 -0
  6. package/build/packages/catalog_common/src/logger/utils.d.ts +1 -0
  7. package/build/packages/cdk8s_renderer/src/validations/crSize.d.ts +1 -0
  8. package/build/packages/features_preparer/src/logger.d.ts +9 -0
  9. package/build/packages/features_renderer/index.d.ts +10 -1
  10. package/build/packages/features_renderer/src/auxiliar.d.ts +71 -0
  11. package/build/packages/features_renderer/src/render.d.ts +2 -0
  12. package/build/packages/github/index.d.ts +5 -0
  13. package/build/packages/github/src/check_run.d.ts +83 -0
  14. package/build/packages/github/src/logger.d.ts +9 -0
  15. package/build/packages/operator/src/logger.d.ts +2 -2
  16. package/build/packages/operator/src/user-feedback-ops/gh-checkrun.d.ts +5 -0
  17. package/build/packages/operator/src/user-feedback-ops/tf-checkrun.d.ts +5 -0
  18. package/build/packages/provisioner/src/cdktf.d.ts +3 -1
  19. package/build/packages/provisioner/src/logger.d.ts +9 -0
  20. package/build/packages/provisioner/src/resources/resource.d.ts +10 -0
  21. package/build/packages/provisioner/src/terraform.d.ts +7 -5
  22. package/build/packages/terraform_provisioner/index.d.ts +1 -1
  23. package/build/packages/terraform_provisioner/src/logger.d.ts +9 -0
  24. package/build/packages/terraform_provisioner/src/project_tf.d.ts +4 -0
  25. package/build/packages/terraform_provisioner/src/project_tf_remote.d.ts +4 -0
  26. package/build/packages/terraform_provisioner/src/utils.d.ts +8 -6
  27. package/package.json +1 -1
package/build/index.js CHANGED
@@ -288941,14 +288941,131 @@ var external_path_ = __nccwpck_require__(71017);
288941
288941
  var external_path_default = /*#__PURE__*/__nccwpck_require__.n(external_path_);
288942
288942
  // EXTERNAL MODULE: ../../node_modules/yaml/dist/index.js
288943
288943
  var yaml_dist = __nccwpck_require__(8447);
288944
- // EXTERNAL MODULE: ../../node_modules/debug/src/index.js
288945
- var src = __nccwpck_require__(67984);
288946
- var src_default = /*#__PURE__*/__nccwpck_require__.n(src);
288944
+ // EXTERNAL MODULE: ../../node_modules/winston/lib/winston.js
288945
+ var winston = __nccwpck_require__(66752);
288946
+ var winston_default = /*#__PURE__*/__nccwpck_require__.n(winston);
288947
+ ;// CONCATENATED MODULE: ../catalog_common/src/logger/utils.ts
288948
+ // https://siderite.dev/blog/jsonstringify-with-circular-references.html/#at2011170946
288949
+ function fixCircularReferences(o) {
288950
+ const weirdTypes = [
288951
+ Int8Array,
288952
+ Uint8Array,
288953
+ Uint8ClampedArray,
288954
+ Int16Array,
288955
+ Uint16Array,
288956
+ Int32Array,
288957
+ Uint32Array,
288958
+ BigInt64Array,
288959
+ BigUint64Array,
288960
+ Float32Array,
288961
+ Float64Array,
288962
+ ArrayBuffer,
288963
+ SharedArrayBuffer,
288964
+ DataView,
288965
+ ];
288966
+ const defs = new Map();
288967
+ return (k, v) => {
288968
+ if (k && v === o) {
288969
+ return `[${String(k)} is the same as original object]`;
288970
+ }
288971
+ if (v === undefined || v === null) {
288972
+ return v;
288973
+ }
288974
+ // Check for the Timeout constructor. This will also catch TimersList indirectly
288975
+ // since TimersList is part of the circular structure *of* a Timeout object.
288976
+ if (v && v.constructor && v.constructor.name === 'Timeout') {
288977
+ return '[Node.js internal timer object]';
288978
+ }
288979
+ // An alternative check could be `v instanceof Timeout` but the constructor name
288980
+ // check is more reliable for these internal types.
288981
+ const weirdType = weirdTypes.find((t) => v instanceof t);
288982
+ if (weirdType) {
288983
+ return weirdType.toString();
288984
+ }
288985
+ if (typeof v === 'function') {
288986
+ return v.toString();
288987
+ }
288988
+ if (v && typeof v === 'object') {
288989
+ const def = defs.get(v);
288990
+ if (def) {
288991
+ return `[${String(k)} is the same as ${def}]`;
288992
+ }
288993
+ defs.set(v, String(k));
288994
+ }
288995
+ return v;
288996
+ };
288997
+ }
288998
+
288999
+ ;// CONCATENATED MODULE: ../catalog_common/src/logger/logger.ts
289000
+
289001
+
289002
+ const validLogLevels = [
289003
+ 'error',
289004
+ 'warn',
289005
+ 'info',
289006
+ 'debug',
289007
+ 'verbose',
289008
+ 'silly',
289009
+ ];
289010
+ let initiated = false;
289011
+ let logger = null;
289012
+ // Type guard to check if a value is a valid LogLevel
289013
+ function isValidLogLevel(level) {
289014
+ return (typeof level === 'string' && validLogLevels.includes(level));
289015
+ }
289016
+ function initLogger() {
289017
+ if (initiated)
289018
+ return;
289019
+ const logLevel = process.env.LOG_LEVEL && isValidLogLevel(process.env.LOG_LEVEL)
289020
+ ? process.env.LOG_LEVEL
289021
+ : 'info';
289022
+ logger = winston_default().createLogger({
289023
+ level: logLevel,
289024
+ exitOnError: false,
289025
+ format: winston.format.combine(winston.format.timestamp({ format: 'YYYY-MM-DD HH:mm:ss' }), winston.format.json()),
289026
+ transports: [
289027
+ new winston.transports.Console({
289028
+ level: logLevel,
289029
+ }),
289030
+ ],
289031
+ });
289032
+ initiated = true;
289033
+ }
289034
+ function doLog(level, args) {
289035
+ initLogger();
289036
+ const [message, data] = args;
289037
+ let finalMessage = message;
289038
+ if (data) {
289039
+ const fx = fixCircularReferences(data.metadata);
289040
+ try {
289041
+ finalMessage =
289042
+ finalMessage + ' | ' + JSON.stringify(data?.metadata, fx, 2);
289043
+ }
289044
+ catch (err) {
289045
+ console.error(`Serializing ${message}: ${err}`);
289046
+ return;
289047
+ }
289048
+ }
289049
+ logger[level].apply(logger, [finalMessage]);
289050
+ }
289051
+ const logger_log = {
289052
+ error: (...args) => doLog('error', args),
289053
+ warn: (...args) => doLog('warn', args),
289054
+ info: (...args) => doLog('info', args),
289055
+ debug: (...args) => doLog('debug', args),
289056
+ verbose: (...args) => doLog('verbose', args),
289057
+ silly: (...args) => doLog('silly', args),
289058
+ };
289059
+ /* harmony default export */ const logger_logger = (logger_log);
289060
+
289061
+ ;// CONCATENATED MODULE: ../catalog_common/src/logger/index.ts
289062
+
289063
+ /* harmony default export */ const src_logger = (logger_logger);
289064
+
288947
289065
  ;// CONCATENATED MODULE: ../catalog_common/src/io/common.ts
288948
289066
 
288949
289067
 
288950
289068
 
288951
- const messageLog = src_default()('firestartr:catalog_common:io:common');
288952
289069
  const ComponentPaths = (/* unused pure expression or super */ null && ([
288953
289070
  'apiVersion',
288954
289071
  'kind',
@@ -289033,25 +289150,25 @@ function transformKind(kind) {
289033
289150
  }
289034
289151
  }
289035
289152
  function getPath(kind, name, catalogPath) {
289036
- messageLog(`Getting path for kind ${kind} and name ${name} in catalog path ${catalogPath}`);
289153
+ src_logger.debug(`Getting path for kind ${kind} and name ${name} in catalog path ${catalogPath}`);
289037
289154
  return external_path_.join(catalogPath, transformKind(kind), name + '.yaml');
289038
289155
  }
289039
289156
  function getKindPath(kind, catalogPath) {
289040
- messageLog(`Getting path for kind ${kind} in catalog path ${catalogPath}`);
289157
+ src_logger.debug(`Getting path for kind ${kind} in catalog path ${catalogPath}`);
289041
289158
  return external_path_.join(catalogPath, transformKind(kind));
289042
289159
  }
289043
289160
  function fromYaml(data) {
289044
289161
  const result = yaml_dist.parse(data);
289045
- messageLog('Loading YAML data: %O', result);
289162
+ src_logger.debug('Loading YAML data: %O', result);
289046
289163
  return result;
289047
289164
  }
289048
289165
  function toYaml(data, opts = {}) {
289049
- messageLog('opts', opts);
289166
+ src_logger.debug('opts', opts);
289050
289167
  const result = yaml_dist.stringify(data);
289051
289168
  return result;
289052
289169
  }
289053
289170
  function dumpYaml(data) {
289054
- messageLog('Dumping object data to YAML %O', data);
289171
+ src_logger.debug('Dumping object data to YAML %O', data);
289055
289172
  return yaml_dist.stringify(data);
289056
289173
  }
289057
289174
 
@@ -289059,7 +289176,6 @@ function dumpYaml(data) {
289059
289176
  var external_child_process_ = __nccwpck_require__(32081);
289060
289177
  ;// CONCATENATED MODULE: ../catalog_common/src/generic/random.ts
289061
289178
 
289062
- const random_messageLog = src_default()('firestartr:catalog_common:generic:random');
289063
289179
  function randomString(length = 10) {
289064
289180
  let result = '';
289065
289181
  const characters = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789';
@@ -289069,7 +289185,7 @@ function randomString(length = 10) {
289069
289185
  result += characters.charAt(Math.floor(Math.random() * charactersLength));
289070
289186
  counter += 1;
289071
289187
  }
289072
- random_messageLog('Generated random string %s', result);
289188
+ src_logger.debug(`Generated random string ${result}`);
289073
289189
  return result;
289074
289190
  }
289075
289191
  function shuffleArray(array) {
@@ -289104,17 +289220,16 @@ function shuffleObject(obj, shuffleArrays = false) {
289104
289220
 
289105
289221
 
289106
289222
 
289107
- const clone_catalog_messageLog = src_default()('firestartr:catalog_common:io:clone_catalog');
289108
289223
  function cloneCatalog(catalogPath, dest = _calculateRandomDestination()) {
289109
- clone_catalog_messageLog(`Cloning catalog from ${catalogPath} to ${dest}`);
289224
+ src_logger.info(`Cloning catalog from ${catalogPath} to ${dest}`);
289110
289225
  return new Promise((ok, ko) => {
289111
289226
  (0,external_child_process_.exec)(`cp -a ${catalogPath} ${dest}`, (error, _stdout, _stderr) => {
289112
289227
  if (error) {
289113
- clone_catalog_messageLog(`Error cloning catalog: ${error.message}`);
289228
+ src_logger.error(`Error cloning catalog: ${error.message}`);
289114
289229
  return ko(error.message);
289115
289230
  }
289116
289231
  else {
289117
- clone_catalog_messageLog(`Catalog cloned to successfully to ${dest}`);
289232
+ src_logger.info(`Catalog cloned to successfully to ${dest}`);
289118
289233
  return ok(dest);
289119
289234
  }
289120
289235
  });
@@ -289133,29 +289248,28 @@ var external_fs_default = /*#__PURE__*/__nccwpck_require__.n(external_fs_);
289133
289248
 
289134
289249
 
289135
289250
 
289136
- const write_messageLog = src_default()('firestartr:catalog_common:io:write');
289137
289251
  function writeEntity(entity, path) {
289138
289252
  try {
289139
289253
  entity['metadata']['annotations']['fire-starter.dev/timestamp'] =
289140
289254
  Math.floor(Date.now() / 1000).toString();
289141
289255
  //If we have an status, we remove it
289142
- write_messageLog(`Writing to catalog ${path} entity %O`, entity);
289256
+ src_logger.debug(`Writing to catalog ${path} entity ${entity}`);
289143
289257
  external_fs_.writeFileSync(getPath(entity['kind'], entity['metadata']['name'], path), dumpYaml(entity));
289144
289258
  }
289145
289259
  catch (err) {
289146
- write_messageLog('Error writing entity, error %O', err);
289260
+ src_logger.error(`Error writing entity '${entity.kind}', error ${err}`);
289147
289261
  throw `writeEntity: ${entity.kind} ${err}`;
289148
289262
  }
289149
289263
  }
289150
289264
  function writeClaim(claim, claimsPath) {
289151
289265
  try {
289152
289266
  const kindFolder = `${claim['kind']}s`.toLowerCase().replace('claim', '');
289153
- write_messageLog(`Writing to gitops ${claimsPath}/${kindFolder} claim %O`, claim);
289267
+ src_logger.debug(`Writing to gitops ${claimsPath}/${kindFolder} claim ${claim}`);
289154
289268
  external_fs_.mkdirSync(external_path_.join(claimsPath, kindFolder), { recursive: true });
289155
289269
  external_fs_.writeFileSync(getPathClaim(claim['kind'], claim['name'], claimsPath), dumpYaml(claim));
289156
289270
  }
289157
289271
  catch (err) {
289158
- write_messageLog('Error writing claim, error %O', err);
289272
+ src_logger.error(`Error writing claim, error ${err}`);
289159
289273
  throw `writeClaim: ${claim.kind} ${err}`;
289160
289274
  }
289161
289275
  }
@@ -289172,7 +289286,7 @@ function writeYamlFile(fileName, data, pathFile = '/tmp') {
289172
289286
  external_fs_.writeFileSync(external_path_.join(pathFile, fileName), dumpYaml(data));
289173
289287
  }
289174
289288
  catch (err) {
289175
- write_messageLog('Error writing yaml file, error %O', err);
289289
+ src_logger.error(`Error writing yaml file, error ${err}`);
289176
289290
  throw `writeYamlFile: ${fileName} ${err}`;
289177
289291
  }
289178
289292
  }
@@ -289181,23 +289295,23 @@ function getPathClaim(kind, name, claimsPath) {
289181
289295
  }
289182
289296
  function renameEntity(entity, catalogPath, oldname) {
289183
289297
  try {
289184
- write_messageLog('Renaming oldname %s in %O', oldname, entity);
289298
+ src_logger.debug(`Renaming oldname ${oldname} in ${entity}`);
289185
289299
  const oldPath = getPath(entity.kind, oldname, catalogPath);
289186
289300
  const newPath = getPath(entity.kind, entity.metadata.name, catalogPath);
289187
289301
  external_fs_.renameSync(oldPath, newPath);
289188
289302
  }
289189
289303
  catch (err) {
289190
- write_messageLog('Error writing entity, error %O', err);
289304
+ src_logger.error(`Error writing entity, error ${err}`);
289191
289305
  throw `renameEntity: ${entity.kind} ${err}`;
289192
289306
  }
289193
289307
  }
289194
289308
  function removeEntity(entity, catalogPath) {
289195
289309
  try {
289196
- write_messageLog(`Removing entity ${entity.kind}/${entity.metadata.name} in catalog ${catalogPath}`);
289310
+ src_logger.debug(`Removing entity ${entity.kind}/${entity.metadata.name} in catalog ${catalogPath}`);
289197
289311
  external_fs_.rmSync(getPath(entity.kind, entity.metadata.name, catalogPath));
289198
289312
  }
289199
289313
  catch (err) {
289200
- write_messageLog(`Error removing entity ${entity.kind}/${entity.metadata.name} in catalog ${catalogPath}: ${err}`);
289314
+ src_logger.error(`Error removing entity ${entity.kind}/${entity.metadata.name} in catalog ${catalogPath}: ${err}`);
289201
289315
  throw `removeEntity: ${entity.kind} ${err}`;
289202
289316
  }
289203
289317
  }
@@ -289213,21 +289327,21 @@ function moveFile(oldPath, newPath) {
289213
289327
  external_fs_.cpSync(oldPath, newPath);
289214
289328
  external_fs_.rmSync(oldPath);
289215
289329
  }
289216
- function writeFunctionLog(functionName, log) {
289330
+ function writeFunctionLog(functionName, logStream) {
289217
289331
  try {
289218
- external_fs_.writeFileSync(external_path_.join('/tmp', `${functionName}.${randomString(5)}.log`), log + '\n');
289332
+ external_fs_.writeFileSync(external_path_.join('/tmp', `${functionName}.${randomString(5)}.log`), logStream + '\n');
289219
289333
  }
289220
289334
  catch (err) {
289221
- write_messageLog('Error writing log, error %O', err);
289335
+ src_logger.error(`Error writing log, error ${err}`);
289222
289336
  throw `writeLog: ${functionName} ${err}`;
289223
289337
  }
289224
289338
  }
289225
- function writeLogFile(fileName, log) {
289339
+ function writeLogFile(fileName, logStream) {
289226
289340
  try {
289227
- external_fs_.appendFileSync(external_path_.join('/tmp', fileName + '.log'), log + '\n');
289341
+ external_fs_.appendFileSync(external_path_.join('/tmp', fileName + '.log'), logStream + '\n');
289228
289342
  }
289229
289343
  catch (err) {
289230
- write_messageLog('Error writing log, error %O', err);
289344
+ src_logger.error(`Error writing log, error ${err}`);
289231
289345
  throw `writeLog: ${fileName} ${err}`;
289232
289346
  }
289233
289347
  }
@@ -289237,7 +289351,6 @@ function writeLogFile(fileName, log) {
289237
289351
 
289238
289352
 
289239
289353
 
289240
- const read_messageLog = src_default()('firestartr:catalog_common:io:read');
289241
289354
  function readEntity(kind, name, catalogPaths) {
289242
289355
  try {
289243
289356
  if (typeof catalogPaths === 'string') {
@@ -289246,7 +289359,7 @@ function readEntity(kind, name, catalogPaths) {
289246
289359
  let data = false;
289247
289360
  for (const catalogPath of catalogPaths) {
289248
289361
  try {
289249
- read_messageLog(`Reading entity ${kind}/${name} from catalog ${catalogPath}`);
289362
+ src_logger.debug(`Reading entity ${kind}/${name} from catalog ${catalogPath}`);
289250
289363
  const entityPath = getPath(kind, name, catalogPath);
289251
289364
  if (external_fs_.existsSync(entityPath)) {
289252
289365
  if (data) {
@@ -289256,7 +289369,7 @@ function readEntity(kind, name, catalogPaths) {
289256
289369
  }
289257
289370
  }
289258
289371
  catch (err) {
289259
- read_messageLog('readEntity: cached error %s', err);
289372
+ src_logger.debug('readEntity: cached error %s', err);
289260
289373
  if (err === 'DUPLICATED') {
289261
289374
  throw `Error reading entity: Duplicated ${kind}/${name} in ${catalogPaths.join(', ')}`;
289262
289375
  }
@@ -289268,7 +289381,7 @@ function readEntity(kind, name, catalogPaths) {
289268
289381
  return fromYaml(data);
289269
289382
  }
289270
289383
  catch (err) {
289271
- read_messageLog(err);
289384
+ src_logger.error(err);
289272
289385
  throw `readEntity->: ${kind}/${name}: ${err}`;
289273
289386
  }
289274
289387
  }
@@ -289276,13 +289389,13 @@ function listByKind(kind, catalogPaths, callback, exclude = []) {
289276
289389
  if (typeof catalogPaths === 'string') {
289277
289390
  catalogPaths = [catalogPaths];
289278
289391
  }
289279
- read_messageLog('CATALOGS_PATHS_ %O', catalogPaths);
289392
+ src_logger.debug(`CATALOGS_PATHS_ ${catalogPaths}`);
289280
289393
  const list = [];
289281
289394
  catalogPaths.forEach((catalogPath) => {
289282
289395
  list.push(...external_fs_.readdirSync(getKindPath(kind, catalogPath)));
289283
289396
  });
289284
- read_messageLog('LIST_ %O', list);
289285
- read_messageLog(`Listing entities of kind ${kind} from catalogs`);
289397
+ src_logger.debug(`LIST_ ${list}`);
289398
+ src_logger.debug(`Listing entities of kind ${kind} from catalogs`);
289286
289399
  return list
289287
289400
  .filter((file) => file.match(/\.yaml$/))
289288
289401
  .filter((file) => exclude.indexOf(file.replace(/\.yaml/, '')) === -1)
@@ -290213,6 +290326,9 @@ class CsvWriter {
290213
290326
  }
290214
290327
  /* harmony default export */ const csv_generator = (CsvWriter);
290215
290328
 
290329
+ // EXTERNAL MODULE: ../../node_modules/debug/src/index.js
290330
+ var src = __nccwpck_require__(67984);
290331
+ var src_default = /*#__PURE__*/__nccwpck_require__.n(src);
290216
290332
  ;// CONCATENATED MODULE: ../catalog_common/src/generic/logger.ts
290217
290333
 
290218
290334
 
@@ -290275,9 +290391,8 @@ var lodash_default = /*#__PURE__*/__nccwpck_require__.n(lodash);
290275
290391
 
290276
290392
 
290277
290393
  const { camelCase } = (lodash_default());
290278
- const name_log = src_default()('firestartr:catalog_common:generic:name');
290279
290394
  function normalizeName(name) {
290280
- name_log('Normalizing name %s', name);
290395
+ src_logger.debug(`Normalizing name ${name}`);
290281
290396
  return name.replace(/[^a-z0-9]/gi, '-').toLowerCase();
290282
290397
  }
290283
290398
  function transformKeysToCamelCase(obj) {
@@ -290618,7 +290733,6 @@ const ExternalSecretsApiGroup = 'external-secrets.io';
290618
290733
 
290619
290734
  ;// CONCATENATED MODULE: ../catalog_common/src/environment/index.ts
290620
290735
 
290621
- const environment_messageLog = src_default()('firestartr:catalog_common:environment');
290622
290736
  function getFromEnvironment(envVar) {
290623
290737
  return process.env[envVar];
290624
290738
  }
@@ -290636,7 +290750,7 @@ function getFromEnvironmentAsBoolean(envVar) {
290636
290750
  }
290637
290751
  function checkExistOnEnvironment(envVar) {
290638
290752
  const environmentValue = getFromEnvironment(envVar);
290639
- environment_messageLog(`Checking if environment variable ${envVar} exists: ${environmentValue}`);
290753
+ src_logger.debug(`Checking if environment variable ${envVar} exists: ${environmentValue}`);
290640
290754
  if (!environmentValue || environmentValue === '') {
290641
290755
  return false;
290642
290756
  }
@@ -290682,30 +290796,29 @@ const fullMembersTeam = getFromEnvironmentWithDefault(envVars.fullOrgGroup, `${o
290682
290796
  ;// CONCATENATED MODULE: ../catalog_common/src/features/tarballs.ts
290683
290797
 
290684
290798
 
290685
- const tarballs_messageLog = src_default()('firestartr:catalog_common:features:tarballs');
290686
290799
  function getFeatureZipDownloadPath(featureName, version, owner, repo) {
290687
290800
  const featureDownloadPath = `/tmp/${basicFeaturePath(featureName, version, owner, repo)}-zipball.zip`;
290688
- tarballs_messageLog('Feature tarball download path %s', featureDownloadPath);
290801
+ src_logger.debug(`Feature tarball download path ${featureDownloadPath}`);
290689
290802
  return featureDownloadPath;
290690
290803
  }
290691
290804
  function removeFeatureTarball(featureName, version, owner, repo) {
290692
290805
  const featurePath = getFeatureZipDownloadPath(featureName, version, owner, repo);
290693
- tarballs_messageLog('Removing feature tarball %s', featurePath);
290806
+ src_logger.debug(`Removing feature tarball ${featurePath}`);
290694
290807
  external_fs_.unlinkSync(featurePath);
290695
- tarballs_messageLog(`Removed tarball for feature ${featureName} and version ${version}: ${featurePath}`);
290808
+ src_logger.debug(`Removed tarball for feature ${featureName} and version ${version}: ${featurePath}`);
290696
290809
  }
290697
290810
  function featureTarballExists(featureName, version, owner, repo) {
290698
290811
  const featurePath = getFeatureZipDownloadPath(featureName, version, owner, repo);
290699
290812
  const exists = external_fs_.existsSync(featurePath);
290700
- tarballs_messageLog(`Tarball ${featurePath} exists? ${exists}`);
290813
+ src_logger.debug(`Tarball ${featurePath} exists? ${exists}`);
290701
290814
  return exists;
290702
290815
  }
290703
290816
  function getFeaturesExtractPath(featureName, version, owner, repo, options = {}) {
290704
290817
  const { createIfNotExists } = options;
290705
290818
  const extractPath = `/tmp/${basicFeaturePath(featureName, version, owner, repo)}-extract`;
290706
- tarballs_messageLog('Extract path %s', extractPath);
290819
+ src_logger.debug(`Extract path ${extractPath}`);
290707
290820
  if (createIfNotExists && !external_fs_.existsSync(extractPath)) {
290708
- tarballs_messageLog('Extract path %s does not exist, creating', extractPath);
290821
+ src_logger.debug(`Extract path ${extractPath} does not exist, creating`);
290709
290822
  external_fs_.mkdirSync(extractPath, { recursive: true });
290710
290823
  }
290711
290824
  return extractPath;
@@ -290722,17 +290835,16 @@ function trasformLeg(leg) {
290722
290835
 
290723
290836
 
290724
290837
 
290725
- const features_io_messageLog = src_default()('firestartr:catalog_common:features:features_io');
290726
290838
  function getFeatureRenderedPathForEntity(entity, featureName, basePath = '/tmp') {
290727
290839
  const entityFolderName = `${entity.metadata.name}`.toLowerCase();
290728
290840
  return external_path_default().join(basePath, entityFolderName, featureName);
290729
290841
  }
290730
290842
  function getFeatureRenderedConfigForComponent(entity, featureName, basePath = '/tmp/features') {
290731
- features_io_messageLog('Getting rendered config for component %s and feature %s', entity.name, featureName);
290843
+ src_logger.info(`Getting rendered config for component ${entity.name}and feature ${featureName}`);
290732
290844
  const workdir = getFeatureRenderedPathForEntity(entity, featureName, basePath);
290733
290845
  const config = JSON.parse(external_fs_.readFileSync(`${workdir}/output.json`, { encoding: 'utf8' }));
290734
- features_io_messageLog('Feature output: %O', config);
290735
- features_io_messageLog(`Rendered feature ${featureName} for component ${entity.name}. Result: ${(JSON.stringify, config)}`);
290846
+ src_logger.debug(`Feature output: ${config}`);
290847
+ src_logger.debug(`Rendered feature ${featureName} for component ${entity.name}. Result: ${(JSON.stringify, config)}`);
290736
290848
  return config;
290737
290849
  }
290738
290850
 
@@ -290746,7 +290858,6 @@ function getFeatureRenderedConfigForComponent(entity, featureName, basePath = '/
290746
290858
 
290747
290859
  ;// CONCATENATED MODULE: ../catalog_common/src/policies/policies.ts
290748
290860
 
290749
- const policies_log = src_default()('firestartr:catalog_common:policies');
290750
290861
  const FIRESTARTR_POLICIES = [
290751
290862
  {
290752
290863
  name: 'full-control',
@@ -290785,17 +290896,17 @@ function getPolicyByName(policyName) {
290785
290896
  return FIRESTARTR_POLICIES.find((p) => p.name === policyName || p.aliases.includes(policyName));
290786
290897
  }
290787
290898
  function policiesAreCompatible(syncPolicy, generalPolicy) {
290788
- policies_log('Validating policy compatibility: %s %s', syncPolicy, generalPolicy);
290899
+ src_logger.debug('Validating policy compatibility: %s %s', syncPolicy, generalPolicy);
290789
290900
  const syncPolicyWeight = getPolicyByName(syncPolicy)?.weight;
290790
290901
  const generalPolicyWeight = getPolicyByName(generalPolicy)?.weight;
290791
290902
  if (!syncPolicyWeight || !generalPolicyWeight) {
290792
290903
  throw new Error(`Policy ${syncPolicy} or ${generalPolicy} not found`);
290793
290904
  }
290794
290905
  if (generalPolicyWeight >= syncPolicyWeight) {
290795
- policies_log('Policies %s %s are compatible', syncPolicy, generalPolicy);
290906
+ src_logger.debug('Policies %s %s are compatible', syncPolicy, generalPolicy);
290796
290907
  return true;
290797
290908
  }
290798
- policies_log('Policies %s %s are not compatible', syncPolicy, generalPolicy);
290909
+ src_logger.debug('Policies %s %s are not compatible', syncPolicy, generalPolicy);
290799
290910
  return false;
290800
290911
  }
290801
290912
 
@@ -290815,6 +290926,7 @@ function policiesAreCompatible(syncPolicy, generalPolicy) {
290815
290926
 
290816
290927
 
290817
290928
 
290929
+
290818
290930
  /* harmony default export */ const catalog_common = ({
290819
290931
  io: io,
290820
290932
  generic: generic,
@@ -290823,6 +290935,7 @@ function policiesAreCompatible(syncPolicy, generalPolicy) {
290823
290935
  defaults: defaults,
290824
290936
  features: features,
290825
290937
  policies: policies,
290938
+ logger: logger_logger,
290826
290939
  });
290827
290940
 
290828
290941
  ;// CONCATENATED MODULE: ../../node_modules/universal-user-agent/index.js
@@ -297369,13 +297482,16 @@ async function getOctokitFromPat(envVar) {
297369
297482
  }
297370
297483
  /* harmony default export */ const src_auth = ({ getOctokitForOrg });
297371
297484
 
297485
+ ;// CONCATENATED MODULE: ../github/src/logger.ts
297486
+
297487
+ /* harmony default export */ const github_src_logger = (catalog_common.logger);
297488
+
297372
297489
  ;// CONCATENATED MODULE: ../github/src/organization.ts
297373
297490
 
297374
297491
 
297375
- const organization_messageLog = src_default()('firestartr:github:organization');
297376
297492
  const defaultPerPage = 100;
297377
297493
  async function getRepositoryList(org, perPageEntries = defaultPerPage) {
297378
- organization_messageLog(`Getting repository list for ${org} with ${perPageEntries} entries per page`);
297494
+ github_src_logger.info(`Getting repository list for ${org} with ${perPageEntries} entries per page`);
297379
297495
  const octokit = await getOctokitForOrg(org);
297380
297496
  const options = octokit.repos.listForOrg.endpoint.merge({
297381
297497
  org: org,
@@ -297385,7 +297501,7 @@ async function getRepositoryList(org, perPageEntries = defaultPerPage) {
297385
297501
  return await doPaginatedRequest(options);
297386
297502
  }
297387
297503
  async function getTeamList(org, perPageEntries = defaultPerPage) {
297388
- organization_messageLog(`Getting team list for ${org} with ${perPageEntries} entries per page`);
297504
+ github_src_logger.info(`Getting team list for ${org} with ${perPageEntries} entries per page`);
297389
297505
  const octokit = await getOctokitForOrg(org);
297390
297506
  const options = octokit.rest.teams.list.endpoint.merge({
297391
297507
  org: org,
@@ -297394,7 +297510,7 @@ async function getTeamList(org, perPageEntries = defaultPerPage) {
297394
297510
  return await doPaginatedRequest(options);
297395
297511
  }
297396
297512
  async function getUserList(org, perPageEntries = defaultPerPage) {
297397
- organization_messageLog(`Getting user list for ${org} with ${perPageEntries} entries per page`);
297513
+ github_src_logger.info(`Getting user list for ${org} with ${perPageEntries} entries per page`);
297398
297514
  const octokit = await getOctokitForOrg(org);
297399
297515
  const options = await octokit.rest.orgs.listMembers.endpoint.merge({
297400
297516
  org: org,
@@ -297403,7 +297519,7 @@ async function getUserList(org, perPageEntries = defaultPerPage) {
297403
297519
  return await doPaginatedRequest(options);
297404
297520
  }
297405
297521
  async function validateMember(username, org) {
297406
- organization_messageLog(`Validating ${username} is a member of ${org}`);
297522
+ github_src_logger.debug(`Validating ${username} is a member of ${org}`);
297407
297523
  const octokit = await getOctokitForOrg(org);
297408
297524
  const result = await octokit.orgs.checkMembershipForUser({
297409
297525
  org: org,
@@ -297412,7 +297528,7 @@ async function validateMember(username, org) {
297412
297528
  return result;
297413
297529
  }
297414
297530
  async function getUserRoleInOrg(username, org) {
297415
- organization_messageLog(`Getting user ${username} role in ${org}`);
297531
+ github_src_logger.info(`Getting user ${username} role in ${org}`);
297416
297532
  const octokit = await getOctokitForOrg(org);
297417
297533
  const membership = await octokit.orgs.getMembershipForUser({
297418
297534
  org: org,
@@ -297421,13 +297537,13 @@ async function getUserRoleInOrg(username, org) {
297421
297537
  return membership.data.role;
297422
297538
  }
297423
297539
  async function getOrgInfo(org) {
297424
- organization_messageLog(`Getting info for org ${org}`);
297540
+ github_src_logger.info(`Getting info for org ${org}`);
297425
297541
  const octokit = await getOctokitForOrg(org);
297426
297542
  const orgInfo = await octokit.orgs.get({ org });
297427
297543
  return orgInfo.data;
297428
297544
  }
297429
297545
  async function getOrgPlanName(org) {
297430
- organization_messageLog(`Getting plan for org ${org}`);
297546
+ github_src_logger.info(`Getting plan for org ${org}`);
297431
297547
  const orgInfo = await getOrgInfo(org);
297432
297548
  return orgInfo.plan.name;
297433
297549
  }
@@ -297451,9 +297567,8 @@ async function doPaginatedRequest(options) {
297451
297567
 
297452
297568
 
297453
297569
 
297454
- const repository_messageLog = src_default()('firestartr:github:repository');
297455
297570
  async function listReleases(repo, owner = 'prefapp') {
297456
- repository_messageLog(`Getting releases for ${owner}/${repo}`);
297571
+ github_src_logger.info(`Getting releases for ${owner}/${repo}`);
297457
297572
  const octokit = await getOctokitForOrg(owner);
297458
297573
  const response = await octokit.rest.repos.listReleases({
297459
297574
  owner,
@@ -297464,7 +297579,7 @@ async function listReleases(repo, owner = 'prefapp') {
297464
297579
  return response.data;
297465
297580
  }
297466
297581
  async function getReleaseByTag(releaseTag, repo, owner = 'prefapp') {
297467
- repository_messageLog(`Getting release ${releaseTag} for ${owner}/${repo}`);
297582
+ github_src_logger.info(`Getting release ${releaseTag} for ${owner}/${repo}`);
297468
297583
  const octokit = await getOctokitForOrg(owner);
297469
297584
  const response = await octokit.rest.repos.getReleaseByTag({
297470
297585
  owner,
@@ -297479,7 +297594,7 @@ async function getFileFromGithub(path, repo, owner = 'prefapp') {
297479
297594
  return await octokit.rest.repos.getContent({ owner, repo, path });
297480
297595
  }
297481
297596
  async function getContent(path, repo, owner = 'prefapp', ref = '') {
297482
- repository_messageLog(`Getting content for ${owner}/${repo}/${path}`);
297597
+ github_src_logger.info(`Getting content for ${owner}/${repo}/${path}`);
297483
297598
  const octokit = await getOctokitForOrg(owner);
297484
297599
  const opts = {
297485
297600
  owner,
@@ -297493,19 +297608,19 @@ async function getContent(path, repo, owner = 'prefapp', ref = '') {
297493
297608
  return Buffer.from(content.data.content, 'base64').toString('utf8');
297494
297609
  }
297495
297610
/**
 * Fetch the repository object for `owner/name`.
 * @returns the `data` payload of the GitHub "get repository" response
 */
async function getRepoInfo(owner, name) {
    github_src_logger.info(`Getting repo info for ${owner}/${name}`);
    const client = await getOctokitForOrg(owner);
    const { data } = await client.repos.get({ owner, repo: name });
    return data;
}
297501
297616
/**
 * Fetch the GitHub Pages configuration of `owner/name`.
 * @returns the `data` payload of the "get pages" response
 */
async function getPages(owner, name) {
    github_src_logger.info(`Getting pages for ${owner}/${name}`);
    const client = await getOctokitForOrg(owner);
    const { data } = await client.repos.getPages({ owner, repo: name });
    return data;
}
297507
297622
  async function getOIDCRepo(owner, name) {
297508
- repository_messageLog(`Getting repo info for ${owner}/${name}`);
297623
+ github_src_logger.info(`Getting repo info for ${owner}/${name}`);
297509
297624
  const octokit = await getOctokitForOrg(owner);
297510
297625
  return await octokit.request(`GET /repos/${owner}/${name}/actions/oidc/customization/sub`, {
297511
297626
  owner: owner,
@@ -297516,7 +297631,7 @@ async function getOIDCRepo(owner, name) {
297516
297631
  });
297517
297632
  }
297518
297633
  async function getBranchProtection(owner, repo, branch = 'main') {
297519
- repository_messageLog(`Getting branch protection for ${owner}/${repo}/${branch}`);
297634
+ github_src_logger.info(`Getting branch protection for ${owner}/${repo}/${branch}`);
297520
297635
  const octokit = await getOctokitForOrg(owner);
297521
297636
  const res = await octokit.repos.getBranchProtection({
297522
297637
  owner: owner,
@@ -297526,13 +297641,13 @@ async function getBranchProtection(owner, repo, branch = 'main') {
297526
297641
  return res['data'];
297527
297642
  }
297528
297643
/**
 * List the teams that have access to `owner/repo`.
 * @returns the `data` payload of the "list repository teams" response
 */
async function getTeams(owner, repo) {
    github_src_logger.info(`Getting teams for ${owner}/${repo}`);
    const client = await getOctokitForOrg(owner);
    const { data } = await client.repos.listTeams({ owner, repo });
    return data;
}
297534
297649
  async function getCollaborators(owner, repo, affiliation = 'direct') {
297535
- repository_messageLog(`Getting collaborators for ${owner}/${repo}`);
297650
+ github_src_logger.info(`Getting collaborators for ${owner}/${repo}`);
297536
297651
  const octokit = await getOctokitForOrg(owner);
297537
297652
  const res = await octokit.repos.listCollaborators({
297538
297653
  owner: owner,
@@ -297543,7 +297658,7 @@ async function getCollaborators(owner, repo, affiliation = 'direct') {
297543
297658
  }
297544
297659
  async function setContent(path, fileContent, repo, owner = 'prefapp', branch = 'main', message = '') {
297545
297660
  const base64Content = Buffer.from(fileContent, 'utf8').toString('base64');
297546
- repository_messageLog(`Setting content for ${owner}/${repo}/${path}`);
297661
+ github_src_logger.info(`Setting content for ${owner}/${repo}/${path}`);
297547
297662
  if (message === '') {
297548
297663
  message = `Update ${path}`;
297549
297664
  }
@@ -297551,10 +297666,10 @@ async function setContent(path, fileContent, repo, owner = 'prefapp', branch = '
297551
297666
  try {
297552
297667
  const currentContent = await getFileFromGithub(path, repo, owner);
297553
297668
  sha = currentContent.data.sha;
297554
- repository_messageLog('File already exists, updating it');
297669
+ github_src_logger.debug('File already exists, updating it');
297555
297670
  }
297556
297671
  catch {
297557
- repository_messageLog('File does not exists, creating it');
297672
+ github_src_logger.debug('File does not exist, creating it');
297558
297673
  }
297559
297674
  const octokit = await getOctokitForOrg(owner);
297560
297675
  await octokit.rest.repos.createOrUpdateFileContents({
@@ -297569,7 +297684,7 @@ async function setContent(path, fileContent, repo, owner = 'prefapp', branch = '
297569
297684
  }
297570
297685
  async function uploadFile(destinationPath, filePath, repo, owner = 'prefapp', branch = 'main', message = '') {
297571
297686
  if (!external_fs_.existsSync(filePath)) {
297572
- repository_messageLog(`File ${filePath} does not exists or is not readable`);
297687
+ github_src_logger.error(`File ${filePath} does not exists or is not readable`);
297573
297688
  throw `${filePath} does not exists or is not readable`;
297574
297689
  }
297575
297690
  // Read file contents and call setContent
@@ -297578,16 +297693,16 @@ async function uploadFile(destinationPath, filePath, repo, owner = 'prefapp', br
297578
297693
  }
297579
297694
  async function deleteFile(path, repo, owner = 'prefapp', branch = 'main', message = '') {
297580
297695
  let sha = undefined;
297581
- repository_messageLog(`Deleting file ${owner}/${repo}/${path}`);
297696
+ github_src_logger.info(`Deleting file ${owner}/${repo}/${path}`);
297582
297697
  try {
297583
297698
  const currentContent = await getFileFromGithub(path, repo, owner);
297584
297699
  sha = currentContent.data.sha;
297585
297700
  }
297586
297701
  catch {
297587
- repository_messageLog(`File ${path} does not exist in ${repo}`);
297702
+ github_src_logger.error(`File ${path} does not exist in ${repo}`);
297588
297703
  }
297589
297704
  if (!sha) {
297590
- repository_messageLog(`File ${path} does not exist in ${repo}`);
297705
+ github_src_logger.error(`File ${path} does not exist in ${repo}`);
297591
297706
  throw `File ${path} does not exist in ${repo}`;
297592
297707
  }
297593
297708
  if (message === '') {
@@ -297604,7 +297719,7 @@ async function deleteFile(path, repo, owner = 'prefapp', branch = 'main', messag
297604
297719
  });
297605
297720
  }
297606
297721
  async function addStatusCheck(output, is_failure, head_sha, name, status, repo, owner = 'prefapp') {
297607
- repository_messageLog(`Adding status checks to commit ${head_sha} in ${owner}/${repo}`);
297722
+ github_src_logger.info(`Adding status checks to commit ${head_sha} in ${owner}/${repo}`);
297608
297723
  const octokit = await getOctokitForOrg(owner);
297609
297724
  const payload = { output, head_sha, name, owner, repo, status };
297610
297725
  if (status === 'completed') {
@@ -297613,7 +297728,7 @@ async function addStatusCheck(output, is_failure, head_sha, name, status, repo,
297613
297728
  await octokit.rest.checks.create(payload);
297614
297729
  }
297615
297730
  async function addCommitStatus(state, sha, repo, owner = 'prefapp', target_url = '', description = '', context = '') {
297616
- repository_messageLog(`Adding commit status with state ${state} to SHA ${sha} in ${owner}/${repo}`);
297731
+ github_src_logger.info(`Adding commit status with state ${state} to SHA ${sha} in ${owner}/${repo}`);
297617
297732
  const octokit = await getOctokitForOrg(owner);
297618
297733
  await octokit.rest.repos.createCommitStatus({
297619
297734
  owner,
@@ -297645,9 +297760,8 @@ async function addCommitStatus(state, sha, repo, owner = 'prefapp', target_url =
297645
297760
  ;// CONCATENATED MODULE: ../github/src/team.ts
297646
297761
 
297647
297762
 
297648
- const team_messageLog = src_default()('firestartr:github:team');
297649
297763
  async function getTeamMembers(team, org) {
297650
- team_messageLog(`Getting members for ${org}/${team}`);
297764
+ github_src_logger.info(`Getting members for ${org}/${team}`);
297651
297765
  const octokit = await getOctokitForOrg(org);
297652
297766
  const res = await octokit.rest.teams.listMembersInOrg({
297653
297767
  org: org,
@@ -297656,13 +297770,13 @@ async function getTeamMembers(team, org) {
297656
297770
  return res['data'];
297657
297771
  }
297658
297772
/**
 * Fetch a team by slug inside `org`.
 * @returns the `data` payload of the "get team by name" response
 */
async function getTeamInfo(team, org) {
    github_src_logger.info(`Getting info for ${org}/${team}`);
    const client = await getOctokitForOrg(org);
    const { data } = await client.rest.teams.getByName({ org, team_slug: team });
    return data;
}
297664
297778
  async function getTeamRoleUser(org, team, username) {
297665
- team_messageLog(`Getting role for ${username} in ${org}/${team}`);
297779
+ github_src_logger.info(`Getting role for ${username} in ${org}/${team}`);
297666
297780
  const octokit = await getOctokitForOrg(org);
297667
297781
  const res = await octokit.rest.teams.getMembershipForUserInOrg({
297668
297782
  org: org,
@@ -297672,7 +297786,7 @@ async function getTeamRoleUser(org, team, username) {
297672
297786
  return res['data'];
297673
297787
  }
297674
297788
  async function create(org, team, privacy = 'closed') {
297675
- team_messageLog(`Creating team ${org}/${team}`);
297789
+ github_src_logger.info(`Creating team ${org}/${team}`);
297676
297790
  const octokit = await getOctokitForOrg(org);
297677
297791
  return await octokit.rest.teams.create({
297678
297792
  org: org,
@@ -297681,7 +297795,7 @@ async function create(org, team, privacy = 'closed') {
297681
297795
  });
297682
297796
  }
297683
297797
  async function addOrUpdateMember(org, team, username, role = 'member') {
297684
- team_messageLog(`Adding or updating ${username} in ${org}/${team} with role ${role}`);
297798
+ github_src_logger.info(`Adding or updating ${username} in ${org}/${team} with role ${role}`);
297685
297799
  const octokit = await getOctokitForOrg(org);
297686
297800
  return await octokit.rest.teams.addOrUpdateMembershipForUserInOrg({
297687
297801
  org: org,
@@ -297691,7 +297805,7 @@ async function addOrUpdateMember(org, team, username, role = 'member') {
297691
297805
  });
297692
297806
  }
297693
297807
  async function removeMember(org, team, username) {
297694
- team_messageLog(`Removing ${username} from ${org}/${team}`);
297808
+ github_src_logger.info(`Removing ${username} from ${org}/${team}`);
297695
297809
  const octokit = await getOctokitForOrg(org);
297696
297810
  return await octokit.teams.removeMembershipForUserInOrg({
297697
297811
  org: org,
@@ -297711,9 +297825,8 @@ async function removeMember(org, team, username) {
297711
297825
  ;// CONCATENATED MODULE: ../github/src/user.ts
297712
297826
 
297713
297827
 
297714
- const user_messageLog = src_default()('firestartr:github:user');
297715
297828
/**
 * Fetch the GitHub user object for `name`.
 * NOTE(review): the Octokit client is resolved by passing the username to
 * getOctokitForOrg — presumably that helper tolerates non-org logins; confirm.
 * @returns the full "get user by username" response (not just `.data`)
 */
async function getUserInfo(name) {
    github_src_logger.info(`Getting user ${name} info`);
    const client = await getOctokitForOrg(name);
    return await client.users.getByUsername({ username: name });
}
@@ -297724,11 +297837,10 @@ async function getUserInfo(name) {
297724
297837
  ;// CONCATENATED MODULE: ../github/src/pull_request.ts
297725
297838
 
297726
297839
 
297727
- const pull_request_messageLog = src_default()('firestartr:github:pull_request');
297728
297840
  const commentMaxSize = 65535;
297729
297841
  async function commentInPR(comment, pr_number, repo, owner = 'prefapp') {
297730
297842
  try {
297731
- pull_request_messageLog(`Commenting ${comment} in PR ${pr_number} of ${owner}/${repo}`);
297843
+ github_src_logger.info(`Commenting ${comment} in PR ${pr_number} of ${owner}/${repo}`);
297732
297844
  const octokit = await getOctokitForOrg(owner);
297733
297845
  await octokit.rest.issues.createComment({
297734
297846
  owner,
@@ -297747,12 +297859,12 @@ async function getPrData(pull_number, repo, owner) {
297747
297859
  return await octokit.rest.pulls.get({ owner, repo, pull_number });
297748
297860
  }
297749
297861
/**
 * Resolve the SHA of the most recent commit on a pull request's head branch.
 * @returns the `head.sha` of the PR as reported by the GitHub API
 */
async function getPrLastCommitSHA(pull_number, repo, owner = 'prefapp') {
    github_src_logger.info(`Getting last commit SHA for PR ${pull_number} of ${owner}/${repo}`);
    const response = await getPrData(pull_number, repo, owner);
    return response.data.head.sha;
}
297754
297866
  async function getPrMergeCommitSHA(pull_number, repo, owner = 'prefapp') {
297755
- pull_request_messageLog(`Getting merge commit SHA for PR ${pull_number} of ${owner}/${repo}`);
297867
+ github_src_logger.info(`Getting merge commit SHA for PR ${pull_number} of ${owner}/${repo}`);
297756
297868
  const prData = await getPrData(pull_number, repo, owner);
297757
297869
  if (prData.data.merge_commit_sha !== null) {
297758
297870
  return prData.data.merge_commit_sha;
@@ -297790,7 +297902,7 @@ function divideCommentIntoChunks(comment, sizeReduction = 0) {
297790
297902
  return result;
297791
297903
  }
297792
297904
  async function getPrFiles(pr_number, repo, owner = 'prefapp') {
297793
- pull_request_messageLog(`Getting PR details of PR ${pr_number} of ${owner}/${repo}`);
297905
+ github_src_logger.info(`Getting PR details of PR ${pr_number} of ${owner}/${repo}`);
297794
297906
  const octokit = await getOctokitForOrg(owner);
297795
297907
  return await octokit.rest.pulls.listFiles({
297796
297908
  owner,
@@ -297832,9 +297944,8 @@ async function filterPrBy(filter, opts) {
297832
297944
  ;// CONCATENATED MODULE: ../github/src/issues.ts
297833
297945
 
297834
297946
 
297835
- const issues_log = src_default()('firestartr:github:issues');
297836
297947
  async function issues_create(owner, repo, title, body, labels = []) {
297837
- issues_log(`Creating issue in ${owner}/${repo}`);
297948
+ github_src_logger.info(`Creating issue in ${owner}/${repo}`);
297838
297949
  const octokit = await getOctokitForOrg(owner);
297839
297950
  return await octokit.rest.issues.create({
297840
297951
  owner,
@@ -297845,7 +297956,7 @@ async function issues_create(owner, repo, title, body, labels = []) {
297845
297956
  });
297846
297957
  }
297847
297958
  async function update(owner, repo, issue_number, title, body, labels = []) {
297848
- issues_log(`Updating issue ${issue_number} in ${owner}/${repo}`);
297959
+ github_src_logger.info(`Updating issue ${issue_number} in ${owner}/${repo}`);
297849
297960
  const octokit = await getOctokitForOrg(owner);
297850
297961
  return await octokit.rest.issues.update({
297851
297962
  owner,
@@ -297857,7 +297968,7 @@ async function update(owner, repo, issue_number, title, body, labels = []) {
297857
297968
  });
297858
297969
  }
297859
297970
  async function filterBy(owner, repo, title, labels, state = 'open', creator = undefined, assignee = undefined) {
297860
- issues_log(`Filtering issues by title in ${owner}/${repo}`);
297971
+ github_src_logger.info(`Filtering issues by title in ${owner}/${repo}`);
297861
297972
  const octokit = await getOctokitForOrg(owner);
297862
297973
  const resp = await octokit.rest.issues.listForRepo({
297863
297974
  owner,
@@ -297872,7 +297983,7 @@ async function filterBy(owner, repo, title, labels, state = 'open', creator = un
297872
297983
  return resp.data.filter((issue) => issue.title.includes(title));
297873
297984
  }
297874
297985
  async function upsertByTitle(owner, repo, title, body, labels = []) {
297875
- issues_log(`Upserting issue by title in ${owner}/${repo}`);
297986
+ github_src_logger.info(`Upserting issue by title in ${owner}/${repo}`);
297876
297987
  const foundIssues = await filterBy(owner, repo, title, labels.join(','));
297877
297988
  if (foundIssues.length > 0) {
297878
297989
  return update(owner, repo, foundIssues[0].number, title, body, labels);
@@ -297882,7 +297993,7 @@ async function upsertByTitle(owner, repo, title, body, labels = []) {
297882
297993
  }
297883
297994
  }
297884
297995
  async function issues_close(owner, repo, issue_number) {
297885
- issues_log(`Closing issue ${issue_number} in ${owner}/${repo}`);
297996
+ github_src_logger.info(`Closing issue ${issue_number} in ${owner}/${repo}`);
297886
297997
  const octokit = await getOctokitForOrg(owner);
297887
297998
  return await octokit.rest.issues.update({
297888
297999
  owner,
@@ -297902,10 +298013,9 @@ async function issues_close(owner, repo, issue_number) {
297902
298013
  ;// CONCATENATED MODULE: ../github/src/branches.ts
297903
298014
 
297904
298015
 
297905
- const branches_messageLog = src_default()('firestartr:github:branches');
297906
298016
  const SHA1_EMPTY_TREE = '4b825dc642cb6eb9a060e54bf8d69288fbee4904';
297907
298017
  async function listBranches(repo, owner = 'prefapp') {
297908
- branches_messageLog(`Getting branches for ${owner}/${repo}`);
298018
+ github_src_logger.info(`Getting branches for ${owner}/${repo}`);
297909
298019
  const octokit = await getOctokitForOrg(owner);
297910
298020
  const response = await octokit.rest.repos.listBranches({
297911
298021
  owner,
@@ -297916,7 +298026,7 @@ async function listBranches(repo, owner = 'prefapp') {
297916
298026
  return response.data;
297917
298027
  }
297918
298028
  async function getBranch(repo, branch, owner = 'prefapp') {
297919
- branches_messageLog(`Getting branch ${branch} for ${owner}/${repo}`);
298029
+ github_src_logger.info(`Getting branch ${branch} for ${owner}/${repo}`);
297920
298030
  const octokit = await getOctokitForOrg(owner);
297921
298031
  const response = await octokit.rest.repos.getBranch({
297922
298032
  owner,
@@ -297926,7 +298036,7 @@ async function getBranch(repo, branch, owner = 'prefapp') {
297926
298036
  return response.data;
297927
298037
  }
297928
298038
  async function createBranch(repo, branch, sha, owner = 'prefapp') {
297929
- branches_messageLog(`Creating branch ${branch} for ${owner}/${repo}`);
298039
+ github_src_logger.info(`Creating branch ${branch} for ${owner}/${repo}`);
297930
298040
  const octokit = await getOctokitForOrg(owner);
297931
298041
  const response = await octokit.rest.git.createRef({
297932
298042
  owner,
@@ -297937,7 +298047,7 @@ async function createBranch(repo, branch, sha, owner = 'prefapp') {
297937
298047
  return response.data;
297938
298048
  }
297939
298049
  async function createOrphanBranch(repo, branch, owner = 'prefapp') {
297940
- branches_messageLog(`Creating orphan branch ${branch} for ${owner}/${repo}`);
298050
+ github_src_logger.info(`Creating orphan branch ${branch} for ${owner}/${repo}`);
297941
298051
  const octokit = await getOctokitForOrg(owner);
297942
298052
  // Create a commit with an empty tree
297943
298053
  const { data: commit } = await octokit.request('POST /repos/{owner}/{repo}/git/commits', {
@@ -297963,6 +298073,268 @@ async function createOrphanBranch(repo, branch, owner = 'prefapp') {
297963
298073
  createOrphanBranch,
297964
298074
  });
297965
298075
 
298076
;// CONCATENATED MODULE: ../github/src/check_run.ts


// Default cadence (seconds) between buffered flushes to GitHub.
const FLUSH_TIMEOUT = 4; // seconds
// GitHub rejects check-run output.text beyond roughly this many characters.
const GITHUB_OUTPUT_TEXT_LIMIT = 65000; // ~65k hard limit for output.text
/**
 * Accumulates streamed text and hands the FULL current content to `onFlush`
 * on a fixed cadence (with an immediate flush when seeded with content).
 * Content is never cleared on flush, so each callback receives the complete
 * log so far rather than a delta.
 */
class CheckRunBuffer {
    constructor(initial = '', onFlush, opts) {
        this.content = initial;
        this.updated = Boolean(initial);
        this.onFlush = onFlush;
        this.flushIntervalMs = (opts?.intervalSec ?? FLUSH_TIMEOUT) * 1000;
        this.timer = setInterval(() => this.flush(), this.flushIntervalMs);
        // Push seed content out right away instead of waiting a full interval.
        if (initial) {
            this.flush();
        }
    }
    /** Cancel the periodic flush timer (idempotent). */
    stop() {
        if (this.timer === null) {
            return;
        }
        clearInterval(this.timer);
        this.timer = null;
    }
    /** Append `data` to the buffer; empty strings are ignored. */
    update(data = '') {
        if (!data) {
            return;
        }
        this.content += data;
        this.updated = true;
    }
    /** Invoke the callback with the full content, but only if new data arrived. */
    flush() {
        if (!this.updated) {
            return;
        }
        try {
            this.onFlush(this.content);
        }
        finally {
            // Clear the dirty flag even if the callback throws.
            this.updated = false;
        }
    }
    /** Current accumulated content. */
    snapshot() {
        return this.content;
    }
}
298123
/**
 * Live-updating GitHub Check Run. Log text is appended through a
 * CheckRunBuffer that periodically pushes the full current content to the
 * checks API; `close()` finalizes the run with a success/failure conclusion.
 * The check run itself is created lazily on the first flush or on close.
 */
class GithubCheckRun {
    /**
     * @param octokit - authenticated Octokit client
     * @param params - owner/repo/headSHA/name plus optional title, summary,
     *                 detailsUrl and PR-comment settings
     */
    constructor(octokit, params) {
        this.hasCommented = false;
        this.closing = false;
        this.closed = false;
        this.lastStatus = 'in_progress';
        this.detailsFormatter = (s) => s; // identity until mdOptionsDetails() configures fencing
        this.octokit = octokit;
        this.owner = params.owner;
        this.repo = params.repo;
        this.headSHA = params.headSHA;
        this.name = params.name;
        this.detailsUrl = params.detailsUrl;
        this.title = params.title ?? params.name;
        if (params.summary)
            this._summaryOverride = params.summary;
        this.pullNumber = params.pullNumber;
        this.includeCheckRunComment = Boolean(params.includeCheckRunComment);
        this.checkRunComment = params.checkRunComment;
        // Flush failures are intentionally swallowed so a transient API error
        // cannot crash the producer of the log stream.
        this.buffer = new CheckRunBuffer('', (data) => this.__updateCheckRun(data).catch(() => { }), { intervalSec: FLUSH_TIMEOUT });
    }
    /**
     * Configure markdown formatting for the details (output.text).
     * Example: ch.mdOptionsDetails({ quotes: 'terraform' })
     * Result:
     * ```terraform
     * <log>
     * ```
     * Passing no language (or whitespace) resets to the identity formatter.
     */
    mdOptionsDetails(opts) {
        const lang = (opts?.quotes ?? '').trim();
        if (!lang) {
            this.detailsFormatter = (s) => s;
            return;
        }
        const fenceOpen = '```' + lang + '\n';
        const fenceClose = '\n```';
        const overhead = fenceOpen.length + fenceClose.length;
        this.detailsFormatter = (body) => {
            // Reserve room for the fences so the fenced total stays <= the API limit.
            const maxBody = Math.max(0, GITHUB_OUTPUT_TEXT_LIMIT - overhead);
            const safeBody = body.length > maxBody ? truncateRight(body, maxBody) : body;
            return fenceOpen + safeBody + fenceClose;
        };
    }
    /** Replace the summary shown on the check run (output.summary). */
    set summary(data) {
        this._summaryOverride = data;
        // Push an immediate update if already created and not closed.
        if (!this.closed && this.checkRunId) {
            // do not mutate buffer flags; just send current snapshot using new summary
            this.__updateCheckRun(this.buffer.snapshot()).catch(() => { });
        }
    }
    get summary() {
        return this._summaryOverride;
    }
    /**
     * Append log text and optionally set status ('queued' | 'in_progress').
     * No-op once the run has been closed.
     */
    update(text, status) {
        if (this.closed)
            return;
        if (status)
            this.lastStatus = status;
        if (text)
            this.buffer.update(text);
    }
    /**
     * Finalize the check with a conclusion. Flushes buffered text, marks completed.
     * @param finalText - optional trailing text appended after the buffered log
     * @param ok - true -> conclusion 'success', false -> 'failure'
     */
    async close(finalText, ok) {
        // Guard against re-entrancy and double-close.
        if (this.closed || this.closing)
            return;
        this.closing = true;
        this.buffer.stop();
        const finalContent = this.buffer.snapshot() + (finalText || '');
        try {
            await this.__ensureCreated();
            const { text, summary } = this.buildOutputTextAndSummary(finalContent);
            await this.octokit.rest.checks.update({
                owner: this.owner,
                repo: this.repo,
                check_run_id: this.checkRunId,
                conclusion: ok ? 'success' : 'failure',
                completed_at: new Date().toISOString(),
                output: {
                    title: this.title,
                    summary,
                    text,
                },
            });
            this.closed = true;
        }
        finally {
            // If the API call threw, `closed` stays false so close() can be retried.
            this.closing = false;
        }
    }
    // -------------------- Internals --------------------
    /**
     * Lazily create the check run (status 'in_progress') on first use, and —
     * when configured — post a one-time PR comment linking to the run.
     */
    async __ensureCreated() {
        if (this.checkRunId)
            return;
        const startedAt = new Date().toISOString();
        const res = await this.octokit.rest.checks.create({
            owner: this.owner,
            repo: this.repo,
            name: this.name,
            head_sha: this.headSHA,
            status: 'in_progress',
            started_at: startedAt,
            details_url: this.detailsUrl,
            output: {
                title: this.title,
                summary: this._summaryOverride ?? '',
                text: undefined,
            },
        });
        this.checkRunId = res.data.id;
        if (this.includeCheckRunComment &&
            this.pullNumber !== undefined &&
            !this.hasCommented) {
            const link = this.__buildCheckRunUrl();
            const formattedLink = `[here](${link})`;
            const base = this.checkRunComment ?? '';
            const body = base ? `${base}${formattedLink}` : formattedLink;
            await this.octokit.rest.issues.createComment({
                owner: this.owner,
                repo: this.repo,
                issue_number: this.pullNumber,
                body,
            });
            this.hasCommented = true;
        }
    }
    /** Send the full current log to the check run (no-op while closing/closed). */
    async __updateCheckRun(allContent) {
        if (this.closed || this.closing)
            return;
        await this.__ensureCreated();
        const { text, summary } = this.buildOutputTextAndSummary(allContent);
        await this.octokit.rest.checks.update({
            owner: this.owner,
            repo: this.repo,
            check_run_id: this.checkRunId,
            status: this.lastStatus,
            output: {
                title: this.title,
                summary,
                text,
            },
        });
    }
    /** Browser URL for the run; falls back to the commit checks page pre-creation. */
    __buildCheckRunUrl() {
        if (this.checkRunId) {
            return `https://github.com/${this.owner}/${this.repo}/runs/${this.checkRunId}?check_suite_focus=true`;
        }
        return `https://github.com/${this.owner}/${this.repo}/commit/${this.headSHA}/checks?check_suite_focus=true`;
    }
    /**
     * Build the output.text / output.summary pair from the raw log, applying
     * the configured formatter, enforcing the API size limit, and appending a
     * truncation notice to the summary when content was dropped.
     */
    buildOutputTextAndSummary(full) {
        if (!full) {
            return {
                text: undefined,
                summary: this._summaryOverride ?? '',
            };
        }
        let text = this.detailsFormatter(full);
        let truncated = false;
        if (text.length > GITHUB_OUTPUT_TEXT_LIMIT) {
            text = truncateRight(text, GITHUB_OUTPUT_TEXT_LIMIT);
            truncated = true;
        }
        else {
            // A fencing formatter may have truncated the body internally; detect
            // that by comparing against the raw length.
            truncated = text.length < full.length;
        }
        let summary = this._summaryOverride ?? '';
        if (this._summaryOverride && truncated) {
            summary = `${summary}\n\n... (log truncated to ~${GITHUB_OUTPUT_TEXT_LIMIT.toLocaleString()} chars)`;
        }
        return { text, summary };
    }
}
298301
// -------------------- Helpers --------------------
/**
 * Truncate `s` from the right so the result never exceeds `max` characters,
 * appending '...' when content was dropped.
 * Fix: the previous version returned the full 3-char '...' even when
 * max < 3, overshooting the requested limit.
 */
function truncateRight(s, max) {
    if (s.length <= max)
        return s;
    // Not enough room for content + ellipsis: emit as much ellipsis as fits.
    if (max <= 3)
        return '...'.slice(0, Math.max(0, max));
    return s.slice(0, max - 3) + '...';
}
298308
/**
 * Factory: build a GithubCheckRun authenticated with the installation token
 * of `org`. The head SHA comes from `opts.headSHA`, or is resolved from the
 * merge commit of `opts.pullNumber` when only a PR number is supplied.
 * @throws Error when neither headSHA nor pullNumber is provided
 */
async function createCheckRunForOrg(org, owner, repo, name, opts) {
    const client = await getOctokitForOrg(org);
    let sha = opts?.headSHA;
    if (!sha && typeof opts?.pullNumber === 'number') {
        sha = await getPrMergeCommitSHA(opts.pullNumber, repo, owner);
    }
    if (!sha) {
        throw new Error('createCheckRunForOrg: either opts.headSHA or opts.pullNumber must be provided');
    }
    return new GithubCheckRun(client, {
        owner,
        repo,
        headSHA: sha,
        name,
        detailsUrl: opts?.detailsUrl,
        title: opts?.title,
        summary: opts?.summary,
        pullNumber: opts?.pullNumber,
        includeCheckRunComment: Boolean(opts?.includeCheckRunComment),
        checkRunComment: opts?.checkRunComment,
    });
}
298333
/** Convenience wrapper: create a check run where the org equals the repo owner. */
async function createCheckRun(owner, repo, name, opts) {
    return createCheckRunForOrg(owner, owner, repo, name, opts);
}
// Public alias for the class, re-exported through the package index.
const CheckRun = GithubCheckRun;
298337
+
297966
298338
  ;// CONCATENATED MODULE: ../github/index.ts
297967
298339
 
297968
298340
 
@@ -297973,6 +298345,7 @@ async function createOrphanBranch(repo, branch, owner = 'prefapp') {
297973
298345
 
297974
298346
 
297975
298347
 
298348
+
297976
298349
  /* harmony default export */ const github_0 = ({
297977
298350
  org: organization,
297978
298351
  repo: repository,
@@ -297986,6 +298359,10 @@ async function createOrphanBranch(repo, branch, owner = 'prefapp') {
297986
298359
  pulls: pull_request,
297987
298360
  issues: issues,
297988
298361
  branches: branches,
298362
+ feedback: {
298363
+ createCheckRun: createCheckRun,
298364
+ CheckRun: CheckRun,
298365
+ },
297989
298366
  });
297990
298367
 
297991
298368
  ;// CONCATENATED MODULE: ../cdk8s_renderer/src/patches/base.ts
@@ -300247,11 +300624,17 @@ class TFWorkspaceNormalizer extends Normalizer {
300247
300624
  }
300248
300625
  async function normalizeModuleContent(tfRootModulePath) {
300249
300626
  let content = '';
300627
+ const files = {};
300250
300628
  await crawl(tfRootModulePath, (entry) => {
300251
300629
  return entry.endsWith('.tf');
300252
300630
  }, (entry, data) => {
300631
+ files[entry] = data;
300632
+ });
300633
+ Object.keys(files)
300634
+ .sort()
300635
+ .forEach((entry) => {
300253
300636
  content += `# ${external_path_.basename(entry)}
300254
- ${data}
300637
+ ${files[entry]}
300255
300638
  `;
300256
300639
  });
300257
300640
  return content;
@@ -301500,14 +301883,14 @@ async function loadSchema(schemaURI) {
301500
301883
 
301501
301884
 
301502
301885
// Ajv (2020-12 dialect) instance; `useDefaults` injects schema defaults into
// the data being validated.
const ajv = new (_2020_default())({ useDefaults: true });
// Guards one-time registration of the shared base schemas.
let validation_initiated = false;
// Cache of compiled validators keyed by schema id.
const validations = {};
/**
 * Return (and memoize) the compiled validator for `schemaId`, registering the
 * shared base schemas on first use.
 * NOTE(review): returns undefined when `schemaId` is unknown to Ajv — callers
 * presumably handle that; confirm.
 */
function prepareValidation(schemaId) {
    if (!validation_initiated)
        ajv.addSchema(base_schemas.schemas);
    if (!validations[schemaId])
        validations[schemaId] = ajv.getSchema(schemaId);
    validation_initiated = true;
    return validations[schemaId];
}
301513
301896
  function validateClaim(data, schemaId = 'firestartr.dev://common/ClaimEnvelope') {
@@ -302081,6 +302464,19 @@ function isTerraformWorkspace(cr) {
302081
302464
  return cr.kind === 'FirestartrTerraformWorkspace';
302082
302465
  }
302083
302466
 
302467
;// CONCATENATED MODULE: ../cdk8s_renderer/src/validations/crSize.ts

// Maximum serialized object size accepted by Kubernetes.
const K8S_OBJECT_SIZE_LIMIT = 1572864; // 1.5 MiB in bytes
/**
 * Ensure every rendered CR, once serialized to YAML, fits under the
 * Kubernetes object size limit.
 * @param crs - map of CR name -> CR object
 * @throws Error naming the first CR that exceeds the limit and by how much
 */
function validateCrSizes(crs) {
    for (const key of Object.keys(crs)) {
        const serialized = catalog_common.io.toYaml(crs[key]);
        const size = Buffer.byteLength(serialized, 'utf8');
        if (size > K8S_OBJECT_SIZE_LIMIT) {
            throw new Error(`CR "${key}" exceeds the Kubernetes object size limit by ${size - K8S_OBJECT_SIZE_LIMIT} bytes`);
        }
    }
}
302084
302480
  ;// CONCATENATED MODULE: ../cdk8s_renderer/src/refsSorter/refsSorter.ts
302085
302481
 
302086
302482
  /**
@@ -303141,7 +303537,7 @@ function addTraceabilityStamp(context, content) {
303141
303537
 
303142
303538
 
303143
303539
 
303144
- const render_messageLog = src_default()('firestartr:features_renderer');
303540
+ const messageLog = src_default()('firestartr:features_renderer');
303145
303541
  function render(featurePath, featureRenderPath, entity, firestartrConfig = {}, featureArgs = {}) {
303146
303542
  const configData = validate_validate(featurePath);
303147
303543
  const context = buildContext(entity, configData.args, firestartrConfig, featureArgs);
@@ -303152,7 +303548,7 @@ function render(featurePath, featureRenderPath, entity, firestartrConfig = {}, f
303152
303548
  // For now let's keep upgradeable flag for backward compatibility
303153
303549
  // by default it's false
303154
303550
  const userManaged = file.user_managed ?? file.upgradeable ?? false;
303155
- render_messageLog(`Rendering ${src} to ${dest}`);
303551
+ messageLog(`Rendering ${src} to ${dest}`);
303156
303552
  // render the content of the file
303157
303553
  const content = addTraceability(context, src, renderContent(external_fs_default().readFileSync(external_path_default().join(featurePath, 'templates', src)).toString(), context));
303158
303554
  const destFilePath = external_path_default().join(`${featureRenderPath}`, dest);
@@ -303205,6 +303601,209 @@ function renderContent(template, ctx) {
303205
303601
  return mustache_mustache.render(template, ctx, {}, ['{{|', '|}}']);
303206
303602
  }
303207
303603
 
303604
+ // EXTERNAL MODULE: external "node:fs"
303605
+ var external_node_fs_ = __nccwpck_require__(87561);
303606
+ // EXTERNAL MODULE: external "node:path"
303607
+ var external_node_path_ = __nccwpck_require__(49411);
303608
+ ;// CONCATENATED MODULE: external "node:os"
303609
+ const external_node_os_namespaceObject = __WEBPACK_EXTERNAL_createRequire(import.meta.url)("node:os");
303610
+ ;// CONCATENATED MODULE: ../features_renderer/src/auxiliar.ts
303611
+ // src/auxiliar.ts
303612
+
303613
+
303614
+
303615
+
303616
+
303617
+
303618
+
303619
+ const renderTestsSchema = {
303620
+ $schema: 'http://json-schema.org/draft-07/schema#',
303621
+ type: 'object',
303622
+ additionalProperties: false,
303623
+ required: ['tests'],
303624
+ properties: {
303625
+ tests: {
303626
+ type: 'array',
303627
+ minItems: 1,
303628
+ items: {
303629
+ type: 'object',
303630
+ additionalProperties: false,
303631
+ required: ['name', 'cr'],
303632
+ properties: {
303633
+ name: { type: 'string', minLength: 1 },
303634
+ cr: { type: 'string', minLength: 1 },
303635
+ args: { type: 'object' },
303636
+ },
303637
+ },
303638
+ },
303639
+ },
303640
+ };
303641
+ const YAML_FILE_REGEX = /\.[yY]a?ml$/;
303642
+ /* ---------- Core helpers ---------- */
303643
+ function formatAjvErrors(errors) {
303644
+ if (!errors || errors.length === 0)
303645
+ return 'Unknown schema error';
303646
+ return errors
303647
+ .map((e) => {
303648
+ const where = e.instancePath && e.instancePath.length ? e.instancePath : '/';
303649
+ const msg = e.message ?? 'validation error';
303650
+ return `- ${where} ${msg}`;
303651
+ })
303652
+ .join('\n');
303653
+ }
303654
+ function ensureUniqueTestNames(doc) {
303655
+ const seen = new Set();
303656
+ for (const t of doc.tests) {
303657
+ if (seen.has(t.name)) {
303658
+ throw new Error(`Duplicate test name "${t.name}" in render_tests.yaml`);
303659
+ }
303660
+ seen.add(t.name);
303661
+ }
303662
+ }
303663
+ function loadAndValidateRenderTests(featurePath) {
303664
+ const file = external_node_path_.join(featurePath, 'render_tests.yaml');
303665
+ if (!external_node_fs_.existsSync(file)) {
303666
+ throw new Error(`render_tests.yaml is required but not found at ${file}`);
303667
+ }
303668
+ const raw = loadYaml(file);
303669
+ const ajv = new (ajv_default())({ allErrors: true, strict: true });
303670
+ const validate = ajv.compile(renderTestsSchema);
303671
+ const ok = validate(raw);
303672
+ if (!ok) {
303673
+ throw new Error(`render_tests.yaml schema validation failed:\n${formatAjvErrors(validate.errors ?? [])}`);
303674
+ }
303675
+ const doc = raw;
303676
+ ensureUniqueTestNames(doc);
303677
+ return doc;
303678
+ }
303679
+ function resolveCrPath(featurePath, crRelPath) {
303680
+ if (external_node_path_.isAbsolute(crRelPath)) {
303681
+ throw new Error(`CR path must be relative to the feature root, got absolute: ${crRelPath}`);
303682
+ }
303683
+ const resolved = external_node_path_.resolve(featurePath, crRelPath);
303684
+ if (!external_node_fs_.existsSync(resolved)) {
303685
+ throw new Error(`CR file not found (resolved from "${crRelPath}"): ${resolved}`);
303686
+ }
303687
+ return resolved;
303688
+ }
303689
+ function listYamlFiles(dir) {
303690
+ if (!fs.existsSync(dir))
303691
+ return [];
303692
+ let entries;
303693
+ try {
303694
+ entries = fs.readdirSync(dir, { withFileTypes: true });
303695
+ }
303696
+ catch (e) {
303697
+ const msg = e instanceof Error ? e.message : String(e);
303698
+ throw new Error(`Failed to read directory "${dir}": ${msg}`);
303699
+ }
303700
+ return entries
303701
+ .filter((e) => e.isFile() && YAML_FILE_REGEX.test(e.name))
303702
+ .map((e) => path.join(dir, e.name));
303703
+ }
303704
+ function loadYaml(file) {
303705
+ try {
303706
+ const configDataRaw = common_slurpFile(external_node_path_.join(file));
303707
+ return catalog_common.io.fromYaml(configDataRaw);
303708
+ }
303709
+ catch (e) {
303710
+ const msg = e instanceof Error ? e.message : String(e);
303711
+ throw new Error(`Failed to parse YAML "${file}": ${msg}`);
303712
+ }
303713
+ }
303714
+ function ensureSafeTmpNames(name) {
303715
+ if (typeof name !== 'string' || !name.trim()) {
303716
+ throw new Error('Test "name" must be a non-empty string');
303717
+ }
303718
+ if (name.length > 128) {
303719
+ throw new Error('Test "name" is too long (max 128 characters)');
303720
+ }
303721
+ if (external_node_path_.isAbsolute(name)) {
303722
+ throw new Error(`Test "name" must be relative, got absolute: "${name}"`);
303723
+ }
303724
+ if (name.includes('..')) {
303725
+ throw new Error('Test "name" must not contain ".."');
303726
+ }
303727
+ if (!/^[A-Za-z0-9._-]+$/.test(name)) {
303728
+ throw new Error('Test "name" may only contain letters, numbers, ".", "_", or "-"');
303729
+ }
303730
+ }
303731
+ async function mkNamedTmp(...names) {
303732
+ for (const name of names) {
303733
+ ensureSafeTmpNames(name);
303734
+ }
303735
+ const dir = external_node_path_.join(external_node_os_namespaceObject.tmpdir(), ...names);
303736
+ await promises_namespaceObject.rm(dir, { recursive: true, force: true });
303737
+ await promises_namespaceObject.mkdir(dir, { recursive: true });
303738
+ return dir;
303739
+ }
303740
+ async function mkTmp(prefix = 'feature-render-') {
303741
+ return await fsp.mkdtemp(path.join(os.tmpdir(), prefix));
303742
+ }
303743
+ function buildExpectedOutput(config, renderDir) {
303744
+ const files = (config.files || []).map((f) => ({
303745
+ localPath: external_node_path_.join(renderDir, f.dest),
303746
+ repoPath: f.dest,
303747
+ userManaged: f.user_managed,
303748
+ }));
303749
+ return {
303750
+ files,
303751
+ patches: config.patches || [],
303752
+ };
303753
+ }
303754
+ /* ---------- Context-style API for a render temp dir ---------- */
303755
+ async function createRenderContext(prefix = 'feature-render-') {
303756
+ const dir = await mkTmp(prefix);
303757
+ const join = (...p) => path.join(dir, ...p);
303758
+ return {
303759
+ getContextPath: () => dir,
303760
+ join,
303761
+ getFile: async (relPath, { yaml: asYaml = false, json: asJson = false, } = {}) => {
303762
+ const data = await fsp.readFile(join(relPath), 'utf8');
303763
+ if (asYaml)
303764
+ return common.io.fromYaml(data);
303765
+ if (asJson)
303766
+ return JSON.parse(data);
303767
+ return data;
303768
+ },
303769
+ getFilePath: (relPath) => join(relPath),
303770
+ setFile: async (relPath, contents) => {
303771
+ await fsp.mkdir(path.dirname(join(relPath)), { recursive: true });
303772
+ await fsp.writeFile(join(relPath), contents);
303773
+ },
303774
+ exists: async (relPath) => {
303775
+ try {
303776
+ await fsp.access(join(relPath));
303777
+ return true;
303778
+ }
303779
+ catch {
303780
+ return false;
303781
+ }
303782
+ },
303783
+ getOutputJson: async () => {
303784
+ const p = join('output.json');
303785
+ const raw = await fsp.readFile(p, 'utf8');
303786
+ return JSON.parse(raw);
303787
+ },
303788
+ list: async (relPath = '.') => {
303789
+ const entries = await fsp.readdir(join(relPath), {
303790
+ withFileTypes: true,
303791
+ });
303792
+ return entries.map((e) => ({ name: e.name, isDir: e.isDirectory() }));
303793
+ },
303794
+ remove: async () => {
303795
+ await fsp.rm(dir, { recursive: true, force: true });
303796
+ },
303797
+ };
303798
+ }
303799
+ /* harmony default export */ const auxiliar = ({
303800
+ mkNamedTmp,
303801
+ loadYaml,
303802
+ buildExpectedOutput,
303803
+ loadAndValidateRenderTests,
303804
+ resolveCrPath,
303805
+ });
303806
+
303208
303807
  ;// CONCATENATED MODULE: ../features_renderer/src/update_file.ts
303209
303808
 
303210
303809
 
@@ -303216,22 +303815,29 @@ function updateFileContent(featureRenderPath, filePath, content) {
303216
303815
 
303217
303816
 
303218
303817
 
303818
+
303219
303819
  /* harmony default export */ const features_renderer = ({
303220
303820
  validate: validate_validate,
303221
303821
  render: render,
303222
303822
  updateFileContent: updateFileContent,
303823
+ auxiliar: auxiliar,
303824
+ buildContext: buildContext,
303825
+ renderContent: renderContent,
303223
303826
  });
303224
303827
 
303828
+ ;// CONCATENATED MODULE: ../features_preparer/src/logger.ts
303829
+
303830
+ /* harmony default export */ const features_preparer_src_logger = (catalog_common.logger);
303831
+
303225
303832
  ;// CONCATENATED MODULE: ../features_preparer/src/renderer.ts
303226
303833
 
303227
303834
 
303228
303835
 
303229
303836
 
303230
- const renderer_messageLog = src_default()('firestartr:features_preparer:renderer');
303231
303837
  function renderFeature(featureName, version, owner, repo, featureOwner, renderPath = '/tmp', featureArgs = {}) {
303232
303838
  const extractPath = external_path_default().join(catalog_common.features.tarballs.getFeaturesExtractPath(featureName, version, owner, repo), 'packages', featureName);
303233
303839
  const renderedPath = catalog_common.features.features.getFeatureRenderedPathForEntity(featureOwner, featureName, renderPath);
303234
- renderer_messageLog(`Rendering feature ${featureName} to ${renderedPath} with component ${JSON.stringify(featureOwner)}`);
303840
+ features_preparer_src_logger.info(`Rendering feature ${featureName} to ${renderedPath} with component ${JSON.stringify(featureOwner)}`);
303235
303841
  return features_renderer.render(extractPath, renderedPath, featureOwner, {}, featureArgs);
303236
303842
  }
303237
303843
 
@@ -303270,7 +303876,6 @@ async function downloadZipBall(url, filePath) {
303270
303876
 
303271
303877
 
303272
303878
 
303273
- const installer_log = src_default()('firestartr:features_preparer:installer');
303274
303879
  async function getFeatureConfigFromRef(featureName, featureRef, featureOwner, // -> cr
303275
303880
  featureArgs = {}, repo = 'features', owner = 'prefapp') {
303276
303881
  // reference is the featureRef directly
@@ -303301,12 +303906,12 @@ async function prepareFeature(featureName, version, repo = 'features', owner = '
303301
303906
  async function downloadFeatureZip(repo, featureName, reference, owner = 'prefapp') {
303302
303907
  try {
303303
303908
  const zipballExtractPath = catalog_common.features.tarballs.getFeaturesExtractPath(featureName, reference, owner, repo, { createIfNotExists: false });
303304
- console.log(`Zipball extract path: ${zipballExtractPath}`);
303909
+ features_preparer_src_logger.debug(`Zipball extract path: ${zipballExtractPath}`);
303305
303910
  if (external_fs_.existsSync(zipballExtractPath)) {
303306
- console.log(`Zipball extract path ${zipballExtractPath} already exists, reusing it.`);
303911
+ features_preparer_src_logger.debug(`Zipball extract path ${zipballExtractPath} already exists, reusing it.`);
303307
303912
  return zipballExtractPath;
303308
303913
  }
303309
- installer_log(`Feature ${[featureName, reference, owner, repo].join('-')} has not been downloaded yet, downloading`);
303914
+ features_preparer_src_logger.info(`Feature ${[featureName, reference, owner, repo].join('-')} has not been downloaded yet, downloading`);
303310
303915
  const octokit = await github_0.getOctokitForOrg(owner);
303311
303916
  const response = await octokit.request('GET /repos/{owner}/{repo}/zipball/{reference}', {
303312
303917
  request: {
@@ -303317,28 +303922,28 @@ async function downloadFeatureZip(repo, featureName, reference, owner = 'prefapp
303317
303922
  reference,
303318
303923
  });
303319
303924
  const randomZipTmpPath = `/tmp/${catalog_common.generic.randomString(20)}.zip`;
303320
- console.log(`Downloading feature ${featureName} version ${reference} to ${randomZipTmpPath}`);
303925
+ features_preparer_src_logger.info(`Downloading feature ${featureName} version ${reference} to ${randomZipTmpPath}`);
303321
303926
  if (external_fs_.existsSync(randomZipTmpPath)) {
303322
- console.log(`Temporary zip file ${randomZipTmpPath} already exists, removing it.`);
303927
+ features_preparer_src_logger.debug(`Temporary zip file ${randomZipTmpPath} already exists, removing it.`);
303323
303928
  external_fs_.unlinkSync(randomZipTmpPath);
303324
303929
  }
303325
303930
  const randomExtractPath = `/tmp/${catalog_common.generic.randomString(20)}`;
303326
- console.log(`Extracting feature ${featureName} version ${reference} to ${randomExtractPath}`);
303931
+ features_preparer_src_logger.debug(`Extracting feature ${featureName} version ${reference} to ${randomExtractPath}`);
303327
303932
  external_fs_.rmSync(randomExtractPath, { recursive: true, force: true });
303328
303933
  await downloadZipBall(response.url, randomZipTmpPath);
303329
303934
  const zip = new (adm_zip_default())(randomZipTmpPath);
303330
303935
  const mainEntry = zip.getEntries()[0].entryName;
303331
- console.log(`Main entry in zip: ${mainEntry}`);
303332
- console.log(`Extracting zip to ${randomExtractPath}`);
303936
+ features_preparer_src_logger.debug(`Main entry in zip: ${mainEntry}`);
303937
+ features_preparer_src_logger.debug(`Extracting zip to ${randomExtractPath}`);
303333
303938
  zip.extractAllTo(randomExtractPath, true);
303334
- console.log(`Renaming main entry ${mainEntry} to ${zipballExtractPath}`);
303939
+ features_preparer_src_logger.debug(`Renaming main entry ${mainEntry} to ${zipballExtractPath}`);
303335
303940
  external_fs_.renameSync(`${randomExtractPath}/${mainEntry}`, zipballExtractPath);
303336
- console.log(`Removing temporary zip file ${randomZipTmpPath}`);
303941
+ features_preparer_src_logger.debug(`Removing temporary zip file ${randomZipTmpPath}`);
303337
303942
  external_fs_.unlinkSync(randomZipTmpPath);
303338
303943
  return zipballExtractPath;
303339
303944
  }
303340
303945
  catch (error) {
303341
- console.error(error);
303946
+ features_preparer_src_logger.error(`Error on prepare feature with tag ${reference}: ${error}`);
303342
303947
  throw new Error(`Error for feature with tag ${reference}: ${error}. GitHub response: ${error}`);
303343
303948
  }
303344
303949
  }
@@ -307224,6 +307829,7 @@ async function renderClaim(catalogScope, firestartrScope, claim, patches, previo
307224
307829
 
307225
307830
 
307226
307831
 
307832
+
307227
307833
  /*
307228
307834
  * Function called when rendering but not importing
307229
307835
  *
@@ -307239,6 +307845,7 @@ async function renderer_render(catalogScope, firestartrScope, activateReferentia
307239
307845
  const data = await loadClaimsList(claimList);
307240
307846
  const result = await renderClaims(catalogScope, firestartrScope, data);
307241
307847
  validateTfStateKeyUniqueness(result);
307848
+ validateCrSizes(result);
307242
307849
  return result;
307243
307850
  }
307244
307851
 
@@ -308927,60 +309534,9 @@ const scaffoldSubcommand = {
308927
309534
 
308928
309535
  // EXTERNAL MODULE: ../../node_modules/@kubernetes/client-node/dist/index.js
308929
309536
  var client_node_dist = __nccwpck_require__(54851);
308930
- // EXTERNAL MODULE: ../../node_modules/winston/lib/winston.js
308931
- var winston = __nccwpck_require__(66752);
308932
- var winston_default = /*#__PURE__*/__nccwpck_require__.n(winston);
308933
309537
  ;// CONCATENATED MODULE: ../operator/src/logger.ts
308934
309538
 
308935
- const validLogLevels = [
308936
- 'error',
308937
- 'warn',
308938
- 'info',
308939
- 'debug',
308940
- 'verbose',
308941
- 'silly',
308942
- ];
308943
- let logger_initiated = false;
308944
- let logger = null;
308945
- // Type guard to check if a value is a valid LogLevel
308946
- function isValidLogLevel(level) {
308947
- return (typeof level === 'string' && validLogLevels.includes(level));
308948
- }
308949
- function initLogger() {
308950
- if (logger_initiated)
308951
- return;
308952
- const logLevel = process.env.LOG_LEVEL && isValidLogLevel(process.env.LOG_LEVEL)
308953
- ? process.env.LOG_LEVEL
308954
- : 'info';
308955
- logger = winston_default().createLogger({
308956
- level: logLevel,
308957
- exitOnError: false,
308958
- format: winston.format.combine(winston.format.timestamp({ format: 'YYYY-MM-DD HH:mm:ss' }), winston.format.json()),
308959
- transports: [
308960
- new winston.transports.Console({
308961
- level: logLevel,
308962
- }),
308963
- ],
308964
- });
308965
- logger_initiated = true;
308966
- }
308967
- function doLog(level, args) {
308968
- initLogger();
308969
- const [message, metadata] = args;
308970
- // eslint-disable-next-line prefer-spread
308971
- logger[level].apply(logger, [
308972
- message + ' | ' + JSON.stringify({ ...metadata }),
308973
- ]);
308974
- }
308975
- const logger_log = {
308976
- error: (...args) => doLog('error', args),
308977
- warn: (...args) => doLog('warn', args),
308978
- info: (...args) => doLog('info', args),
308979
- debug: (...args) => doLog('debug', args),
308980
- verbose: (...args) => doLog('verbose', args),
308981
- silly: (...args) => doLog('silly', args),
308982
- };
308983
- /* harmony default export */ const src_logger = (logger_log);
309539
+ /* harmony default export */ const operator_src_logger = (catalog_common.logger);
308984
309540
 
308985
309541
  ;// CONCATENATED MODULE: ../operator/src/store.ts
308986
309542
 
@@ -308991,13 +309547,7 @@ class Store {
308991
309547
  this.kind = kind;
308992
309548
  }
308993
309549
  add(item) {
308994
- src_logger.debug('STORE_ADD_ITEM', {
308995
- metadata: {
308996
- name: item.metadata.name,
308997
- kind: this.kind,
308998
- namespace: item.metadata.namespace,
308999
- },
309000
- });
309550
+ operator_src_logger.debug(`Added item '${item.metadata.name}' of kind '${this.kind}' to the store in namespace '${item.metadata.namespace}'`);
309001
309551
  this.store[itemPath(this.kind, item)] = {
309002
309552
  item,
309003
309553
  };
@@ -309006,13 +309556,7 @@ class Store {
309006
309556
  return 'deletionTimestamp' in item.metadata;
309007
309557
  }
309008
309558
  markToDelete(item) {
309009
- src_logger.debug('STORE_MARKED_ITEM_TO_DELETE', {
309010
- metadata: {
309011
- name: item.metadata.name,
309012
- kind: this.kind,
309013
- namespace: item.metadata.namespace,
309014
- },
309015
- });
309559
+ operator_src_logger.debug(`Marked item '${item.metadata.name}' of kind '${this.kind}' for deletion in namespace '${item.metadata.namespace}'`);
309016
309560
  this.store[itemPath(this.kind, item)] = {
309017
309561
  item,
309018
309562
  markedToDelete: true,
@@ -309045,24 +309589,11 @@ class Store {
309045
309589
  item,
309046
309590
  };
309047
309591
  if (updated)
309048
- src_logger.debug('STORE_ITEM_MODIFIED', {
309049
- metadata: {
309050
- name: item.metadata.name,
309051
- kind: this.kind,
309052
- namespace: item.metadata.namespace,
309053
- patches,
309054
- },
309055
- });
309592
+ operator_src_logger.debug(`Modified item '${item.metadata.name}' of kind '${this.kind}' in namespace '${item.metadata.namespace}' with patches ${JSON.stringify(patches)}`);
309056
309593
  return updated;
309057
309594
  }
309058
309595
  remove(item) {
309059
- src_logger.debug('STORE_ITEM_REMOVED', {
309060
- metadata: {
309061
- name: item.metadata.name,
309062
- kind: this.kind,
309063
- namespace: item.metadata.namespace,
309064
- },
309065
- });
309596
+ operator_src_logger.debug(`Removed item '${item.metadata.name}' of kind '${this.kind}' from namespace '${item.metadata.namespace}'`);
309066
309597
  delete this.store[itemPath(this.kind, item)];
309067
309598
  }
309068
309599
  getItem(item) {
@@ -309109,7 +309640,7 @@ async function getItem(kind, namespace, item) {
309109
309640
  }
309110
309641
  async function getItemByItemPath(itemPath, apiGroup = catalog_common.types.controller.FirestartrApiGroup, apiVersion = 'v1') {
309111
309642
  try {
309112
- src_logger.debug('CTL_GET_ITEM', { metadata: { itemPath } });
309643
+ operator_src_logger.debug(`The ctl is getting the item at '${itemPath}'.`);
309113
309644
  const { kc, opts } = await ctl_getConnection();
309114
309645
  opts.headers['Content-Type'] = 'application/json';
309115
309646
  opts.headers['Accept'] = 'application/json';
@@ -309119,14 +309650,14 @@ async function getItemByItemPath(itemPath, apiGroup = catalog_common.types.contr
309119
309650
  const r = await fetch(url, { method: 'get', headers: opts.headers });
309120
309651
  if (!r.ok) {
309121
309652
  const err = new Error(`Error on getItemByItemPath: ${itemPath}: ${r.statusText}`);
309122
- console.log(err.stack);
309653
+ operator_src_logger.error(`Error on getItemByItemPath: ${itemPath}: ${r.statusText}`);
309123
309654
  throw err;
309124
309655
  }
309125
309656
  const jsonResponse = await r.json();
309126
309657
  return jsonResponse;
309127
309658
  }
309128
309659
  catch (e) {
309129
- console.dir(e, { depth: null });
309660
+ operator_src_logger.error(`Error on getItemByItemPath: ${e}`);
309130
309661
  throw e;
309131
309662
  }
309132
309663
  }
@@ -309146,17 +309677,15 @@ async function writeManifest(kind, namespace, item, apiSlug) {
309146
309677
  return jsonResponse;
309147
309678
  }
309148
309679
  function writeSecret(secret, namespace) {
309149
- log.debug('CTL_WRITE_SECRET', {
309150
- metadata: { namespace, name: secret.metadata.name },
309151
- });
309680
+ log.debug(`The ctl is writing the secret '${secret.metadata.name}' in namespace '${namespace}'.`);
309152
309681
  return writeManifest('secrets', namespace, secret, `api/v1/namespaces/${namespace}/secrets/${secret.metadata.name}`);
309153
309682
  }
309154
309683
  async function writeStatus(kind, namespace, item) {
309155
- src_logger.debug('CTL_WRITE_STATUS', { metadata: { item } });
309684
+ operator_src_logger.debug(`The ctl is writing the status for item '${item.kind}/${item.metadata.name}' in namespace '${item.metadata.namespace}'.`);
309156
309685
  return await writeManifest(kind, namespace, item, `apis/firestartr.dev/v1/namespaces/${namespace}/${kind}/${item.metadata.name}/status`);
309157
309686
  }
309158
309687
  function writeFinalizer(kind, namespace, item) {
309159
- log.debug('CTL_WRITE_FINALIZER', { metadata: { item } });
309688
+ log.debug(`The ctl is writing the status for item '${item.kind}/${item.metadata.name}' in namespace '${item.metadata.namespace}'.`);
309160
309689
  return writeManifest(kind, namespace, item, `apis/firestartr.dev/v1/namespaces/${namespace}/${kind}/${item.metadata.name}/metadata/finalizers`);
309161
309690
  }
309162
309691
  async function listItems(kind, namespace, kc, opts) {
@@ -309171,7 +309700,7 @@ async function listItems(kind, namespace, kc, opts) {
309171
309700
  return await r.json();
309172
309701
  }
309173
309702
  catch (err) {
309174
- console.dir(err);
309703
+ log.error(`On listItems: ${err}`);
309175
309704
  throw err;
309176
309705
  }
309177
309706
  }
@@ -309198,15 +309727,10 @@ async function* observeList(kind, namespace, revision, kc, opts) {
309198
309727
  }
309199
309728
  catch (err) {
309200
309729
  if (err instanceof TypeError) {
309201
- log.error('CTL_OBSERVE_LIST_ERROR_CHUNKS', {
309202
- metadata: { namespace, kind, revision, error: err },
309203
- });
309730
+ log.error(`The ctl encountered an error while listing chunks for '${kind}' with revision '${revision}' in namespace '${namespace}': '${err}'.`);
309204
309731
  }
309205
309732
  else {
309206
- log.error('CTL_OBSERVE_LIST_UNKNOWN_ERROR_CHUNKS', {
309207
- metadata: { namespace, kind, revision, error: err },
309208
- });
309209
- console.error(err);
309733
+ log.error(`The ctl encountered an unknown error while listing chunks for '${kind}' with revision '${revision}' in namespace '${namespace}': '${err}'.`);
309210
309734
  }
309211
309735
  }
309212
309736
  }
@@ -309229,7 +309753,7 @@ async function ctl_getConnection() {
309229
309753
  return { kc, opts };
309230
309754
  }
309231
309755
  catch (err) {
309232
- console.dir(err, { depth: null });
309756
+ operator_src_logger.error(`getConnection: ${err}`);
309233
309757
  throw err;
309234
309758
  }
309235
309759
  }
@@ -309288,9 +309812,7 @@ async function deleteSecret(secretName, namespace) {
309288
309812
  }
309289
309813
  catch (e) {
309290
309814
  if (e && e.code === 404) {
309291
- src_logger.error('CTL_DELETE_SECRET_NOT_FOUND', {
309292
- metadata: { secretName, namespace },
309293
- });
309815
+ operator_src_logger.error(`The ctl failed to delete the secret '${secretName}' in namespace '${namespace}' because it was not found.`);
309294
309816
  return null;
309295
309817
  }
309296
309818
  else {
@@ -309324,9 +309846,7 @@ async function getSecret(namespace, secretName) {
309324
309846
  }
309325
309847
  catch (e) {
309326
309848
  if (e.response && e.response.statusCode === 404) {
309327
- src_logger.error('CTL_SECRET_NOT_FOUND', {
309328
- metadata: { secretName, namespace },
309329
- });
309849
+ operator_src_logger.error(`The ctl could not find the secret '${secretName}' in namespace '${namespace}'.`);
309330
309850
  return null;
309331
309851
  }
309332
309852
  else {
@@ -309357,13 +309877,7 @@ async function getTFResult(namespace, item) {
309357
309877
  * @param {any} item - Object to check if has been renamed
309358
309878
  */
309359
309879
  async function checkIfRenamed(namespace, item) {
309360
- log.debug('CTL_CHECK_IF_RENAMED', {
309361
- metadata: {
309362
- kind: item.kind,
309363
- name: item.metadata.name,
309364
- namespace,
309365
- },
309366
- });
309880
+ log.debug(`The ctl is checking if item '${item.kind}/${item.metadata.name}' in namespace '${namespace}' has been renamed.`);
309367
309881
  const oldName = item.metadata?.labels?.[common.types.controller.FirestartrLabelOldName];
309368
309882
  // If the item does not have firestartr.dev/old-name label, it has not been renamed
309369
309883
  if (!oldName)
@@ -309379,9 +309893,7 @@ async function checkIfRenamed(namespace, item) {
309379
309893
  });
309380
309894
  if (!r.ok) {
309381
309895
  if (r.status === 404) {
309382
- log.debug('CTL_CHECK_IF_RENAMED_OLDNAME_NOT_FOUND', {
309383
- metadata: { kind: item.kind, name: item.metadata.name, namespace },
309384
- });
309896
+ log.debug(`The ctl is checking for a rename of item '${item.kind}/${item.metadata.name}' in namespace '${namespace}', but the old item name was not found.`);
309385
309897
  return false;
309386
309898
  }
309387
309899
  }
@@ -309389,21 +309901,17 @@ async function checkIfRenamed(namespace, item) {
309389
309901
  return true;
309390
309902
  }
309391
309903
  catch (err) {
309392
- console.log(err);
309904
+ log.debug(err);
309393
309905
  return false;
309394
309906
  }
309395
309907
  }
309396
309908
  async function upsertFinalizer(kind, namespace, item, finalizer) {
309397
309909
  if ('finalizers' in item.metadata &&
309398
309910
  item.metadata.finalizers.includes(finalizer)) {
309399
- src_logger.debug('CTL_UPSERT_FINALIZER_ALREADY_SET', {
309400
- metadata: { finalizer, kind, name: item.metadata.name, namespace },
309401
- });
309911
+ operator_src_logger.debug(`The ctl tried to upsert the finalizer '${finalizer}' for '${kind}/${item.metadata.name}' in namespace '${namespace}', but it was already set.`);
309402
309912
  return;
309403
309913
  }
309404
- src_logger.debug('CTL_UPSERT_FINALIZER_SETTING', {
309405
- metadata: { finalizer, kind, name: item.metadata.name, namespace },
309406
- });
309914
+ operator_src_logger.debug(`The ctl is setting the finalizer '${finalizer}' for '${kind}/${item.metadata.name}' in namespace '${namespace}'.`);
309407
309915
  const { kc, opts } = await ctl_getConnection();
309408
309916
  const url = `${kc.getCurrentCluster().server}/apis/firestartr.dev/v1/namespaces/${namespace}/${kind}/${item.metadata.name}`;
309409
309917
  opts.headers['Content-Type'] = 'application/json-patch+json';
@@ -309444,14 +309952,7 @@ async function upsertFinalizer(kind, namespace, item, finalizer) {
309444
309952
  async function unsetFinalizer(kind, namespace, item, finalizer) {
309445
309953
  const { kc, opts } = await ctl_getConnection();
309446
309954
  const name = typeof item === 'string' ? item : item.metadata.name;
309447
- src_logger.debug('CTL_REMOVE_FINALIZER', {
309448
- metadata: {
309449
- finalizer,
309450
- kind,
309451
- name,
309452
- namespace,
309453
- },
309454
- });
309955
+ operator_src_logger.debug(`The ctl is removing the finalizer '${finalizer}' from '${kind}/${name}' in namespace '${namespace}'.`);
309455
309956
  const url = `${kc.getCurrentCluster().server}/apis/firestartr.dev/v1/namespaces/${namespace}/${kind}/${name}`;
309456
309957
  opts.headers['Content-Type'] = 'application/json-patch+json';
309457
309958
  opts.headers['Accept'] = '*';
@@ -309513,8 +310014,7 @@ async function writePlanInGithubPR(prUrl, planText) {
309513
310014
  await github_0.pulls.commentInPR(message, +pr_number, repo, owner);
309514
310015
  }
309515
310016
  catch (err) {
309516
- console.error(err);
309517
- console.log('Cannot write plan in PR');
310017
+ operator_src_logger.error(`writePlanInGithubPR: Cannot write plan in PR: ${err}`);
309518
310018
  }
309519
310019
  }
309520
310020
  async function addApplyCommitStatus(cr, state, targetURL = '', description = '', context = '') {
@@ -309522,15 +310022,7 @@ async function addApplyCommitStatus(cr, state, targetURL = '', description = '',
309522
310022
  await addCommitStatusToPrMergeCommit(cr.metadata.annotations['firestartr.dev/last-state-pr'], state, targetURL, description, context);
309523
310023
  }
309524
310024
  catch (e) {
309525
- src_logger.error('CTL_ADD_APPLY_COMMIT_STATUS_ERROR', {
309526
- metadata: {
309527
- state,
309528
- targetURL,
309529
- description,
309530
- cr_metadata: cr.metadata,
309531
- error: e,
309532
- },
309533
- });
310025
+ log.error(`The ctl encountered an error while adding commit status for custom resource '${cr.metadata.name}' in namespace '${cr.metadata.namespace}'. State: '${state}'. Target URL: '${targetURL}'. Description: '${description}'. Error: '${e}'.`);
309534
310026
  }
309535
310027
  }
309536
310028
  async function addDestroyCommitStatus(cr, state, description = '', context = '') {
@@ -309539,50 +310031,33 @@ async function addDestroyCommitStatus(cr, state, description = '', context = '')
309539
310031
  await addCommitStatusToPrMergeCommit(prUrl, state, '', description, context);
309540
310032
  }
309541
310033
  catch (e) {
309542
- src_logger.error('CTL_ADD_DESTROY_COMMIT_STATUS_ERROR', {
309543
- metadata: {
309544
- state,
309545
- description,
309546
- cr_metadata: cr.metadata,
309547
- error: e,
309548
- },
309549
- });
310034
+ operator_src_logger.error(`The ctl encountered an error while adding the destroy commit status for custom resource '${cr.metadata.name}' in namespace '${cr.metadata.namespace}'. State: '${state}'. Description: '${description}'. Error: '${e}'.`);
309550
310035
  }
309551
310036
  }
309552
310037
  async function addPlanStatusCheck(prUrl, summary, status = 'in_progress', isFailure = false) {
309553
310038
  try {
309554
- src_logger.debug('CTL_ADD_PLAN_STATUS_CHECK_SUMMARY_LENGTH', {
309555
- metadata: { length: summary.length },
309556
- });
310039
+ operator_src_logger.debug(`The ctl is checking the length of the plan summary, which is '${summary.length}'.`);
309557
310040
  if (summary.length > MAX_CHARS_OUPUT_PLAN) {
309558
310041
  const mustDrop = summary.length - MAX_CHARS_OUPUT_PLAN;
309559
310042
  summary = summary.substring(mustDrop);
309560
- src_logger.debug('CTL_ADD_PLAN_STATUS_CHECK_SUMMARY_TOO_LENGTHY', {
309561
- metadata: { mustDrop, length: summary.length },
309562
- });
310043
+ operator_src_logger.debug(`The ctl found the plan summary too lengthy (length: '${summary.length}'). The summary must drop because '${mustDrop}'.`);
309563
310044
  }
309564
310045
  await ctl_addStatusCheck({ summary, title: 'Terraform Plan Results' }, isFailure, 'terraform_plan', prUrl, status);
309565
310046
  }
309566
310047
  catch (e) {
309567
- src_logger.error('CTL_ADD_PLAN_STATUS_CHECK_ERROR', {
309568
- metadata: { prUrl, status, isFailure, error: e },
309569
- });
310048
+ operator_src_logger.error(`The ctl encountered an error while adding plan status for PR '${prUrl}' with status '${status}'. Is Failure: '${isFailure}'. Error: '${e}'.`);
309570
310049
  }
309571
310050
  }
309572
310051
  async function ctl_addStatusCheck(output, isFailure, name, prAnnotationValue, status) {
309573
310052
  const { owner, repo, prNumber } = catalog_common.generic.getOwnerRepoPrNumberFromAnnotationValue(prAnnotationValue);
309574
310053
  const branchSha = await github_0.pulls.getPrLastCommitSHA(prNumber, repo, owner);
309575
- src_logger.info('CTL_ADD_STATUS_CHECK', {
309576
- metadata: { owner, repo, branchSha, prAnnotationValue, name },
309577
- });
310054
+ operator_src_logger.info(`The ctl is adding a status check for '${owner}/${repo}' on branch '${branchSha}' with PR annotation value '${prAnnotationValue}' and name '${name}'.`);
309578
310055
  await github_0.repo.addStatusCheck(output, isFailure, branchSha, name, status, repo, owner);
309579
310056
  }
309580
310057
  async function addCommitStatusToPrMergeCommit(prAnnotationValue, state, targetURL, description, context) {
309581
310058
  const { owner, repo, prNumber } = catalog_common.generic.getOwnerRepoPrNumberFromAnnotationValue(prAnnotationValue);
309582
310059
  const branchSha = await github_0.pulls.getPrMergeCommitSHA(prNumber, repo, owner);
309583
- src_logger.info('CTL_ADD_COMMIT_STATUS', {
309584
- metadata: { owner, repo, branchSha, state, targetURL },
309585
- });
310060
+ operator_src_logger.info(`The ctl is adding a commit status for '${owner}/${repo}' on branch '${branchSha}'. State: '${state}'. Target URL: '${targetURL}'.`);
309586
310061
  await github_0.repo.addCommitStatus(state, branchSha, repo, owner, targetURL, description, context);
309587
310062
  }
309588
310063
  async function getLastStatePrInfo(cr) {
@@ -309650,62 +310125,42 @@ async function observe(plural, namespace, onAdd, onChange, onDelete, _onRename)
309650
310125
  informer.on('add', (obj) => {
309651
310126
  store.add(obj);
309652
310127
  if (store.hasDeletionTimestamp(obj)) {
309653
- src_logger.info('REFLECTOR_ITEM_MARKED_TO_DELETION', {
309654
- metadata: { kind: obj.kind, name: obj.metadata.name },
309655
- });
310128
+ operator_src_logger.info(`Reflector has marked item '${obj.kind}/${obj.metadata.name}' for deletion.`);
309656
310129
  store.markToDelete(obj);
309657
310130
  onDelete(obj);
309658
310131
  }
309659
310132
  else {
309660
- src_logger.info('REFLECTOR_ITEM_ADDED', {
309661
- metadata: { kind: obj.kind, name: obj.metadata.name },
309662
- });
310133
+ operator_src_logger.info(`Reflector has added item '${obj.kind}/${obj.metadata.name}'.`);
309663
310134
  onAdd(obj);
309664
310135
  }
309665
310136
  });
309666
310137
  informer.on('update', (obj) => {
309667
- src_logger.info('REFLECTOR_ITEM_UPDATED', {
309668
- metadata: {
309669
- kind: obj.kind,
309670
- name: obj.metadata.name,
309671
- resourceVersion: obj.metadata.resourceVersion,
309672
- },
309673
- });
310138
+ operator_src_logger.info(`Reflector has updated item '${obj.kind}/${obj.metadata.name}' to a new resource version: '${obj.metadata.resourceVersion}'.`);
309674
310139
  if (!store.getItem(obj).markedToDelete &&
309675
310140
  store.hasDeletionTimestamp(obj) &&
309676
310141
  (store.hasBeenMarkedToDelete(obj) || store.modified(obj))) {
309677
- src_logger.info('REFLECTOR_ITEM_UPDATED_MARKED_TO_DELETION', {
309678
- metadata: { kind: obj.kind, name: obj.metadata.name },
309679
- });
310142
+ operator_src_logger.info(`Reflector has updated item '${obj.kind}/${obj.metadata.name}' and marked it for deletion.`);
309680
310143
  store.markToDelete(obj);
309681
310144
  onDelete(obj);
309682
310145
  }
309683
310146
  else if (store.modified(obj)) {
309684
- src_logger.info('REFLECTOR_ITEM_UPDATED_AND_MODIFIED', {
309685
- metadata: { kind: obj.kind, name: obj.metadata.name },
309686
- });
310147
+ operator_src_logger.info(`Reflector has updated and modified item '${obj.kind}/${obj.metadata.name}'.`);
309687
310148
  onChange(obj);
309688
310149
  }
309689
310150
  });
309690
310151
  informer.on('delete', (obj) => {
309691
310152
  // deleted from the etcd
309692
- src_logger.info('REFLECTOR_ITEM_DELETED', {
309693
- metadata: { kind: obj.kind, name: obj.metadata.name },
309694
- });
310153
+ operator_src_logger.info(`Reflector has deleted item '${obj.kind}/${obj.metadata.name}' from the etcd.`);
309695
310154
  store.remove(obj);
309696
310155
  });
309697
310156
  informer.on('error', (err) => {
309698
- src_logger.error('REFLECTOR_ITEM_ERROR', {
309699
- metadata: { error: err, plural, namespace },
309700
- });
310157
+ operator_src_logger.error(`An error occurred in the reflector for '${plural}' in namespace '${namespace}': '${err}'.`);
309701
310158
  setTimeout(async () => {
309702
310159
  try {
309703
310160
  await informer.start();
309704
310161
  }
309705
310162
  catch (err) {
309706
- src_logger.error('REFLECTOR_INFORMER_START_ERROR', {
309707
- metadata: { error: err, plural, namespace },
309708
- });
310163
+ operator_src_logger.error(`Failed to start the reflector informer for '${plural}' in namespace '${namespace}': '${err}'.`);
309709
310164
  }
309710
310165
  }, 5000);
309711
310166
  });
@@ -309725,13 +310180,13 @@ async function needsProvisioningOnCreate(cr) {
309725
310180
  const fCrLog = (cr) => `The item ${cr.kind}: ${cr.metadata.name}`;
309726
310181
  // NO STATUS
309727
310182
  if (!('status' in cr) || !('conditions' in cr.status)) {
309728
- src_logger.debug('STATUS_NO_STATUS_NOR_CONDITION', { metadata: { cr } });
310183
+ operator_src_logger.debug(`The custom resource '${cr.kind}/${cr.metadata.name}' is missing a status and any conditions.`);
309729
310184
  return true;
309730
310185
  }
309731
310186
  // ERROR
309732
310187
  const errCond = getConditionByType(cr.status.conditions, 'ERROR');
309733
310188
  if (errCond && errCond.status === 'True') {
309734
- src_logger.debug('STATUS_ERROR_SKIP_PROVISION', { metadata: { cr } });
310189
+ operator_src_logger.debug(`Skipping the provisioning process for custom resource '${cr.kind}/${cr.metadata.name}' due to a status error.`);
309735
310190
  return false;
309736
310191
  }
309737
310192
  // PROVISIONED
@@ -309739,7 +310194,7 @@ async function needsProvisioningOnCreate(cr) {
309739
310194
  if (provCond &&
309740
310195
  provCond.status === 'True' &&
309741
310196
  provCond.observedGeneration >= cr.metadata.generation) {
309742
- src_logger.debug('STATUS_ALREADY_PROVISIONED', { metadata: { cr } });
310197
+ operator_src_logger.debug(`The custom resource '${cr.kind}/${cr.metadata.name}' is already provisioned; skipping the process.`);
309743
310198
  return false;
309744
310199
  }
309745
310200
  // DELETED
@@ -309747,29 +310202,20 @@ async function needsProvisioningOnCreate(cr) {
309747
310202
  if (delCond &&
309748
310203
  delCond.status === 'True' &&
309749
310204
  delCond.observedGeneration >= cr.metadata.generation) {
309750
- src_logger.debug('STATUS_ALREADY_DELETED', { metadata: { cr } });
310205
+ operator_src_logger.debug(`The custom resource '${cr.kind}/${cr.metadata.name}' has already been deleted; no action is required.`);
309751
310206
  return false;
309752
310207
  }
309753
310208
  // PROVISIONING
309754
310209
  const provisioningCondition = getConditionByType(cr.status.conditions, 'PROVISIONING');
309755
310210
  if (provisioningCondition && provisioningCondition.status === 'True') {
309756
- src_logger.debug('STATUS_IN_PROVISIONING_REPROVISIONING', { metadata: { cr } });
310211
+ operator_src_logger.debug(`The custom resource '${cr.kind}/${cr.metadata.name}' is currently in a provisioning or reprovisioning state.`);
309757
310212
  return true;
309758
310213
  }
309759
- src_logger.debug('STATUS_NOT_HANDLED_STATE_SKIP_PROVISIONING', { metadata: { cr } });
310214
+ operator_src_logger.debug(`Skipping the provisioning process for custom resource '${cr.kind}/${cr.metadata.name}' because its current state is not handled.`);
309760
310215
  return false;
309761
310216
  }
309762
310217
  async function updateTransition(itemPath, reason, type, statusValue, message = '', updateStatusOnly = false) {
309763
- src_logger.info('STATUS_UPDATE_TRANSITION_FOR_ITEM', {
309764
- metadata: {
309765
- itemPath,
309766
- reason,
309767
- type,
309768
- statusValue,
309769
- message,
309770
- updateStatusOnly,
309771
- },
309772
- });
310218
+ operator_src_logger.info(`The item at '${itemPath}' transitioned to a new status of '${statusValue}' (type: '${type}'). The reason for the change is '${reason}' with the message: '${message}'. This was a status-only update: '${updateStatusOnly}'.`);
309773
310219
  const k8sItem = await getItemByItemPath(itemPath);
309774
310220
  if (!('status' in k8sItem))
309775
310221
  k8sItem.status = {};
@@ -309852,7 +310298,7 @@ async function syncer(enqueue) {
309852
310298
  void loop(enqueue);
309853
310299
  return {
309854
310300
  addItem(itemPath) {
309855
- src_logger.info('SYNC_ADD_ITEM', { metadata: { itemPath } });
310301
+ operator_src_logger.info(`Added item of path '${itemPath}' for synchronization`);
309856
310302
  void itemIsSyncable(itemPath).then((itemSyncInfo) => {
309857
310303
  if (!itemSyncInfo.syncable) {
309858
310304
  return;
@@ -309864,7 +310310,7 @@ async function syncer(enqueue) {
309864
310310
  }, helperCalculateRevisionTime(itemSyncInfo.period)),
309865
310311
  needsRevision: false,
309866
310312
  };
309867
- src_logger.info('Configured syncing for item %s %s', itemPath, syncWatchers[itemPath]);
310313
+ operator_src_logger.info(`Configured synchronization for item at path '${itemPath}'`);
309868
310314
  });
309869
310315
  },
309870
310316
  updateItem(itemPath) {
@@ -309872,13 +310318,13 @@ async function syncer(enqueue) {
309872
310318
  // log('Item %s not found, ignoring...', itemPath)
309873
310319
  // return
309874
310320
  //}
309875
- src_logger.debug('SYNC_UPDATE_ITEM', { metadata: { itemPath } });
310321
+ operator_src_logger.debug(`Updated item of path '${itemPath}' during synchronization`);
309876
310322
  void itemIsSyncable(itemPath).then((itemSyncInfo) => {
309877
310323
  if (!itemSyncInfo.syncable) {
309878
310324
  if (syncWatchers[itemPath]) {
309879
310325
  clearInterval(syncWatchers[itemPath].lastRevision);
309880
310326
  delete syncWatchers[itemPath];
309881
- src_logger.info('SYNC_REMOVE_FOR_ITEM', { metadata: { itemPath } });
310327
+ operator_src_logger.info(`Removed item of path '${itemPath}' from synchronization`);
309882
310328
  }
309883
310329
  }
309884
310330
  else {
@@ -309892,26 +310338,19 @@ async function syncer(enqueue) {
309892
310338
  }, helperCalculateRevisionTime(itemSyncInfo.period)),
309893
310339
  needsRevision: false,
309894
310340
  };
309895
- src_logger.debug('SYNC_CONFIGURED_FOR_ITEM', {
309896
- metadata: {
309897
- itemPath,
309898
- watcher: syncWatchers[itemPath],
309899
- },
309900
- });
310341
+ operator_src_logger.debug(`Configured synchronization for item at path '${itemPath}' with watcher '${syncWatchers[itemPath]}'`);
309901
310342
  }
309902
310343
  });
309903
310344
  },
309904
310345
  deleteItem(itemPath) {
309905
310346
  if (!syncWatchers[itemPath]) {
309906
- src_logger.debug('SYNC_DELETE_ITEM_NOT_FOUND_IGNORE', {
309907
- metadata: { itemPath },
309908
- });
310347
+ operator_src_logger.debug(`Ignored deletion attempt for item at path '${itemPath}' as it was not found during synchronization`);
309909
310348
  return;
309910
310349
  }
309911
- src_logger.debug('SYNC_DELETE_ITEM', { metadata: { itemPath } });
310350
+ operator_src_logger.debug(`Deleted item of path '${itemPath}' during synchronization`);
309912
310351
  clearInterval(syncWatchers[itemPath].lastRevision);
309913
310352
  delete syncWatchers[itemPath];
309914
- src_logger.debug('SYNC_DELETE_ITEM_DELETED', { metadata: { itemPath } });
310353
+ operator_src_logger.debug(`Successfully deleted item at path '${itemPath}' during synchronization`);
309915
310354
  },
309916
310355
  };
309917
310356
  }
@@ -309983,13 +310422,7 @@ async function initRetry(enqueue) {
309983
310422
  function retry(itemPath) {
309984
310423
  if (retryWatchers[itemPath]) {
309985
310424
  retryWatchers[itemPath].retryCounter++;
309986
- src_logger.debug('RETRY_FAILED', {
309987
- metadata: {
309988
- itemPath,
309989
- remainRetries: MAXRETRY - retryWatchers[itemPath].retryCounter,
309990
- nextRetry: NEXT_RETRY_SECS * retryWatchers[itemPath].retryCounter,
309991
- },
309992
- });
310425
+ operator_src_logger.debug(`Failed to process item '${itemPath}'. Retrying in '${NEXT_RETRY_SECS * retryWatchers[itemPath].retryCounter}' seconds. Remaining retries: '${MAXRETRY - retryWatchers[itemPath].retryCounter}'.`);
309993
310426
  retryWatchers[itemPath].retry = false;
309994
310427
  retryWatchers[itemPath].nextRetry = setTimeout(() => {
309995
310428
  if (itemPath in retryWatchers)
@@ -310040,12 +310473,7 @@ async function getItemIfNeededRetry(watcher) {
310040
310473
  }
310041
310474
  catch (e) {
310042
310475
  if (e.message && e.message.includes('Error on getItemByItemPath')) {
310043
- src_logger.debug('RETRY_ERROR_ITEM_NOT_FOUND', {
310044
- metadata: {
310045
- message: 'item not found, removed from the retry process',
310046
- itemPath: watcher.itemPath,
310047
- },
310048
- });
310476
+ operator_src_logger.debug(`Item '${watcher.itemPath}' not found, so it has been removed from the retry process.`);
310049
310477
  removeFromRetry(watcher.itemPath);
310050
310478
  return null;
310051
310479
  }
@@ -310085,9 +310513,7 @@ async function resolve(cr, getItemByItemPath, getSecret, namespace = 'default')
310085
310513
  async function resolveSecretRef(namespace, crDependency, getSecret) {
310086
310514
  let secretName = `${crDependency['kind']}-${crDependency['metadata']['name']}-outputs`.toLowerCase();
310087
310515
  if (crDependency.kind === 'FirestartrProviderConfig') {
310088
- src_logger.debug('RESOLVER_SKIP_SECRET_RESOLUTION_FOR', {
310089
- metadata: { kind: 'FirestartrProviderConfig', namespace, crDependency },
310090
- });
310516
+ operator_src_logger.debug(`The resolver is skipping secret resolution for '${crDependency.kind}/${crDependency.metadata.name}' of kind 'FirestartrProviderConfig' in namespace '${namespace}'.`);
310091
310517
  return undefined;
310092
310518
  }
310093
310519
  if (crDependency.kind === 'ExternalSecret') {
@@ -310095,9 +310521,7 @@ async function resolveSecretRef(namespace, crDependency, getSecret) {
310095
310521
  }
310096
310522
  const secret = await getSecret(namespace, secretName);
310097
310523
  if (!secret) {
310098
- src_logger.error('RESOLVER_SECRET_NOT_SOLVABLE', {
310099
- metadata: { secretName, crDependency, namespace },
310100
- });
310524
+ operator_src_logger.error(`The resolver could not find the secret '${secretName}' required by custom resource dependency '${crDependency}' in namespace '${namespace}'.`);
310101
310525
  console.error(`Could not resolve secret ${secretName}`);
310102
310526
  }
310103
310527
  return secret;
@@ -310233,9 +310657,7 @@ const kindsWithFinalizer = [
310233
310657
  */
310234
310658
  async function observeKind(pluralKind, namespace, queue, compute) {
310235
310659
  const lastWorkItems = {};
310236
- src_logger.info('INFORMER_OBSERVE_START', {
310237
- metadata: { kind: pluralKind, namespace },
310238
- });
310660
+ operator_src_logger.info(`The informer has started observing the '${pluralKind}' resource in namespace '${namespace}'.`);
310239
310661
  // onSync
310240
310662
  const enqueueCallback = (event) => {
310241
310663
  return async (item) => {
@@ -310252,13 +310674,7 @@ async function observeKind(pluralKind, namespace, queue, compute) {
310252
310674
  await observe(pluralKind, namespace,
310253
310675
  // on add
310254
310676
  async (item) => {
310255
- src_logger.info('INFORMER_ON_ITEM_ADDED', {
310256
- metadata: {
310257
- kind: pluralKind,
310258
- namespace,
310259
- name: item.metadata.name,
310260
- },
310261
- });
310677
+ operator_src_logger.info(`The informer has detected a new item, '${item.metadata.name}', for '${pluralKind}' in namespace '${namespace}'.`);
310262
310678
  await handleUpsertFinalizer(pluralKind, namespace, item);
310263
310679
  const workItem = await inform(pluralKind, item, 'onAdd', getLastWorkItem(pluralKind, lastWorkItems, item));
310264
310680
  syncCtl.addItem(informer_itemPath(pluralKind, item));
@@ -310269,13 +310685,7 @@ async function observeKind(pluralKind, namespace, queue, compute) {
310269
310685
  },
310270
310686
  // on modify
310271
310687
  async (item) => {
310272
- src_logger.info('INFORMER_ON_ITEM_MODIFIED', {
310273
- metadata: {
310274
- kind: pluralKind,
310275
- namespace,
310276
- name: item.metadata.name,
310277
- },
310278
- });
310688
+ operator_src_logger.info(`The informer has detected that item '${item.metadata.name}' for '${pluralKind}' in namespace '${namespace}' was modified.`);
310279
310689
  const workItem = await inform(pluralKind, item, 'onUpdate', getLastWorkItem(pluralKind, lastWorkItems, item));
310280
310690
  if (workItem) {
310281
310691
  setLastWorkItem(pluralKind, lastWorkItems, item, workItem);
@@ -310284,13 +310694,7 @@ async function observeKind(pluralKind, namespace, queue, compute) {
310284
310694
  },
310285
310695
  // on delete
310286
310696
  async (item) => {
310287
- src_logger.info('INFORMER_ON_ITEM_DELETED', {
310288
- metadata: {
310289
- kind: pluralKind,
310290
- namespace,
310291
- name: item.metadata.name,
310292
- },
310293
- });
310697
+ operator_src_logger.info(`The informer has detected that item '${item.metadata.name}' for '${pluralKind}' in namespace '${namespace}' was deleted.`);
310294
310698
  const workItem = await inform(pluralKind, item, 'onMarkedToDeletion', getLastWorkItem(pluralKind, lastWorkItems, item));
310295
310699
  if (workItem) {
310296
310700
  setLastWorkItem(pluralKind, lastWorkItems, item, workItem);
@@ -310300,17 +310704,11 @@ async function observeKind(pluralKind, namespace, queue, compute) {
310300
310704
  },
310301
310705
  // on rename
310302
310706
  async (item) => {
310303
- src_logger.info('INFORMER_ON_ITEM_RENAMED', {
310304
- metadata: {
310305
- kind: pluralKind,
310306
- namespace,
310307
- name: item.metadata.name,
310308
- },
310309
- });
310707
+ operator_src_logger.info(`The informer has detected that an item for '${pluralKind}' in namespace '${namespace}' has been renamed to '${item.metadata.name}'.`);
310310
310708
  const workItem = await inform(pluralKind, item, 'onRename', getLastWorkItem(pluralKind, lastWorkItems, item));
310311
310709
  // Add the renamed item to the sync queue
310312
310710
  syncCtl.addItem(informer_itemPath(pluralKind, item));
310313
- src_logger.debug('INFORMER_RENAMING_ITEM', { metadata: { workItem } });
310711
+ operator_src_logger.debug(`The informer is renaming item '${workItem.item.metadata.name}' of kind '${workItem.item.kind}' due to a change in its name.`);
310314
310712
  if (workItem) {
310315
310713
  const oldName = workItem.item.metadata.labels[catalog_common.types.controller.FirestartrLabelOldName];
310316
310714
  await handleUnsetFinalizer(pluralKind, namespace, item);
@@ -310371,7 +310769,7 @@ function enqueue(pluralKind, workItem, queue, compute, syncCtl, retryCtl) {
310371
310769
  syncCtl.updateItem(informer_itemPath(pluralKind, item));
310372
310770
  }
310373
310771
  else {
310374
- src_logger.debug('INFORMER_NOT_SPEC_OPERATION', { metadata: { operation } });
310772
+ operator_src_logger.debug(`The informer received an item with an operation type of '${operation}', which is not a specific operation.`);
310375
310773
  }
310376
310774
  };
310377
310775
  queue(workItem);
@@ -310414,9 +310812,7 @@ async function inform(pluralKind, item, op, lastWorkItem = null) {
310414
310812
  return workItem;
310415
310813
  case 'onRename':
310416
310814
  if (await needsProvisioningOnCreate(item)) {
310417
- src_logger.debug('INFORMER_ON_RENAME_NEEDS_PROVISION_ON_CREATE', {
310418
- metadata: { item },
310419
- });
310815
+ operator_src_logger.debug(`The informer is triggering a new provisioning process for the renamed item '${item.kind}/${item.metadata.name}'.`);
310420
310816
  workItem = {
310421
310817
  operation: OperationType.RENAMED,
310422
310818
  item,
@@ -310688,15 +311084,7 @@ let INIT = false;
310688
311084
  * @param {WorkItem} workItem - WorkItem to process
310689
311085
  */
310690
311086
  async function processItem(workItem) {
310691
- src_logger.info('PROCESSOR_NEW_WORKITEM', {
310692
- metadata: {
310693
- operation: workItem.operation,
310694
- workStatus: workItem.workStatus,
310695
- kind: workItem.item.kind,
310696
- name: workItem.item.metadata.name,
310697
- namespace: workItem.item.metadata.namespace,
310698
- },
310699
- });
311087
+ operator_src_logger.info(`The processor received a new work item for '${workItem.operation}' operation on '${workItem.item.kind}/${workItem.item.metadata.name}' in namespace '${workItem.item.metadata.namespace}'. Current work status is '${workItem.workStatus}'.`);
310700
311088
  queue.push(workItem);
310701
311089
  if (!INIT) {
310702
311090
  processItem_loop().catch((err) => {
@@ -310717,15 +311105,7 @@ async function processItem_loop() {
310717
311105
  const logMessage = `${new Date().toISOString()} : Processing OPERATION: ${w.operation} ITEM: ${w.item.kind}/${w.item.metadata.name}`;
310718
311106
  catalog_common.io.writeLogFile('process_item', logMessage);
310719
311107
  const timeout = createTimeout(w);
310720
- src_logger.info('PROCESSOR_PROCESSING_WORKITEM', {
310721
- metadata: {
310722
- operation: w.operation,
310723
- workStatus: w.workStatus,
310724
- kind: w.item.kind,
310725
- name: w.item.metadata.name,
310726
- namespace: w.item.metadata.namespace,
310727
- },
310728
- });
311108
+ operator_src_logger.info(`The processor is currently handling a '${w.operation}' operation for item '${w.item.kind}/${w.item.metadata.name}' in namespace '${w.item.metadata.namespace}'. The current work status is '${w.workStatus}'.`);
310729
311109
  await runWorkItem(w);
310730
311110
  clearTimeout(timeout);
310731
311111
  }
@@ -310741,15 +311121,7 @@ function createTimeout(w) {
310741
311121
  return setTimeout(() => {
310742
311122
  //throw new Error('Timeout on workitem ' + w);
310743
311123
  console.error('Timeout on workitem %O', w);
310744
- src_logger.error('PROCESSOR_TIMEOUT_ON_WORKITEM', {
310745
- metadata: {
310746
- operation: w.operation,
310747
- workStatus: w.workStatus,
310748
- kind: w.item.kind,
310749
- name: w.item.metadata.name,
310750
- namespace: w.item.metadata.namespace,
310751
- },
310752
- });
311124
+ operator_src_logger.error(`The processor timed out while handling a '${w.operation}' operation for item '${w.item.kind}/${w.item.metadata.name}' in namespace '${w.item.metadata.namespace}'. The current work status is '${w.workStatus}'.`);
310753
311125
  process.exit(1);
310754
311126
  }, TIMEOUTS[w.operation] * 1000);
310755
311127
  }
@@ -310775,7 +311147,7 @@ function processItem_wait(t = 2000) {
310775
311147
  return new Promise((ok) => setTimeout(ok, t));
310776
311148
  }
310777
311149
  async function runWorkItem(workItem) {
310778
- src_logger.debug('PROCESSOR_RUNNING_WORK_ITEM', { metadata: { workItem } });
311150
+ operator_src_logger.debug(`The processor is now running the '${workItem.operation}' operation for item '${workItem.item.kind}/${workItem.item.metadata.name}' in namespace '${workItem.item.metadata.namespace}'.`);
310779
311151
  if (!workItem.getItem || !workItem.process || !workItem.operation)
310780
311152
  return;
310781
311153
  try {
@@ -310787,33 +311159,17 @@ async function runWorkItem(workItem) {
310787
311159
  await updateTransition(workItem.handler.itemPath(), condition.reason, condition.type, condition.status, condition.message, condition.updateStatusOnly || false);
310788
311160
  }
310789
311161
  workItem.workStatus = WorkStatus.FINISHED;
310790
- src_logger.debug('PROCESSOR_REMAIN_ITEMS_IN_QUEUE', {
310791
- metadata: { remainingItems: queue.length },
310792
- });
311162
+ operator_src_logger.debug(`The processor has '${queue.length}' items remaining in the queue.`);
310793
311163
  }
310794
311164
  catch (e) {
310795
311165
  if (e instanceof Error &&
310796
311166
  e.message.includes('Error on getItemByItemPath')) {
310797
- src_logger.debug('PROCESSOR_ERROR_ITEM_NOT_FOUND', {
310798
- metadata: {
310799
- workItem,
310800
- message: 'item was not found, removing work item from queue',
310801
- },
310802
- });
311167
+ operator_src_logger.debug(`Item '${workItem.item.kind}/${workItem.item.metadata.name}' was not found, so its work item is being removed from the processor queue.`);
310803
311168
  workItem.workStatus = WorkStatus.FINISHED;
310804
311169
  return;
310805
311170
  }
310806
311171
  else {
310807
- src_logger.error('PROCESSOR_ERROR_PROCESSING_WORKITEM', {
310808
- metadata: {
310809
- operation: workItem.operation,
310810
- workStatus: workItem.workStatus,
310811
- kind: workItem.item.kind,
310812
- name: workItem.item.metadata.name,
310813
- namespace: workItem.item.metadata.namespace,
310814
- error: e,
310815
- },
310816
- });
311172
+ operator_src_logger.error(`An error occurred while the processor was handling the '${workItem.operation}' operation for item '${workItem.item.kind}/${workItem.item.metadata.name}' in namespace '${workItem.item.metadata.namespace}'. Current work status is '${workItem.workStatus}'. The error was: '${e}'.`);
310817
311173
  console.error(e);
310818
311174
  }
310819
311175
  return;
@@ -310825,11 +311181,7 @@ async function runWorkItem(workItem) {
310825
311181
  */
310826
311182
  async function workItemGarbageCollector(queue) {
310827
311183
  while (1) {
310828
- src_logger.debug('PROCESS_ITEM_GARBAGE_COLLECTOR_RUN', {
310829
- metadata: {
310830
- workItemsFound: queue.length,
310831
- },
310832
- });
311184
+ operator_src_logger.debug(`The garbage collector processed '${queue.length}' work items.`);
310833
311185
  for (const [index, wi] of queue.entries()) {
310834
311186
  if (wi.workStatus === WorkStatus.FINISHED) {
310835
311187
  // Because the queue is a constant, we cannot reassign it, instead we
@@ -310838,11 +311190,7 @@ async function workItemGarbageCollector(queue) {
310838
311190
  queue.splice(index, 1);
310839
311191
  }
310840
311192
  }
310841
- src_logger.debug('PROCESS_ITEM_GARBAGE_COLLECTOR_FINISHED', {
310842
- metadata: {
310843
- workItemsLeft: queue.length,
310844
- },
310845
- });
311193
+ operator_src_logger.debug(`The garbage collector finished its run, leaving '${queue.length}' work items in the queue.`);
310846
311194
  await processItem_wait(10 * 1000);
310847
311195
  }
310848
311196
  }
@@ -310854,11 +311202,14 @@ if (process.env.GARBAGE_QUEUE_COLLECTOR) {
310854
311202
  var cdktf_lib = __nccwpck_require__(95933);
310855
311203
  // EXTERNAL MODULE: ../../node_modules/@cdktf/provider-github/lib/provider/index.js
310856
311204
  var lib_provider = __nccwpck_require__(95107);
310857
- ;// CONCATENATED MODULE: ../provisioner/src/entities/base/Entity.ts
311205
+ ;// CONCATENATED MODULE: ../provisioner/src/logger.ts
310858
311206
 
311207
+ /* harmony default export */ const provisioner_src_logger = (catalog_common.logger);
311208
+
311209
+ ;// CONCATENATED MODULE: ../provisioner/src/entities/base/Entity.ts
310859
311210
 
310860
- const Entity_log = src_default()('firestartr:provisioner:entity:base');
310861
311211
  const EXTERNAL_NAME_ANNOTATION = 'firestartr.dev/external-name';
311212
+
310862
311213
  class Metadata {
310863
311214
  constructor(metadata) {
310864
311215
  this._metadata = metadata;
@@ -310905,11 +311256,13 @@ class Entity {
310905
311256
  }
310906
311257
  resolveRef(ref, propertyRef) {
310907
311258
  if (!this.deps) {
310908
- throw `resolveRef:
311259
+ const ErrorMessage = `resolveRef:
310909
311260
 
310910
311261
  Entity with kind ${this.kind} ${this.metadata.name}
310911
311262
 
310912
311263
  does not have any dependencies`;
311264
+ provisioner_src_logger.error(ErrorMessage);
311265
+ throw new Error(ErrorMessage);
310913
311266
  }
310914
311267
  const { kind, name, needsSecret } = ref;
310915
311268
  if (!needsSecret) {
@@ -310918,22 +311271,26 @@ class Entity {
310918
311271
  }
310919
311272
  else {
310920
311273
  if (!propertyRef) {
310921
- throw `resolveRef:
311274
+ const ErrorMessage = `resolveRef:
310922
311275
 
310923
311276
  Entity with kind ${this.kind} ${this.metadata.name}
310924
311277
 
310925
311278
  needs a propertyRef to resolve the secret`;
311279
+ provisioner_src_logger.error(ErrorMessage);
311280
+ throw new Error(ErrorMessage);
310926
311281
  }
310927
311282
  return Buffer.from(this.deps[`${kind}-${name}`].secret.data[propertyRef], 'base64').toString('utf8');
310928
311283
  }
310929
311284
  }
310930
311285
  resolveSecretRef(ref) {
310931
311286
  if (!this.deps) {
310932
- throw `resolveSecretRef:
311287
+ const ErrorMessage = `resolveSecretRef:
310933
311288
 
310934
311289
  Entity with kind ${this.kind} ${this.metadata.name}
310935
311290
 
310936
311291
  does not have any dependencies`;
311292
+ provisioner_src_logger.error(ErrorMessage);
311293
+ throw new Error(ErrorMessage);
310937
311294
  }
310938
311295
  const { name, key } = ref;
310939
311296
  return Buffer.from(this.deps[`Secret-${name}`].cr.data[key], 'base64').toString('utf8');
@@ -310941,11 +311298,13 @@ class Entity {
310941
311298
  resolveOutputs(scope) {
310942
311299
  if (this.spec.writeConnectionSecretToRef) {
310943
311300
  if (!this.mainResource) {
310944
- throw `resolveOutputs:
311301
+ const ErrorMessage = `resolveOutputs:
310945
311302
 
310946
311303
  Entity with kind ${this.kind} ${this.metadata.name}
310947
311304
 
310948
311305
  does not have a mainResource`;
311306
+ provisioner_src_logger.error(ErrorMessage);
311307
+ throw new Error(ErrorMessage);
310949
311308
  }
310950
311309
  /**
310951
311310
  * We don't currently support writing outputs to modules
@@ -310955,13 +311314,15 @@ class Entity {
310955
311314
  const keys = this.getKeysFrom(this.mainResource);
310956
311315
  const outputs = this.spec.writeConnectionSecretToRef.outputs;
310957
311316
  for (const o of outputs) {
310958
- Entity_log('OUTPUT %s', o.key);
311317
+ provisioner_src_logger.debug('OUTPUT %s', o.key);
310959
311318
  if (!keys.includes(o.key)) {
310960
- throw `resolveOutputs:
311319
+ const ErrorMessage = `resolveOutputs:
310961
311320
 
310962
311321
  Entity with kind ${this.kind} ${this.metadata.name}
310963
311322
 
310964
311323
  does not have the output ${o.key}`;
311324
+ provisioner_src_logger.error(ErrorMessage);
311325
+ throw new Error(ErrorMessage);
310965
311326
  }
310966
311327
  new cdktf_lib.TerraformOutput(scope, o.key, {
310967
311328
  value: this.mainResource.getAnyMapAttribute(this.camelToSnake(o.key)),
@@ -310994,7 +311355,6 @@ var repository_file = __nccwpck_require__(79507);
310994
311355
  ;// CONCATENATED MODULE: ../provisioner/src/entities/firestartrgithubrepository/helpers/CodeownersHelper.ts
310995
311356
 
310996
311357
 
310997
- const CodeownersHelper_messageLog = src_default()('firestartr:provisioner:entities:component:helpers:codeownerscreator');
310998
311358
  function provisionCodeowners(scope, repo, branchDefault, fsGithubRepository) {
310999
311359
  const config = {
311000
311360
  dependsOn: [repo, branchDefault],
@@ -311005,7 +311365,7 @@ function provisionCodeowners(scope, repo, branchDefault, fsGithubRepository) {
311005
311365
  overwriteOnCreate: true,
311006
311366
  repository: repo.name,
311007
311367
  };
311008
- CodeownersHelper_messageLog(`Content of the codeowners: ${fsGithubRepository.spec.repo.codeowners}`);
311368
+ provisioner_src_logger.debug(`Content of the codeowners: ${fsGithubRepository.spec.repo.codeowners}`);
311009
311369
  const tfStateKey = `_${fsGithubRepository.getTfStateKey()}-codeowners`;
311010
311370
  new repository_file/* RepositoryFile */.h(scope, tfStateKey, config);
311011
311371
  }
@@ -311018,9 +311378,8 @@ var repository_collaborator = __nccwpck_require__(33786);
311018
311378
 
311019
311379
 
311020
311380
 
311021
- const RepositoryTeamsHelper_messageLog = src_default()('firestartr:provisioner:entities:component:helpers:repositoryteamshelper');
311022
311381
  function provisionPermissions(scope, repo, fsGithubRepository) {
311023
- RepositoryTeamsHelper_messageLog(`provisionRepositoryTeams with name ${fsGithubRepository.metadata.name} in org ${fsGithubRepository.spec.org}`);
311382
+ provisioner_src_logger.info(`provisionRepositoryTeams with name ${fsGithubRepository.metadata.name} in org ${fsGithubRepository.spec.org}`);
311024
311383
  for (const permission of fsGithubRepository.spec.permissions) {
311025
311384
  if ('ref' in permission) {
311026
311385
  const tfStateKey = `_${fsGithubRepository.getTfStateKey()}-${permission.ref.kind}-${permission.ref.name}-tr`;
@@ -311066,9 +311425,8 @@ var branch_protection_v3 = __nccwpck_require__(31706);
311066
311425
 
311067
311426
 
311068
311427
 
311069
- const RepositoryHelper_messageLog = src_default()('firestartr:provisioner:entities:component:helpers:repositoryhelper');
311070
311428
  function provisionRepository(scope, fsGithubRepository) {
311071
- RepositoryHelper_messageLog(`provisionRepository with name ${fsGithubRepository.metadata.name} in org ${fsGithubRepository.spec.org}`);
311429
+ provisioner_src_logger.info(`provisionRepository with name ${fsGithubRepository.metadata.name} in org ${fsGithubRepository.spec.org}`);
311072
311430
  const config = {
311073
311431
  name: fsGithubRepository.metadata.name,
311074
311432
  description: fsGithubRepository.spec.repo.description,
@@ -311105,7 +311463,7 @@ function provisionRepository(scope, fsGithubRepository) {
311105
311463
  return repo;
311106
311464
  }
311107
311465
  function provisionBranchProtections(scope, repo, fsGithubRepository) {
311108
- RepositoryHelper_messageLog(`provisionBranchProtections with name ${fsGithubRepository.metadata.name} in org ${fsGithubRepository.spec.org}`);
311466
+ provisioner_src_logger.info(`provisionBranchProtections with name ${fsGithubRepository.metadata.name} in org ${fsGithubRepository.spec.org}`);
311109
311467
  for (const branchProtection of fsGithubRepository.spec.branchProtections) {
311110
311468
  const tfStateKey = `_${fsGithubRepository.getTfStateKey()}-${branchProtection.pattern}-bp`;
311111
311469
  const statusChecks = {
@@ -311130,8 +311488,6 @@ function provisionBranchProtections(scope, repo, fsGithubRepository) {
311130
311488
 
311131
311489
  ;// CONCATENATED MODULE: ../provisioner/src/config/config.ts
311132
311490
 
311133
-
311134
- const config_messageLog = src_default()('firestartr:provisioner:config');
311135
311491
  /**
311136
311492
  * @description Valid plans for the account
311137
311493
  * @type {Set<string>}
@@ -311238,13 +311594,12 @@ class FirestartrGithubRepository_FirestartrGithubRepository extends Entity {
311238
311594
 
311239
311595
 
311240
311596
 
311241
- const provisioner_messageLog = src_default()('firestartr:provisioner:features:provisioner');
311242
311597
  function provisionFeatureFiles(scope, feature) {
311243
- provisioner_messageLog(`Provisioning feature ${feature.spec.type} for ${feature.spec.repositoryTarget.name}`);
311244
- provisioner_messageLog('Feature output json: %O', feature);
311598
+ provisioner_src_logger.info(`Provisioning feature ${feature.spec.type} for ${feature.spec.repositoryTarget.name}`);
311599
+ provisioner_src_logger.debug('Feature output json: %O', feature);
311245
311600
  if (feature.spec.files) {
311246
311601
  for (const file of feature.spec.files) {
311247
- provisioner_messageLog('Provisioning file %O', file);
311602
+ provisioner_src_logger.debug('Provisioning file %O', file);
311248
311603
  const lifecycleArg = file.userManaged
311249
311604
  ? { ignoreChanges: ['content'] }
311250
311605
  : {};
@@ -311279,8 +311634,6 @@ class FirestartrGithubRepositoryFeature_FirestartrGithubRepositoryFeature extend
311279
311634
  var lib_membership = __nccwpck_require__(27501);
311280
311635
  ;// CONCATENATED MODULE: ../provisioner/src/entities/firestartrgithubmembership/helpers/MembershipHelper.ts
311281
311636
 
311282
-
311283
- const MembershipHelper_messageLog = src_default()('firestartr:provisioner:modules:artifacts:userartifact');
311284
311637
  function provisionMembership(scope, fsGithubMembership) {
311285
311638
  const tfStateKey = `_${fsGithubMembership.getTfStateKey()}`;
311286
311639
  const membership = new lib_membership/* Membership */.E(scope, tfStateKey, {
@@ -311295,8 +311648,6 @@ function provisionMembership(scope, fsGithubMembership) {
311295
311648
  var team_membership = __nccwpck_require__(93268);
311296
311649
  ;// CONCATENATED MODULE: ../provisioner/src/entities/firestartrgithubmembership/helpers/MembershipAllGroupHelper.ts
311297
311650
 
311298
-
311299
- const MembershipAllGroupHelper_messageLog = src_default()('firestartr:provisioner:modules:artifacts:membership:all-group-helper');
311300
311651
  function provisionAllGroupMembershipRelation(scope, fsGithubMembership) {
311301
311652
  const tfStateKey = `_${fsGithubMembership.getTfStateKey()}`;
311302
311653
  const config = {
@@ -311327,11 +311678,10 @@ class FirestartrGithubMembership_FirestartrGithubMembership extends Entity {
311327
311678
  var lib_team = __nccwpck_require__(57889);
311328
311679
  ;// CONCATENATED MODULE: ../provisioner/src/entities/firestartrgithubgroup/helpers/TeamsHelper.ts
311329
311680
 
311330
-
311331
311681
  // import { TeamConfigAux } from '../auxiliars/TeamConfigAux';
311332
- const TeamsHelper_messageLog = src_default()('firestartr:provisioner:entities:component:helpers:repositoryhelper');
311682
+
311333
311683
  function provisionGroup(scope, fsGithubGroup) {
311334
- TeamsHelper_messageLog(`provisionGroup with name ${fsGithubGroup.metadata.name} in org ${fsGithubGroup.spec.org}`);
311684
+ provisioner_src_logger.info(`provisionGroup with name ${fsGithubGroup.metadata.name} in org ${fsGithubGroup.spec.org}`);
311335
311685
  const config = {
311336
311686
  name: fsGithubGroup.metadata.name,
311337
311687
  description: fsGithubGroup.spec.description,
@@ -311349,11 +311699,10 @@ function provisionGroup(scope, fsGithubGroup) {
311349
311699
  ;// CONCATENATED MODULE: ../provisioner/src/entities/firestartrgithubgroup/helpers/TeamMembersHelper.ts
311350
311700
 
311351
311701
 
311352
- const TeamMembersHelper_messageLog = src_default()('firestartr:provisioner:entities:component:helpers:teamsmembershiphelper');
311353
311702
  function provisionMembers(scope, team, fsGithubGroup) {
311354
- TeamMembersHelper_messageLog(`provisionMembers of group ${fsGithubGroup.metadata.name} in org ${fsGithubGroup.spec.org}`);
311703
+ provisioner_src_logger.info(`provisionMembers of group ${fsGithubGroup.metadata.name} in org ${fsGithubGroup.spec.org}`);
311355
311704
  for (const member of fsGithubGroup.spec.members) {
311356
- TeamMembersHelper_messageLog(`Provisioning user ${member.ref.name} for group ${fsGithubGroup.metadata.name}`);
311705
+ provisioner_src_logger.info(`Provisioning user ${member.ref.name} for group ${fsGithubGroup.metadata.name}`);
311357
311706
  const tfStateKey = `_${fsGithubGroup.getTfStateKey()}-${member.ref.kind}-${member.ref.name}-tr`;
311358
311707
  if (member.ref.kind === 'FirestartrGithubMembership') {
311359
311708
  const username = fsGithubGroup.resolveRef(member.ref);
@@ -311388,8 +311737,6 @@ class FirestartrGithubGroup_FirestartrGithubGroup extends Entity {
311388
311737
  var organization_webhook = __nccwpck_require__(80516);
311389
311738
  ;// CONCATENATED MODULE: ../provisioner/src/entities/firestartrgithuborgwebhook/helpers/OrgWebhookHelper.ts
311390
311739
 
311391
-
311392
- const OrgWebhookHelper_messageLog = src_default()('firestartr:provisioner:modules:artifacts:orgwebhook');
311393
311740
  function provisionOrgWebhook(scope, fsGithubOrgWebhook) {
311394
311741
  const tfStateKey = `_${fsGithubOrgWebhook.getTfStateKey()}`;
311395
311742
  const webhookConfig = {
@@ -311655,7 +312002,6 @@ var FirestartrTerraformProvider;
311655
312002
 
311656
312003
 
311657
312004
 
311658
- const GithubStack_messageLog = src_default()('firestartr:provisioner:stacks:githubstack');
311659
312005
  class GithubStack extends BaseStack {
311660
312006
  async provisionEntity(isImport, entity, deps, tfStatePath, orgConfig) {
311661
312007
  try {
@@ -311671,7 +312017,7 @@ class GithubStack extends BaseStack {
311671
312017
  }
311672
312018
  }
311673
312019
  catch (err) {
311674
- GithubStack_messageLog('Error: provisionEntity: %s', err);
312020
+ provisioner_src_logger.error('Error: provisionEntity: %s', err);
311675
312021
  throw err;
311676
312022
  }
311677
312023
  }
@@ -311692,7 +312038,6 @@ class GithubStack extends BaseStack {
311692
312038
 
311693
312039
 
311694
312040
 
311695
- const TerraformModuleStack_messageLog = src_default()('firestartr:provisioner:stacks:terraformmodulestack');
311696
312041
  class TerraformModuleStack extends BaseStack {
311697
312042
  async provisionEntity(isImport, entity, deps, tfStatePath, orgConfig) {
311698
312043
  try {
@@ -311705,7 +312050,7 @@ class TerraformModuleStack extends BaseStack {
311705
312050
  }
311706
312051
  }
311707
312052
  catch (err) {
311708
- TerraformModuleStack_messageLog('Error: provisionEntity: %s', err);
312053
+ provisioner_src_logger.error('Error: provisionEntity: %s', err);
311709
312054
  throw err;
311710
312055
  }
311711
312056
  }
@@ -311761,9 +312106,10 @@ function __calculateTFStatePath(entity) {
311761
312106
  ;// CONCATENATED MODULE: ../provisioner/src/cdktf.ts
311762
312107
 
311763
312108
 
311764
- async function runCDKTF(entityPath, action, depsPath) {
312109
+ async function runCDKTF(entityPath, action, depsPath, stream) {
311765
312110
  return new Promise((ok, ko) => {
311766
312111
  const cdktfProcess = (0,external_child_process_.spawn)('cdktf', [action, '--log-level', 'DEBUG', '--auto-approve'], {
312112
+ stdio: ['inherit', 'pipe', 'pipe'],
311767
312113
  cwd: process.env.IS_DEV_LOCAL_ENVIRONMENT
311768
312114
  ? '/library/packages/provisioner'
311769
312115
  : '/library/provisioner',
@@ -311791,10 +312137,14 @@ async function runCDKTF(entityPath, action, depsPath) {
311791
312137
  const logparsed = log.toString();
311792
312138
  if (!logparsed.includes('Synthesizing')) {
311793
312139
  output += catalog_common.io.stripAnsi(logparsed);
312140
+ if (stream)
312141
+ stream.write(catalog_common.io.stripAnsi(logparsed));
311794
312142
  }
311795
312143
  });
311796
312144
  cdktfProcess.stderr.on('data', (log) => {
311797
312145
  output += catalog_common.io.stripAnsi(log.toString());
312146
+ if (stream)
312147
+ stream.write(catalog_common.io.stripAnsi(log.toString()));
311798
312148
  });
311799
312149
  cdktfProcess.on('exit', async (code) => {
311800
312150
  if (code !== 0) {
@@ -311812,7 +312162,6 @@ async function runCDKTF(entityPath, action, depsPath) {
311812
312162
 
311813
312163
 
311814
312164
 
311815
- const installer_messageLog = src_default()('firestartr:provisioner:features:installer');
311816
312165
  async function installer_installFeaturesForComponent(component, store) {
311817
312166
  const componentFeatures = component.spec?.provisioner?.features || '[]';
311818
312167
  const componentFeaturesToInstall = componentFeatures.filter((feature) => {
@@ -311821,7 +312170,7 @@ async function installer_installFeaturesForComponent(component, store) {
311821
312170
  });
311822
312171
  if (componentFeaturesToInstall.length > 0) {
311823
312172
  for (const feature of componentFeaturesToInstall) {
311824
- installer_messageLog('Installing feature %s for component %s', feature.name, component.metadata.name);
312173
+ log.info('Installing feature %s for component %s', feature.name, component.metadata.name);
311825
312174
  // Get feature config
311826
312175
  const featureConfig = common.features.features.getFeatureRenderedConfigForComponent(component, feature.name);
311827
312176
  // prepare files
@@ -311834,7 +312183,7 @@ async function installer_installFeaturesForComponent(component, store) {
311834
312183
  }
311835
312184
  }
311836
312185
  else {
311837
- installer_messageLog(`No features to install for component ${component.metadata.name}`);
312186
+ log.error(`No features to install for component ${component.metadata.name}`);
311838
312187
  }
311839
312188
  return store;
311840
312189
  }
@@ -311861,7 +312210,7 @@ async function getFileContentFromGithubIfExists(path, repositoryName, owner) {
311861
312210
  }
311862
312211
  catch (e) {
311863
312212
  if (e.status === 404) {
311864
- installer_messageLog(`File ${path} not found in ${repositoryName}`);
312213
+ log.debug(`File ${path} not found in ${repositoryName}`);
311865
312214
  return false;
311866
312215
  }
311867
312216
  throw e;
@@ -311886,7 +312235,6 @@ function isFreshInstallation(featureName, component) {
311886
312235
 
311887
312236
 
311888
312237
 
311889
- const preparer_messageLog = src_default()('firestartr:provisioner:features:installer');
311890
312238
  async function preparer_prepareFeaturesForComponent(component, store) {
311891
312239
  // those are the features to maintain
311892
312240
  let componentFeatures = component.spec?.provisioner?.features || [];
@@ -311901,7 +312249,7 @@ async function preparer_prepareFeaturesForComponent(component, store) {
311901
312249
  if (componentFeatures.length > 0) {
311902
312250
  const entityPath = dumpArtifactYaml(component);
311903
312251
  for (const feature of componentFeatures) {
311904
- preparer_messageLog('Installing feature %s for component %s', feature.name, component.metadata.name);
312252
+ log.info('Installing feature %s for component %s', feature.name, component.metadata.name);
311905
312253
  await featuresPreparer.getFeatureConfig(feature.name, feature.version, entityPath);
311906
312254
  // Get feature config
311907
312255
  const featureConfig = common.features.features.getFeatureRenderedConfigForComponent(component, feature.name);
@@ -311925,17 +312273,17 @@ const external_node_readline_namespaceObject = __WEBPACK_EXTERNAL_createRequire(
311925
312273
 
311926
312274
 
311927
312275
 
311928
- const terraform_messageLog = src_default()('firestartr:provisioner:terraform');
311929
- async function runTerraform(entity, command) {
312276
+ async function runTerraform(entity, command, stream) {
311930
312277
  let entityID = `${entity.kind.toLowerCase()}--${entity['spec']['firestartr']['tfStateKey']}`;
311931
312278
  if (entity.kind === 'FirestartrGithubRepositoryFeature')
311932
312279
  entityID = `${entity.kind.toLowerCase()}--${entity.metadata.name}`;
311933
312280
  const workDir = external_path_.join(process.env.IS_DEV_LOCAL_ENVIRONMENT
311934
312281
  ? '/library/packages/provisioner'
311935
312282
  : '/library/provisioner', 'cdktf.out', 'stacks', entityID);
311936
- terraform_messageLog(`Running terraform with command ${command} in ${workDir}`);
312283
+ provisioner_src_logger.info(`Running terraform with command ${command} in ${workDir}`);
311937
312284
  return new Promise((ok, ko) => {
311938
312285
  const terraformProcess = (0,external_child_process_.spawn)('terraform', [...command], {
312286
+ stdio: ['inherit', 'pipe', 'pipe'],
311939
312287
  cwd: workDir,
311940
312288
  env: {
311941
312289
  PATH: process.env.PATH,
@@ -311953,17 +312301,18 @@ async function runTerraform(entity, command) {
311953
312301
  terraformProcess.stdout.on('data', (log) => {
311954
312302
  const line = catalog_common.io.stripAnsi(log.toString());
311955
312303
  output += line;
311956
- console.log(line);
312304
+ if (stream)
312305
+ stream.write(line);
311957
312306
  });
311958
312307
  terraformProcess.stderr.on('data', (log) => {
311959
312308
  const line = catalog_common.io.stripAnsi(log.toString());
311960
312309
  output += line;
311961
- console.log(line);
312310
+ if (stream)
312311
+ stream.write(line);
311962
312312
  });
311963
312313
  terraformProcess.on('exit', async (code) => {
311964
312314
  console.log(`child process exited with code ${code}`);
311965
312315
  if (code !== 0) {
311966
- console.log(output);
311967
312316
  ko(output);
311968
312317
  }
311969
312318
  else {
@@ -311972,13 +312321,13 @@ async function runTerraform(entity, command) {
311972
312321
  });
311973
312322
  });
311974
312323
  }
311975
- function terraformInit(entity) {
311976
- return runTerraform(entity, ['init', '-no-color']);
312324
+ function terraformInit(entity, stream) {
312325
+ return runTerraform(entity, ['init', '-no-color'], stream);
311977
312326
  }
311978
- function terraformPlan(entity) {
311979
- return runTerraform(entity, ['plan', '-no-color']);
312327
+ function terraformPlan(entity, stream) {
312328
+ return runTerraform(entity, ['plan', '-no-color'], stream);
311980
312329
  }
311981
- async function terraformApply(entity, isImport = false, skipPlan = false) {
312330
+ async function terraformApply(entity, isImport = false, skipPlan = false, stream) {
311982
312331
  let line = false;
311983
312332
  if (isImport && !skipPlan) {
311984
312333
  console.log(`
@@ -311997,15 +312346,15 @@ Type 'yes' to continue:`);
311997
312346
  });
311998
312347
  }
311999
312348
  if (line === 'yes' || skipPlan) {
312000
- return runTerraform(entity, ['apply', '-no-color', '-auto-approve']);
312349
+ return runTerraform(entity, ['apply', '-no-color', '-auto-approve'], stream);
312001
312350
  }
312002
312351
  else {
312003
312352
  console.log(`🚀 Skipping apply for entity ${entity.kind} ${entity.metadata.name}`);
312004
312353
  return Promise.resolve('');
312005
312354
  }
312006
312355
  }
312007
- function terraformDestroy(entity) {
312008
- return runTerraform(entity, ['destroy', '-no-color', '-auto-approve']);
312356
+ function terraformDestroy(entity, stream) {
312357
+ return runTerraform(entity, ['destroy', '-no-color', '-auto-approve'], stream);
312009
312358
  }
312010
312359
 
312011
312360
  ;// CONCATENATED MODULE: ../provisioner/src/features/uninstaller.ts
@@ -312014,12 +312363,11 @@ function terraformDestroy(entity) {
312014
312363
 
312015
312364
 
312016
312365
 
312017
- const uninstaller_messageLog = src_default()('firestartr:provisioner:features:uninstaller');
312018
312366
  async function untrackManagedFiles(feature, deps) {
312019
312367
  if (!feature.spec.files || feature.spec.files.length < 1)
312020
312368
  return;
312021
- uninstaller_messageLog('Removing managed files from the Terraform State');
312022
- uninstaller_messageLog('Synthing the project...');
312369
+ provisioner_src_logger.debug('Removing managed files from the Terraform State');
312370
+ provisioner_src_logger.debug('Synthing the project...');
312023
312371
  const randomFilenameFeature = `${catalog_common.generic.randomString(20)}.yaml`;
312024
312372
  const randomFilenameDeps = `${catalog_common.generic.randomString(20)}_deps.yaml`;
312025
312373
  catalog_common.io.writeYamlFile(randomFilenameFeature, feature, '/tmp');
@@ -312027,7 +312375,7 @@ async function untrackManagedFiles(feature, deps) {
312027
312375
  await runCDKTF(external_path_.join('/tmp', randomFilenameFeature), 'synth', external_path_.join('/tmp', randomFilenameDeps));
312028
312376
  await runTerraform(feature, ['init']);
312029
312377
  for (const file of feature.spec.files.filter((file) => file.userManaged === true)) {
312030
- uninstaller_messageLog(`Removing from the state file ${file.path}`);
312378
+ provisioner_src_logger.debug(`Removing from the state file ${file.path}`);
312031
312379
  // Terraform replaces / with -- and . with - in the state file names, so we do the same to get the state file name
312032
312380
  const stateFileName = `${feature.spec.type}-${file.path}`
312033
312381
  .replace(/\//g, '--')
@@ -312100,14 +312448,46 @@ function getNextStatus(status) {
312100
312448
 
312101
312449
 
312102
312450
 
312451
+
312452
+
312103
312453
  class Resource {
312104
312454
  setLogger(fn) {
312105
312455
  this.logFn = fn;
312106
312456
  }
312457
+ setSynthStreamLogs(callbacks) {
312458
+ this.synthStreamCallbacks = callbacks;
312459
+ }
312460
+ setTFStreamLogs(callbacks) {
312461
+ this.tfStreamCallbacks = callbacks;
312462
+ }
312463
+ async onSyncStreaming() {
312464
+ if (!this.logStream) {
312465
+ this.logStream = new external_stream_.PassThrough();
312466
+ }
312467
+ if (this.synthStreamCallbacks) {
312468
+ const callbacks = await this.synthStreamCallbacks.prepare();
312469
+ this.setLogStream(callbacks.fnData, callbacks.fnEnd);
312470
+ }
312471
+ }
312472
+ async onTFStreaming() {
312473
+ if (!this.logStream) {
312474
+ this.logStream = new external_stream_.PassThrough();
312475
+ }
312476
+ if (this.tfStreamCallbacks) {
312477
+ const callbacks = await this.tfStreamCallbacks.prepare();
312478
+ this.setLogStream(callbacks.fnData, callbacks.fnEnd);
312479
+ }
312480
+ }
312481
+ setLogStream(fnData, fnEnd, reopen = true) {
312482
+ if (reopen || !this.logStream)
312483
+ this.logStream = new external_stream_.PassThrough();
312484
+ this.logStream.on('data', (data) => fnData(data.toString()));
312485
+ this.logStream.on('end', () => fnEnd());
312486
+ }
312107
312487
  constructor(mainCR, operation, deps = []) {
312108
312488
  this.data = {};
312109
312489
  this.output = '';
312110
- this.logFn = (msg) => console.log(msg);
312490
+ this.logFn = (msg) => provisioner_src_logger.debug(msg);
312111
312491
  this.set('main_artifact', mainCR);
312112
312492
  this.set('operation', operation);
312113
312493
  this.set('deps', deps);
@@ -312117,36 +312497,46 @@ class Resource {
312117
312497
  await this.synth();
312118
312498
  await this.runTerraform();
312119
312499
  await this.postprocess();
312500
+ if (this.logStream) {
312501
+ this.logStream.end();
312502
+ this.logStream = null;
312503
+ }
312120
312504
  }
312121
312505
  artifact() {
312122
312506
  return this.get('main_artifact');
312123
312507
  }
312124
312508
  async synth() {
312509
+ await this.onSyncStreaming();
312125
312510
  const randomFilenameArtifact = `${catalog_common.generic.randomString(20)}.yaml`;
312126
312511
  const randomFilenameDeps = `${catalog_common.generic.randomString(20)}_deps.yaml`;
312127
312512
  catalog_common.io.writeYamlFile(randomFilenameArtifact, this.get('main_artifact'), '/tmp');
312128
312513
  catalog_common.io.writeYamlFile(randomFilenameDeps, this.get('deps'), '/tmp');
312129
- await runCDKTF(external_path_.join('/tmp', randomFilenameArtifact), 'synth', external_path_.join('/tmp', randomFilenameDeps));
312514
+ await runCDKTF(external_path_.join('/tmp', randomFilenameArtifact), 'synth', external_path_.join('/tmp', randomFilenameDeps), this.logStream);
312515
+ if (this.logStream) {
312516
+ this.logStream.end();
312517
+ this.logStream = null;
312518
+ }
312130
312519
  }
312131
312520
  log(msg) {
312132
312521
  this.logFn(msg);
312133
312522
  }
312134
312523
  async runTerraform() {
312524
+ await this.onTFStreaming();
312135
312525
  let output = '';
312136
- output += await terraformInit(this.get('main_artifact'));
312137
- output += await terraformPlan(this.get('main_artifact'));
312526
+ output += await terraformInit(this.get('main_artifact'), this.logStream);
312527
+ output += await terraformPlan(this.get('main_artifact'), this.logStream);
312138
312528
  if (this.get('operation') === 'CREATE' ||
312139
312529
  this.get('operation') === 'UPDATE') {
312140
- output += await terraformApply(this.get('main_artifact'), false, true);
312530
+ output += await terraformApply(this.get('main_artifact'), false, true, this.logStream);
312141
312531
  }
312142
312532
  else if (this.get('operation') === 'DELETE') {
312143
- output += await terraformDestroy(this.get('main_artifact'));
312533
+ output += await terraformDestroy(this.get('main_artifact'), this.logStream);
312144
312534
  }
312145
312535
  else if (this.get('operation') === 'IMPORT') {
312146
- output += await terraformApply(this.get('main_artifact'), true, false);
312536
+ output += await terraformApply(this.get('main_artifact'), true, false, this.logStream);
312147
312537
  }
312148
312538
  else if (this.get('operation') === 'IMPORT_SKIP_PLAN') {
312149
- output += await terraformApply(this.get('main_artifact'), true, true);
312539
+ output += await terraformApply(this.get('main_artifact'), true, true, this.logStream);
312150
312540
  }
312151
312541
  else {
312152
312542
  throw new Error(`unknown operation: ${this.get('operation')}`);
@@ -312171,7 +312561,6 @@ class Resource {
312171
312561
 
312172
312562
 
312173
312563
 
312174
- const github_feature_log = src_default()('firestartr:provisioner:github_repository_feature');
312175
312564
  class github_feature_FirestartrGithubRepositoryFeature extends Resource {
312176
312565
  static kind() {
312177
312566
  return 'FirestartrGithubRepositoryFeature';
@@ -312179,19 +312568,19 @@ class github_feature_FirestartrGithubRepositoryFeature extends Resource {
312179
312568
  async preprocess() {
312180
312569
  switch (this.get('operation')) {
312181
312570
  case 'CREATE':
312182
- github_feature_log('CREATED');
312571
+ provisioner_src_logger.debug('Creating FirestartrGithubRepositoryFeature');
312183
312572
  await this._updateManagedFiles();
312184
312573
  break;
312185
312574
  case 'UPDATE':
312186
- github_feature_log('UPDATED');
312575
+ provisioner_src_logger.debug('Updating FirestartrGithubRepositoryFeature');
312187
312576
  await this._updateManagedFiles();
312188
312577
  break;
312189
312578
  case 'DELETE':
312190
- github_feature_log('DELETE');
312579
+ provisioner_src_logger.debug('Deleting FirestartrGithubRepositoryFeature');
312191
312580
  await untrackManagedFiles(this.get('main_artifact'), this.get('deps'));
312192
312581
  break;
312193
312582
  default:
312194
- github_feature_log(`UNKNOWN: ${this.get('operation')}`);
312583
+ provisioner_src_logger.debug(`Unknown operation '${this.get('operation')}' for FirestartrGithubRepositoryFeature`);
312195
312584
  }
312196
312585
  }
312197
312586
  async _updateManagedFiles() {
@@ -312205,7 +312594,7 @@ class github_feature_FirestartrGithubRepositoryFeature extends Resource {
312205
312594
  file.content = newContent;
312206
312595
  }
312207
312596
  catch (e) {
312208
- github_feature_log(`File ${file.path} not found in repo ${repoInfo.ref.name} on branch ${repoInfo.branch}. File content not updated`);
312597
+ provisioner_src_logger.error(`File ${file.path} not found in repo ${repoInfo.ref.name} on branch ${repoInfo.branch}. File content not updated`);
312209
312598
  }
312210
312599
  }
312211
312600
  }
@@ -312285,7 +312674,6 @@ async function provisionRegularBranch(repo, branchName, sourceBranch, org) {
312285
312674
 
312286
312675
 
312287
312676
 
312288
- const github_repository_log = src_default()('firestartr:provisioner:github_repository');
312289
312677
  class github_repository_FirestartrGithubRepository extends Resource {
312290
312678
  static kind() {
312291
312679
  return 'FirestartrGithubRepository';
@@ -312293,22 +312681,23 @@ class github_repository_FirestartrGithubRepository extends Resource {
312293
312681
  async preprocess() {
312294
312682
  switch (this.get('operation')) {
312295
312683
  case 'CREATE':
312296
- github_repository_log('CREATE');
312684
+ provisioner_src_logger.debug('Creating FirestartrGithubRepository');
312297
312685
  break;
312298
312686
  case 'UPDATE':
312299
- github_repository_log('UPDATED');
312687
+ provisioner_src_logger.debug('Updating FirestartrGithubRepository');
312300
312688
  break;
312301
312689
  case 'DELETE':
312302
- github_repository_log('DELETED');
312690
+ provisioner_src_logger.debug('Deleted FirestartrGithubRepository');
312303
312691
  break;
312304
312692
  case 'IMPORT':
312305
- github_repository_log('IMPORT');
312693
+ provisioner_src_logger.debug('Importing FirestartrGithubRepository');
312306
312694
  break;
312307
312695
  case 'IMPORT_SKIP_PLAN':
312308
- github_repository_log('IMPORT_SKIP_PLAN');
312696
+ provisioner_src_logger.debug('Import skipping plan for FirestartrGithubRepository');
312309
312697
  break;
312310
312698
  default:
312311
- github_repository_log('UNKNOWN');
312699
+ provisioner_src_logger.debug('Unknown operation for FirestartrGithubRepository ' +
312700
+ this.get('operation'));
312312
312701
  }
312313
312702
  }
312314
312703
  async postprocess() {
@@ -312316,20 +312705,20 @@ class github_repository_FirestartrGithubRepository extends Resource {
312316
312705
  switch (this.get('operation')) {
312317
312706
  case 'CREATE':
312318
312707
  case 'UPDATE':
312319
- github_repository_log('CREATE & UPDATE');
312708
+ provisioner_src_logger.debug(`Created and updated FirestartrGithubRepository ${cr.metadata.name}`);
312320
312709
  await provisionAdditionalBranches(cr);
312321
312710
  break;
312322
312711
  case 'DELETE':
312323
- github_repository_log('DELETED');
312712
+ provisioner_src_logger.debug(`Deleted FirestartrGithubRepository ${cr.metadata.name}`);
312324
312713
  break;
312325
312714
  case 'IMPORT':
312326
- github_repository_log('IMPORT');
312715
+ provisioner_src_logger.debug(`Imported FirestartrGithubRepository ${cr.metadata.name}`);
312327
312716
  break;
312328
312717
  case 'IMPORT_SKIP_PLAN':
312329
- github_repository_log('IMPORT_SKIP_PLAN');
312718
+ provisioner_src_logger.debug(`Imported skipped plan FirestartrGithubRepository ${cr.metadata.name}`);
312330
312719
  break;
312331
312720
  default:
312332
- github_repository_log('UNKNOWN');
312721
+ provisioner_src_logger.debug(`Finished for unknown operation ${this.get('operation')} for FirestartrGithubRepository`);
312333
312722
  }
312334
312723
  }
312335
312724
  }
@@ -312337,7 +312726,6 @@ class github_repository_FirestartrGithubRepository extends Resource {
312337
312726
  ;// CONCATENATED MODULE: ../provisioner/src/resources/github_membership/index.ts
312338
312727
 
312339
312728
 
312340
- const github_membership_log = src_default()('firestartr:provisioner:github_membership');
312341
312729
  class github_membership_FirestartrGithubMembership extends Resource {
312342
312730
  static kind() {
312343
312731
  return 'FirestartrGithubMembership';
@@ -312345,22 +312733,23 @@ class github_membership_FirestartrGithubMembership extends Resource {
312345
312733
  async preprocess() {
312346
312734
  switch (this.get('operation')) {
312347
312735
  case 'CREATE':
312348
- github_membership_log('CREATE');
312736
+ provisioner_src_logger.debug('Creating FirestartrGithubMembership');
312349
312737
  break;
312350
312738
  case 'UPDATE':
312351
- github_membership_log('UPDATED');
312739
+ provisioner_src_logger.debug('Updating FirestartrGithubMembership');
312352
312740
  break;
312353
312741
  case 'DELETE':
312354
- github_membership_log('DELETED');
312742
+ provisioner_src_logger.debug('Deleted FirestartrGithubMembership');
312355
312743
  break;
312356
312744
  case 'IMPORT':
312357
- github_membership_log('IMPORT');
312745
+ provisioner_src_logger.debug('Importing FirestartrGithubMembership');
312358
312746
  break;
312359
312747
  case 'IMPORT_SKIP_PLAN':
312360
- github_membership_log('IMPORT_SKIP_PLAN');
312748
+ provisioner_src_logger.debug('Import skipping plan for FirestartrGithubMembership');
312361
312749
  break;
312362
312750
  default:
312363
- github_membership_log('UNKNOWN');
312751
+ provisioner_src_logger.debug('Unknown operation for FirestartrGithubMembership ' +
312752
+ this.get('operation'));
312364
312753
  }
312365
312754
  }
312366
312755
  }
@@ -312368,7 +312757,6 @@ class github_membership_FirestartrGithubMembership extends Resource {
312368
312757
  ;// CONCATENATED MODULE: ../provisioner/src/resources/github_group/index.ts
312369
312758
 
312370
312759
 
312371
- const github_group_log = src_default()('firestartr:provisioner:github_group');
312372
312760
  class github_group_FirestartrGithubGroup extends Resource {
312373
312761
  static kind() {
312374
312762
  return 'FirestartrGithubGroup';
@@ -312376,22 +312764,23 @@ class github_group_FirestartrGithubGroup extends Resource {
312376
312764
  async preprocess() {
312377
312765
  switch (this.get('operation')) {
312378
312766
  case 'CREATE':
312379
- github_group_log('CREATE');
312767
+ provisioner_src_logger.debug('Creating FirestartrGithubGroup');
312380
312768
  break;
312381
312769
  case 'UPDATE':
312382
- github_group_log('UPDATED');
312770
+ provisioner_src_logger.debug('Updating FirestartrGithubGroup');
312383
312771
  break;
312384
312772
  case 'DELETE':
312385
- github_group_log('DELETED');
312773
+ provisioner_src_logger.debug('Deleted FirestartrGithubGroup');
312386
312774
  break;
312387
312775
  case 'IMPORT':
312388
- github_group_log('IMPORT');
312776
+ provisioner_src_logger.debug('Importing FirestartrGithubGroup');
312389
312777
  break;
312390
312778
  case 'IMPORT_SKIP_PLAN':
312391
- github_group_log('IMPORT_SKIP_PLAN');
312779
+ provisioner_src_logger.debug('Import skipping plan for FirestartrGithubGroup');
312392
312780
  break;
312393
312781
  default:
312394
- github_group_log('UNKNOWN');
312782
+ provisioner_src_logger.debug('Unknown operation for FirestartrGithubGroup ' +
312783
+ this.get('operation'));
312395
312784
  }
312396
312785
  }
312397
312786
  }
@@ -312399,14 +312788,13 @@ class github_group_FirestartrGithubGroup extends Resource {
312399
312788
  ;// CONCATENATED MODULE: ../provisioner/src/resources/terraform_module/index.ts
312400
312789
 
312401
312790
 
312402
- const terraform_module_log = src_default()('firestartr:provisioner:terraform_module');
312403
312791
  class FirestartrTerraformModule extends Resource {
312404
312792
  static kind() {
312405
312793
  return 'FirestartrTerraformModule';
312406
312794
  }
312407
312795
  async preprocess() {
312408
312796
  const operation = this.get('operation');
312409
- terraform_module_log(operation);
312797
+ provisioner_src_logger.debug(`Running operation '${operation}' for FirestartrTerraformModule`);
312410
312798
  switch (operation) {
312411
312799
  case 'CREATE':
312412
312800
  break;
@@ -312427,7 +312815,6 @@ class FirestartrTerraformModule extends Resource {
312427
312815
  ;// CONCATENATED MODULE: ../provisioner/src/resources/github_orgWebhook/index.ts
312428
312816
 
312429
312817
 
312430
- const github_orgWebhook_log = src_default()('firestartr:provisioner:github_orgWebhook');
312431
312818
  class github_orgWebhook_FirestartrGithubOrgWebhook extends Resource {
312432
312819
  static kind() {
312433
312820
  return 'FirestartrGithubOrgWebhook';
@@ -312435,22 +312822,23 @@ class github_orgWebhook_FirestartrGithubOrgWebhook extends Resource {
312435
312822
  async preprocess() {
312436
312823
  switch (this.get('operation')) {
312437
312824
  case 'CREATE':
312438
- github_orgWebhook_log('CREATE');
312825
+ provisioner_src_logger.debug('Creating FirestartrGithubOrgWebhook');
312439
312826
  break;
312440
312827
  case 'UPDATE':
312441
- github_orgWebhook_log('UPDATED');
312828
+ provisioner_src_logger.debug('Updating FirestartrGithubOrgWebhook');
312442
312829
  break;
312443
312830
  case 'DELETE':
312444
- github_orgWebhook_log('DELETED');
312831
+ provisioner_src_logger.debug('Deleted FirestartrGithubOrgWebhook');
312445
312832
  break;
312446
312833
  case 'IMPORT':
312447
- github_orgWebhook_log('IMPORT');
312834
+ provisioner_src_logger.debug('Importing FirestartrGithubOrgWebhook');
312448
312835
  break;
312449
312836
  case 'IMPORT_SKIP_PLAN':
312450
- github_orgWebhook_log('IMPORT_SKIP_PLAN');
312837
+ provisioner_src_logger.debug('Import skipping plan for FirestartrGithubOrgWebhook');
312451
312838
  break;
312452
312839
  default:
312453
- github_orgWebhook_log('UNKNOWN');
312840
+ provisioner_src_logger.debug('Unknown operation for FirestartrGithubOrgWebhook ' +
312841
+ this.get('operation'));
312454
312842
  }
312455
312843
  }
312456
312844
  }
@@ -312487,6 +312875,12 @@ async function runProvisioner(data, opts) {
312487
312875
  ? 'DELETE'
312488
312876
  : 'UNKNOWN';
312489
312877
  const resource = createInstanceOf(mainCr, operation, deps);
312878
+ if ('logStreamCallbacksCDKTF' in opts) {
312879
+ resource.setSynthStreamLogs(opts['logStreamCallbacksCDKTF']);
312880
+ }
312881
+ if ('logStreamCallbacksTF' in opts) {
312882
+ resource.setTFStreamLogs(opts['logStreamCallbacksTF']);
312883
+ }
312490
312884
  await resource.run();
312491
312885
  return resource;
312492
312886
  }
@@ -312505,7 +312899,6 @@ function createInstanceOf(entity, op, deps) {
312505
312899
 
312506
312900
 
312507
312901
 
312508
- const provisioner_messageLog_0 = src_default()('firestartr:provisioner:main');
312509
312902
  async function deploy(app) {
312510
312903
  const entity = catalog_common.io.fromYaml(external_fs_.readFileSync(catalog_common.environment.getFromEnvironment(catalog_common.types.envVars.cdktfEntityPath), 'utf8'));
312511
312904
  const deps = catalog_common.io.fromYaml(external_fs_.readFileSync(catalog_common.environment.getFromEnvironment(catalog_common.types.envVars.cdktfDepsPath), 'utf8'));
@@ -312514,7 +312907,7 @@ async function deploy(app) {
312514
312907
  : false;
312515
312908
  catalog_common.io.removeFile(catalog_common.environment.getFromEnvironment(catalog_common.types.envVars.cdktfEntityPath));
312516
312909
  catalog_common.io.removeFile(catalog_common.environment.getFromEnvironment(catalog_common.types.envVars.cdktfDepsPath));
312517
- provisioner_messageLog_0('Entity to provision: %O', entity);
312910
+ provisioner_src_logger.info(`Entity to provision: ${entity}`);
312518
312911
  const orgConfig = {
312519
312912
  bucket: catalog_common.environment.getFromEnvironment(catalog_common.types.envVars.s3Bucket),
312520
312913
  dynamodbTable: catalog_common.environment.getFromEnvironment(catalog_common.types.envVars.s3Lock),
@@ -312532,7 +312925,7 @@ async function deploy(app) {
312532
312925
  app.synth();
312533
312926
  }
312534
312927
  catch (e) {
312535
- void provisioner_messageLog_0('Error: deploy: %s', e);
312928
+ provisioner_src_logger.error('Error: deploy: %s', e);
312536
312929
  throw e;
312537
312930
  }
312538
312931
  }
@@ -312554,26 +312947,20 @@ if (process.env.RUN_PROVISIONER) {
312554
312947
  async function tryPublishApply(item, planOutput, kind) {
312555
312948
  try {
312556
312949
  if (!('firestartr.dev/last-state-pr' in item.metadata.annotations)) {
312557
- src_logger.debug('USER_FEEDBACK_PUBLISH_APPLY_NO_LAST_STATE', {
312558
- metadata: { name: item.metadata.name, kind },
312559
- });
312950
+ operator_src_logger.debug(`The user feedback for the '${kind}/${item.metadata.name}' apply operation could not be published because the last state was not found.`);
312560
312951
  return;
312561
312952
  }
312562
312953
  await publishApply(item, planOutput, kind);
312563
312954
  }
312564
312955
  catch (e) {
312565
- src_logger.error('USER_FEEDBACK_PUBLISH_APPLY_ERROR', {
312566
- metadata: { name: item.metadata.name, kind, error: e },
312567
- });
312956
+ operator_src_logger.error(`The user feedback for the '${kind}/${item.metadata.name}' apply operation failed to publish due to an error: '${e}'.`);
312568
312957
  }
312569
312958
  }
312570
312959
  async function tryPublishDestroy(item, destroyOutput) {
312571
312960
  let lastPr = null;
312572
312961
  try {
312573
312962
  const { repo, org } = extractPrInfo(item);
312574
- src_logger.debug('USER_FEEDBACK_PUBLISH_DESTROY', {
312575
- metadata: { item, repo, org },
312576
- });
312963
+ operator_src_logger.debug(`The user feedback for the '${item.kind}/${item.metadata.name}' destroy operation is being published for repository '${repo}' in organization '${org}'.`);
312577
312964
  lastPr = await github_0.pulls.filterPrBy({
312578
312965
  title: `hydrate: ${item.metadata.name}`,
312579
312966
  state: 'closed',
@@ -312584,9 +312971,7 @@ async function tryPublishDestroy(item, destroyOutput) {
312584
312971
  maxRetries: 3,
312585
312972
  });
312586
312973
  if (!lastPr) {
312587
- src_logger.debug('USER_FEEDBACK_PUBLISH_DESTROY_NO_LAST_STATE', {
312588
- metadata: { item },
312589
- });
312974
+ operator_src_logger.debug(`The user feedback for the '${item.kind}/${item.metadata.name}' destroy operation could not be published because the last state was not found.`);
312590
312975
  return;
312591
312976
  }
312592
312977
  const dividedOutput = github_0.pulls.divideCommentIntoChunks(destroyOutput, 250);
@@ -312604,20 +312989,14 @@ async function tryPublishDestroy(item, destroyOutput) {
312604
312989
  ${commentContent}
312605
312990
  \`\`\`
312606
312991
  </details>`;
312607
- src_logger.debug('USER_FEEDBACK_PUBLISH_COMMENT', {
312608
- metadata: { lastPr: lastPr.number, repo, org, item },
312609
- });
312992
+ operator_src_logger.debug(`The user feedback for item '${item.kind}/${item.metadata.name}' is being published as a comment on pull request '${lastPr.number}' for repository '${repo}' in organization '${org}'.`);
312610
312993
  await github_0.pulls.commentInPR(comment, lastPr.number, repo, org);
312611
- src_logger.debug('USER_FEEDBACK_PUBLISH_DESTROY_COMMENT', {
312612
- metadata: { lastPr: lastPr.number, item },
312613
- });
312994
+ operator_src_logger.debug(`The user feedback for the '${item.kind}/${item.metadata.name}' destroy operation is being published as a comment on pull request '${lastPr.number}'.`);
312614
312995
  currentCommentNo += 1;
312615
312996
  }
312616
312997
  }
312617
312998
  catch (e) {
312618
- src_logger.error('USER_FEEDBACK_PUBLISH_ERROR', {
312619
- metadata: { lastPr: lastPr.number, item, error: e },
312620
- });
312999
+ operator_src_logger.error(`An error occurred while publishing user feedback for item '${item.kind}/${item.metadata.name}' on pull request '${lastPr.number}': '${e}'.`);
312621
313000
  }
312622
313001
  }
312623
313002
  async function publishApply(item, applyOutput, kind) {
@@ -312654,9 +313033,7 @@ function tryCreateErrorSummary(title, errorMsg) {
312654
313033
  return summaryText;
312655
313034
  }
312656
313035
  catch (e) {
312657
- src_logger.error('USER_FEEDBACK_GETTING_ERROR_SUMMARY', {
312658
- metadata: { error: e, title, errorMsg },
312659
- });
313036
+ operator_src_logger.error(`An error occurred while getting the error summary for '${title}'. The error was '${e}', with the message: '${errorMsg}'.`);
312660
313037
  return `Error when getting error summary: ${e}`;
312661
313038
  }
312662
313039
  }
@@ -312678,9 +313055,7 @@ async function tryPublishError(item, reason, message) {
312678
313055
  await publishError(item, reason, message);
312679
313056
  }
312680
313057
  catch (e) {
312681
- src_logger.error('USER_FEEDBACK_TRY_PUBLISH_ERROR', {
312682
- metadata: { item, error: e, reason },
312683
- });
313058
+ operator_src_logger.error(`The user feedback for item '${item.kind}/${item.metadata.name}' failed to publish due to an error: '${e}'. Reason: '${reason}'.`);
312684
313059
  }
312685
313060
  }
312686
313061
  async function publishError(item, reason, message) {
@@ -312718,6 +313093,53 @@ ${commentContent}
312718
313093
  }
312719
313094
  }
312720
313095
 
313096
+ ;// CONCATENATED MODULE: ../operator/src/user-feedback-ops/gh-checkrun.ts
313097
+
313098
+ async function GHCheckRun(cmd, item) {
313099
+ const prInfo = gh_checkrun_extractPrInfo(item);
313100
+ if (!prInfo.prNumber) {
313101
+ throw new Error('TFCheckRun: prNumber not retrievable');
313102
+ }
313103
+ const checkRun = await github_0.feedback.createCheckRun(prInfo.org, prInfo.repo, helperCreateCheckRunName(cmd, item), {
313104
+ //Number(pr_number),
313105
+ pullNumber: Number(prInfo.prNumber),
313106
+ includeCheckRunComment: true,
313107
+ checkRunComment: `The Github ${item.kind} is being processed (cmd=${cmd}). Details: `,
313108
+ });
313109
+ checkRun.mdOptionsDetails({
313110
+ quotes: 'terraform',
313111
+ });
313112
+ checkRun.update('Initiating', 'queued');
313113
+ return {
313114
+ fnData: (d) => {
313115
+ checkRun.update(d.toString(), 'in_progress');
313116
+ },
313117
+ fnEnd: () => {
313118
+ checkRun.close('OK', true);
313119
+ },
313120
+ fnOnError: (err) => {
313121
+ checkRun.close('KO', false);
313122
+ },
313123
+ };
313124
+ }
313125
+ function helperCreateCheckRunName(cmd, item) {
313126
+ return `Github Provisioner / ${item.kind} - ${cmd}`;
313127
+ }
313128
+ function gh_checkrun_extractPrInfo(item) {
313129
+ const prInfo = item.metadata.annotations['firestartr.dev/last-state-pr'];
313130
+ const prNumber = prInfo.split('#')[1];
313131
+ if (!prNumber)
313132
+ throw new Error('No PR number found in CR');
313133
+ const orgRepo = prInfo.split('#')[0];
313134
+ const org = orgRepo.split('/')[0];
313135
+ if (!org)
313136
+ throw new Error('No org found in CR');
313137
+ const repo = orgRepo.split('/')[1];
313138
+ if (!repo)
313139
+ throw new Error('No repo found in CR');
313140
+ return { prNumber, repo, org };
313141
+ }
313142
+
312721
313143
  ;// CONCATENATED MODULE: ../operator/cdktf.ts
312722
313144
 
312723
313145
 
@@ -312727,8 +313149,8 @@ ${commentContent}
312727
313149
 
312728
313150
 
312729
313151
 
312730
- const cdktf_log = src_default()('firestartr:operator:cdktf');
312731
313152
  function processOperation(item, op, handler) {
313153
+ operator_src_logger.info(`Processing operation ${op} on ${item.kind}/${item.metadata?.name}`);
312732
313154
  try {
312733
313155
  switch (op) {
312734
313156
  case OperationType.UPDATED:
@@ -312750,7 +313172,7 @@ function processOperation(item, op, handler) {
312750
313172
  }
312751
313173
  }
312752
313174
  catch (e) {
312753
- cdktf_log(`Operation ${op} failed: ${e}`);
313175
+ operator_src_logger.error(`Operation ${op} failed: ${e}`);
312754
313176
  throw e;
312755
313177
  }
312756
313178
  }
@@ -312801,6 +313223,9 @@ async function* sync(item, op, handler) {
312801
313223
  };
312802
313224
  }
312803
313225
  async function* markedToDeletion(item, op, handler) {
313226
+ // here we store the current callbacks that
313227
+ // are being used (synth|tf-apply...)
313228
+ let checkRunCtl;
312804
313229
  try {
312805
313230
  void cleanTerraformState();
312806
313231
  const type = 'DELETING';
@@ -312825,15 +313250,38 @@ async function* markedToDeletion(item, op, handler) {
312825
313250
  status: 'True',
312826
313251
  message: 'Destroying process started',
312827
313252
  };
312828
- if (item.metadata.annotations['firestartr.dev/last-state-pr'] || false) {
312829
- await addDestroyCommitStatus(item, 'pending', 'Performing destroy operation...', `Terraform Destroy ${item.metadata.name}`);
312830
- }
312831
313253
  const deps = await handler.resolveReferences();
313254
+ const annotation = 'firestartr.dev/last-state-pr';
313255
+ const statePr = item?.metadata?.annotations?.[annotation];
313256
+ const hasStatePr = typeof statePr === 'string' && statePr.trim().length > 0;
313257
+ if (!hasStatePr) {
313258
+ operator_src_logger.warn(`CR ${item?.kind ?? 'UnknownKind'}/${item?.metadata?.name ?? 'unknown'} ` +
313259
+ `has no "${annotation}" annotation; skipping GitHub Check Runs (synth, terraform apply).`);
313260
+ }
313261
+ else {
313262
+ operator_src_logger.debug(`CR ${item.kind}/${item.metadata.name} uses "${annotation}" = ${statePr}`);
313263
+ }
312832
313264
  const destroyOutput = await provisioner.runProvisioner({
312833
313265
  mainCr: item,
312834
313266
  deps,
312835
313267
  }, {
312836
313268
  delete: true,
313269
+ ...(hasStatePr
313270
+ ? {
313271
+ logStreamCallbacksCDKTF: {
313272
+ prepare: async () => {
313273
+ checkRunCtl = await GHCheckRun('synth', item);
313274
+ return checkRunCtl;
313275
+ },
313276
+ },
313277
+ logStreamCallbacksTF: {
313278
+ prepare: async () => {
313279
+ checkRunCtl = await GHCheckRun('terraform destroy', item);
313280
+ return checkRunCtl;
313281
+ },
313282
+ },
313283
+ }
313284
+ : {}),
312837
313285
  });
312838
313286
  const output = destroyOutput.output;
312839
313287
  await tryPublishDestroy(item, output);
@@ -312859,10 +313307,11 @@ async function* markedToDeletion(item, op, handler) {
312859
313307
  status: 'True',
312860
313308
  message: e.toString(),
312861
313309
  };
313310
+ // if there is a current checkRun working
313311
+ // we close it with an error
313312
+ if (checkRunCtl)
313313
+ checkRunCtl.fnOnError(e);
312862
313314
  await handler.writeTerraformOutputInTfResult(item, e);
312863
- if (item.metadata.annotations['firestartr.dev/last-state-pr'] || false) {
312864
- await addDestroyCommitStatus(item, 'failure', 'Destroy operation failed', `Terraform Destroy ${item.metadata.name}`);
312865
- }
312866
313315
  void handler.error();
312867
313316
  }
312868
313317
  }
@@ -312881,6 +313330,9 @@ async function* nothing(item, op, handler) {
312881
313330
  * @param handler -
312882
313331
  */
312883
313332
  async function* doApply(item, op, handler) {
313333
+ // here we store the current callbacks that
313334
+ // are being used (synth|tf-apply...)
313335
+ let checkRunCtl;
312884
313336
  try {
312885
313337
  cleanTerraformState();
312886
313338
  yield {
@@ -312922,16 +313374,41 @@ async function* doApply(item, op, handler) {
312922
313374
  opts['create'] = true;
312923
313375
  }
312924
313376
  const deps = await handler.resolveReferences();
312925
- cdktf_log('Item %s has the following dependencies: %O', item.metadata.name, deps);
312926
- if (item.metadata.annotations['firestartr.dev/last-state-pr'] || false) {
312927
- await addApplyCommitStatus(item, 'pending', catalog_common.generic.getPrLinkFromAnnotationValue(item.metadata.annotations['firestartr.dev/last-state-pr']), 'Performing apply operation...', `Terraform Apply ${item.metadata.name}`);
313377
+ operator_src_logger.info(`Item ${item.metadata.name} has the following dependencies: ${deps}`);
313378
+ const annotation = 'firestartr.dev/last-state-pr';
313379
+ const statePr = item?.metadata?.annotations?.[annotation];
313380
+ const hasStatePr = typeof statePr === 'string' && statePr.trim().length > 0;
313381
+ if (!hasStatePr) {
313382
+ operator_src_logger.warn(`CR ${item?.kind ?? 'UnknownKind'}/${item?.metadata?.name ?? 'unknown'} ` +
313383
+ `has no "${annotation}" annotation; skipping GitHub Check Runs (synth, terraform apply).`);
313384
+ }
313385
+ else {
313386
+ operator_src_logger.debug(`CR ${item.kind}/${item.metadata.name} uses "${annotation}" = ${statePr}`);
312928
313387
  }
312929
313388
  const applyOutput = await provisioner.runProvisioner({
312930
313389
  mainCr: item,
312931
313390
  deps,
312932
- }, opts);
313391
+ }, {
313392
+ ...opts,
313393
+ ...(hasStatePr
313394
+ ? {
313395
+ logStreamCallbacksCDKTF: {
313396
+ prepare: async () => {
313397
+ checkRunCtl = await GHCheckRun('synth', item);
313398
+ return checkRunCtl;
313399
+ },
313400
+ },
313401
+ logStreamCallbacksTF: {
313402
+ prepare: async () => {
313403
+ checkRunCtl = await GHCheckRun('terraform apply', item);
313404
+ return checkRunCtl;
313405
+ },
313406
+ },
313407
+ }
313408
+ : {}),
313409
+ });
312933
313410
  await tryPublishApply(item, applyOutput?.data?.output, item.kind);
312934
- const terraformOutputJson = await provisioner.runTerraform(item, ['output', '-json']);
313411
+ const terraformOutputJson = await provisioner.runTerraform(item, ['output', '-json'], null);
312935
313412
  if (!terraformOutputJson) {
312936
313413
  throw new Error(`Terraform output is empty for ${item.kind}/${item.metadata.name}`);
312937
313414
  }
@@ -312959,9 +313436,6 @@ async function* doApply(item, op, handler) {
312959
313436
  message: 'doApply',
312960
313437
  };
312961
313438
  await handler.writeTerraformOutputInTfResult(item, output);
312962
- if (item.metadata.annotations['firestartr.dev/last-state-pr'] || false) {
312963
- await addApplyCommitStatus(item, 'success', catalog_common.generic.getPrLinkFromAnnotationValue(item.metadata.annotations['firestartr.dev/last-state-pr']), 'Apply operation completed', `Terraform Apply ${item.metadata.name}`);
312964
- }
312965
313439
  handler.success();
312966
313440
  }
312967
313441
  catch (e) {
@@ -312973,7 +313447,11 @@ async function* doApply(item, op, handler) {
312973
313447
  error = e;
312974
313448
  }
312975
313449
  await tryPublishApply(item, error, item.kind);
312976
- cdktf_log('Error applying item %s: %O', item.metadata.name, error);
313450
+ // if there is a current checkRun working
313451
+ // we close it with an error
313452
+ if (checkRunCtl)
313453
+ checkRunCtl.fnOnError(error);
313454
+ operator_src_logger.error(`Error applying item ${item.metadata.name}: ${error}`);
312977
313455
  yield {
312978
313456
  item,
312979
313457
  reason: op,
@@ -312995,9 +313473,6 @@ async function* doApply(item, op, handler) {
312995
313473
  status: 'False',
312996
313474
  message: error.toString(),
312997
313475
  };
312998
- if (item.metadata.annotations['firestartr.dev/last-state-pr'] || false) {
312999
- await addApplyCommitStatus(item, 'failure', catalog_common.generic.getPrLinkFromAnnotationValue(item.metadata.annotations['firestartr.dev/last-state-pr']), 'Apply operation failed', `Terraform Apply ${item.metadata.name}`);
313000
- }
313001
313476
  handler.error();
313002
313477
  if (error) {
313003
313478
  await handler.writeTerraformOutputInTfResult(item, error);
@@ -313114,57 +313589,72 @@ class TFPlanItemVersion extends TFPlanItem {
313114
313589
  }
313115
313590
  }
313116
313591
 
313592
+ ;// CONCATENATED MODULE: ../terraform_provisioner/src/logger.ts
313593
+
313594
+ /* harmony default export */ const terraform_provisioner_src_logger = (catalog_common.logger);
313595
+
313117
313596
  ;// CONCATENATED MODULE: ../terraform_provisioner/src/utils.ts
313118
313597
 
313119
313598
 
313120
- //import Debug from "debug"
313121
313599
 
313122
- //const infolog: Debug.Debugger = Debug('firestartr:operator:cmd:terraform')
313600
+
313123
313601
  async function utils_validate(path, secrets) {
313124
313602
  return await tfExec(path, ['validate'], secrets);
313125
313603
  }
313126
- async function init(path, secrets) {
313127
- return await tfExec(path, ['init'], secrets);
313604
+ async function init(path, secrets, stream) {
313605
+ return await tfExec(path, ['init'], secrets, ['-input=false'], stream);
313128
313606
  }
313129
- async function initFromModule(path, source, secrets) {
313130
- return tfExec(path, ['init', `-from-module=${source}`], secrets, []);
313607
+ async function initFromModule(path, source, secrets, stream) {
313608
+ return tfExec(path, ['init', `-from-module=${source}`], secrets, [], stream);
313131
313609
  }
313132
- async function plan(path, secrets, format, args = ['plan']) {
313133
- const plan = await tfExec(path, args.concat(format === 'json' ? ['-json'] : []), secrets);
313610
+ async function plan(path, secrets, format, args = ['plan'], stream) {
313611
+ terraform_provisioner_src_logger.info(`Running terraform plan with ${format} in path ${path}`);
313612
+ const plan = await tfExec(path, args.concat(format === 'json' ? ['-json'] : []), secrets, ['-input=false'], stream);
313134
313613
  if (format === 'json') {
313135
313614
  const tfPlan = planGet(plan);
313136
313615
  return tfPlan;
313137
313616
  }
313138
313617
  return plan;
313139
313618
  }
313140
- async function apply(path, secrets) {
313141
- return await tfExec(path, ['apply', '-auto-approve'], secrets);
313619
+ async function apply(path, secrets, stream) {
313620
+ terraform_provisioner_src_logger.debug(`Running terraform apply in path ${path}`);
313621
+ return await tfExec(path, ['apply', '-auto-approve'], secrets, ['-input=false'], stream);
313142
313622
  }
313143
- async function destroy(path, secrets) {
313144
- return await tfExec(path, ['destroy', '-auto-approve'], secrets);
313623
+ async function destroy(path, secrets, stream) {
313624
+ terraform_provisioner_src_logger.debug(`Running terraform destroy in path ${path}`);
313625
+ return await tfExec(path, ['destroy', '-auto-approve'], secrets, ['-input=false'], stream);
313145
313626
  }
313146
313627
  async function output(path, secrets) {
313628
+ terraform_provisioner_src_logger.debug(`Running terraform output in path ${path}`);
313147
313629
  return await tfExec(path, ['output', '-json'], secrets, []);
313148
313630
  }
313149
- async function tfExec(path, args, secrets, extraArgs = ['-input=false']) {
313631
+ async function tfExec(path, args, secrets, extraArgs = ['-input=false'], stream) {
313150
313632
  // Format to TF_VAR variables -> https://developer.hashicorp.com/terraform/cli/config/environment-variables#tf_var_name
313151
313633
  for (const secret of secrets) {
313152
313634
  process.env[`${secret.key}`] = secret.value;
313153
313635
  }
313636
+ terraform_provisioner_src_logger.info(`Spawning terraform process ['terraform ${args.concat(extraArgs).join(' ')}'] in path '${path}'`);
313154
313637
  process.env['TF_PLUGIN_CACHE_DIR'] = '/home/terraform-plugins-cache';
313155
313638
  return new Promise((ok, ko) => {
313156
- const tfProcess = (0,external_child_process_.spawn)('terraform', args.concat(extraArgs), { cwd: path });
313157
- tfProcess.stdout.pipe(process.stdout);
313158
- tfProcess.stderr.pipe(process.stderr);
313639
+ const tfProcess = (0,external_child_process_.spawn)('terraform', args.concat(extraArgs), {
313640
+ cwd: path,
313641
+ stdio: ['inherit', 'pipe', 'pipe'],
313642
+ });
313159
313643
  let output = '';
313160
313644
  let flagStdoutEnd = false;
313161
313645
  let flagStderrEnd = false;
313162
313646
  let outputErrors = '';
313163
313647
  tfProcess.stdout.on('data', (log) => {
313164
- output += catalog_common.io.stripAnsi(log.toString());
313648
+ const line = catalog_common.io.stripAnsi(log.toString());
313649
+ output += line;
313650
+ if (stream)
313651
+ stream.write(line);
313165
313652
  });
313166
313653
  tfProcess.stderr.on('data', (log) => {
313167
- outputErrors += catalog_common.io.stripAnsi(log.toString());
313654
+ const line = catalog_common.io.stripAnsi(log.toString());
313655
+ outputErrors += line;
313656
+ if (stream)
313657
+ stream.write(line);
313168
313658
  });
313169
313659
  tfProcess.stdout.on('end', () => {
313170
313660
  flagStdoutEnd = true;
@@ -313179,9 +313669,12 @@ async function tfExec(path, args, secrets, extraArgs = ['-input=false']) {
313179
313669
  await catalog_common.generic.sleep(500);
313180
313670
  }
313181
313671
  if (code !== 0) {
313182
- ko(output + outputErrors);
313672
+ terraform_provisioner_src_logger.error(`Terraform output ${path}: ${output + outputErrors}`);
313673
+ terraform_provisioner_src_logger.error(`Terraform output ${path}: ${[output, outputErrors].join('')}`);
313674
+ ko([output, outputErrors].join(''));
313183
313675
  }
313184
313676
  else {
313677
+ terraform_provisioner_src_logger.info(`Terraform output ${path}: ${output}`);
313185
313678
  ok(output);
313186
313679
  }
313187
313680
  });
@@ -313195,7 +313688,9 @@ async function configureGit(ghToken) {
313195
313688
  'url."https://' + ghToken + '@github.com".insteadOf',
313196
313689
  'https://github.com',
313197
313690
  ];
313198
- const gitProcess = spawn('git', options);
313691
+ const gitProcess = spawn('git', options, {
313692
+ stdio: ['inherit', 'pipe', 'pipe'],
313693
+ });
313199
313694
  let output = '';
313200
313695
  gitProcess.on('data', (log) => {
313201
313696
  output += common.io.stripAnsi(log.toString());
@@ -313550,6 +314045,7 @@ function fCheckString(keys, refs) {
313550
314045
 
313551
314046
 
313552
314047
 
314048
+
313553
314049
  class project_tf_TFProjectManager {
313554
314050
  constructor(ctx) {
313555
314051
  this.tfOutput = '';
@@ -313558,6 +314054,14 @@ class project_tf_TFProjectManager {
313558
314054
  this.tfVarsJsonWriter = new WriterTfVarsJson(ctx.values, ctx.references);
313559
314055
  this.secrets = ctx.secrets;
313560
314056
  }
314057
+ setStreamCallbacks(fnData, fnEnd, reopen = true) {
314058
+ if (reopen || !this.stream)
314059
+ this.stream = new external_stream_.PassThrough();
314060
+ this.stream.on('data', (data) => {
314061
+ fnData(data.toString());
314062
+ });
314063
+ this.stream.on('end', fnEnd);
314064
+ }
313561
314065
  getOutput() {
313562
314066
  return this.tfOutput;
313563
314067
  }
@@ -313568,10 +314072,10 @@ class project_tf_TFProjectManager {
313568
314072
  this.tfVarsJsonWriter.writeToTerraformProject(external_path_.join(this.projectPath, 'terraform.tfvars.json'));
313569
314073
  }
313570
314074
  async __init() {
313571
- this.tfOutput += await init(this.projectPath, this.secrets);
314075
+ this.tfOutput += await init(this.projectPath, this.secrets, this.stream);
313572
314076
  }
313573
314077
  async __initFromModule() {
313574
- this.tfOutput += await init(this.projectPath, this.secrets);
314078
+ this.tfOutput += await init(this.projectPath, this.secrets, this.stream);
313575
314079
  }
313576
314080
  async validate() {
313577
314081
  await this.__init();
@@ -313581,24 +314085,27 @@ class project_tf_TFProjectManager {
313581
314085
  await this.__init();
313582
314086
  if (format === 'json')
313583
314087
  this.tfOutput = null;
313584
- this.tfOutput = await plan(this.projectPath, this.secrets, format);
314088
+ this.tfOutput = await plan(this.projectPath, this.secrets, format, ['plan'], this.stream);
314089
+ if (this.stream)
314090
+ this.stream.end();
313585
314091
  }
313586
314092
  async planDestroy(format) {
313587
314093
  await this.__init();
313588
314094
  if (format === 'json')
313589
314095
  this.tfOutput = null;
313590
- this.tfOutput = await plan(this.projectPath, this.secrets, format, [
313591
- 'plan',
313592
- '-destroy',
313593
- ]);
314096
+ this.tfOutput = await plan(this.projectPath, this.secrets, format, ['plan', '-destroy'], this.stream);
313594
314097
  }
313595
314098
  async apply() {
313596
314099
  await this.__init();
313597
- this.tfOutput += await apply(this.projectPath, this.secrets);
314100
+ this.tfOutput += await apply(this.projectPath, this.secrets, this.stream);
314101
+ if (this.stream)
314102
+ this.stream.end();
313598
314103
  }
313599
314104
  async destroy() {
313600
314105
  await this.__init();
313601
- this.tfOutput += await destroy(this.projectPath, this.secrets);
314106
+ this.tfOutput += await destroy(this.projectPath, this.secrets, this.stream);
314107
+ if (this.stream)
314108
+ this.stream.end();
313602
314109
  }
313603
314110
  async output() {
313604
314111
  await this.__init();
@@ -313692,6 +314199,7 @@ var lib_ajv_default = /*#__PURE__*/__nccwpck_require__.n(lib_ajv);
313692
314199
 
313693
314200
 
313694
314201
 
314202
+
313695
314203
  class TFProjectManagerRemote {
313696
314204
  constructor(ctx) {
313697
314205
  this.tfOutput = '';
@@ -313704,6 +314212,14 @@ class TFProjectManagerRemote {
313704
314212
  getOutput() {
313705
314213
  return this.tfOutput;
313706
314214
  }
314215
+ setStreamCallbacks(fnData, fnEnd, reopen = true) {
314216
+ if (reopen || !this.stream)
314217
+ this.stream = new external_stream_.PassThrough();
314218
+ this.stream.on('data', (data) => {
314219
+ fnData(data.toString());
314220
+ });
314221
+ this.stream.on('end', fnEnd);
314222
+ }
313707
314223
  async build() {
313708
314224
  external_fs_.rmSync(this.projectPath, { recursive: true, force: true });
313709
314225
  await this.__configGit();
@@ -313735,19 +314251,25 @@ insteadOf = https://github.com`);
313735
314251
  async plan(format) {
313736
314252
  await this.__init();
313737
314253
  if (format === 'json') {
313738
- this.tfOutput = await plan(this.projectPath, this.secrets, format);
314254
+ this.tfOutput = await plan(this.projectPath, this.secrets, format, ['plan'], this.stream);
313739
314255
  }
313740
314256
  else {
313741
- this.tfOutput += await plan(this.projectPath, this.secrets, format);
314257
+ this.tfOutput += await plan(this.projectPath, this.secrets, format, ['plan'], this.stream);
313742
314258
  }
314259
+ if (this.stream)
314260
+ this.stream.end();
313743
314261
  }
313744
314262
  async apply() {
313745
314263
  await this.__init();
313746
- this.tfOutput += await apply(this.projectPath, this.secrets);
314264
+ this.tfOutput += await apply(this.projectPath, this.secrets, this.stream);
314265
+ if (this.stream)
314266
+ this.stream.end();
313747
314267
  }
313748
314268
  async destroy() {
313749
314269
  await this.__init();
313750
- this.tfOutput += await destroy(this.projectPath, this.secrets);
314270
+ this.tfOutput += await destroy(this.projectPath, this.secrets, this.stream);
314271
+ if (this.stream)
314272
+ this.stream.end();
313751
314273
  }
313752
314274
  async planDestroy(format) {
313753
314275
  await this.__init();
@@ -313776,6 +314298,7 @@ insteadOf = https://github.com`);
313776
314298
 
313777
314299
 
313778
314300
 
314301
+
313779
314302
  const terraform_provisioner_ajv = new (lib_ajv_default())();
313780
314303
  const terraform_provisioner_validate = terraform_provisioner_ajv.compile(terraform_provisioner_src_schema);
313781
314304
  function validateContext(context) {
@@ -313793,7 +314316,8 @@ async function run() {
313793
314316
  await execCommand(command, tfProject);
313794
314317
  }
313795
314318
  // Programatic API
313796
- async function runTerraformProvisioner(context, command = 'init') {
314319
+ async function runTerraformProvisioner(context, command = 'init', streaming) {
314320
+ terraform_provisioner_src_logger.info(`Running command ${command} on a ${context.type} project`);
313797
314321
  validateContext(context);
313798
314322
  let tfProject = {};
313799
314323
  if (context.type === 'Inline') {
@@ -313802,10 +314326,14 @@ async function runTerraformProvisioner(context, command = 'init') {
313802
314326
  else if (context.type === 'Remote') {
313803
314327
  tfProject = new TFProjectManagerRemote(context);
313804
314328
  }
314329
+ if (streaming) {
314330
+ tfProject.setStreamCallbacks(streaming.fnData, streaming.fnEnd);
314331
+ }
313805
314332
  const output = await execCommand(command, tfProject);
313806
314333
  return output;
313807
314334
  }
313808
314335
  async function execCommand(command, tfProject) {
314336
+ terraform_provisioner_src_logger.info(`Executing command ${command} on ${tfProject.projectPath}`);
313809
314337
  await tfProject.build();
313810
314338
  switch (command) {
313811
314339
  case 'init':
@@ -313914,6 +314442,52 @@ async function* errorPolicyCompatibility(syncPolicy, generalPolicy, item, op) {
313914
314442
  await tryPublishError(item, op, message);
313915
314443
  }
313916
314444
 
314445
+ ;// CONCATENATED MODULE: ../operator/src/user-feedback-ops/tf-checkrun.ts
314446
+
314447
+ async function TFCheckRun(cmd, item) {
314448
+ const prInfo = tf_checkrun_extractPrInfo(item);
314449
+ if (!prInfo.prNumber) {
314450
+ throw new Error('TFCheckRun: prNumber not retrievable');
314451
+ }
314452
+ const checkRun = await github_0.feedback.createCheckRun(prInfo.org, prInfo.repo, tf_checkrun_helperCreateCheckRunName(cmd), {
314453
+ //Number(pr_number),
314454
+ pullNumber: Number(prInfo.prNumber),
314455
+ includeCheckRunComment: true,
314456
+ checkRunComment: `The TFWorkspace is being processed (cmd=${cmd}). Details: `,
314457
+ });
314458
+ checkRun.mdOptionsDetails({
314459
+ quotes: 'terraform',
314460
+ });
314461
+ checkRun.update('Initiating', 'queued');
314462
+ return {
314463
+ fnData: (d) => {
314464
+ checkRun.update(d.toString(), 'in_progress');
314465
+ },
314466
+ fnEnd: () => {
314467
+ checkRun.close('OK', true);
314468
+ },
314469
+ fnOnError: (err) => {
314470
+ checkRun.close('KO', false);
314471
+ },
314472
+ };
314473
+ }
314474
+ function tf_checkrun_helperCreateCheckRunName(cmd) {
314475
+ return `TFWorkspace - ${cmd}`;
314476
+ }
314477
+ function tf_checkrun_extractPrInfo(item) {
314478
+ const prInfo = item.metadata.annotations['firestartr.dev/last-state-pr'];
314479
+ const prNumber = prInfo.split('#')[1];
314480
+ if (!prNumber)
314481
+ throw new Error('No PR number found in CR');
314482
+ const org = prInfo.split('#')[0].split('/')[0];
314483
+ if (!org)
314484
+ throw new Error('No org found in CR');
314485
+ const repo = prInfo.split('#')[0].split('/')[1];
314486
+ if (!repo)
314487
+ throw new Error('No repo found in CR');
314488
+ return { prNumber, repo, org };
314489
+ }
314490
+
313917
314491
  ;// CONCATENATED MODULE: ../operator/src/tfworkspaces/process-operation.ts
313918
314492
 
313919
314493
 
@@ -313924,6 +314498,7 @@ async function* errorPolicyCompatibility(syncPolicy, generalPolicy, item, op) {
313924
314498
 
313925
314499
 
313926
314500
 
314501
+
313927
314502
  const TF_PROJECTS_PATH = '/tmp/tfworkspaces';
313928
314503
  function process_operation_processOperation(item, op, handler) {
313929
314504
  try {
@@ -313957,7 +314532,7 @@ function process_operation_processOperation(item, op, handler) {
313957
314532
  }
313958
314533
  }
313959
314534
  catch (e) {
313960
- src_logger.error('TERRAFORM_PROCESSOR_OP_ERROR', { metadata: { op, error: e } });
314535
+ operator_src_logger.error(`The Terraform processor encountered an error during operation '${op}': '${e}'.`);
313961
314536
  throw e;
313962
314537
  }
313963
314538
  }
@@ -313990,9 +314565,7 @@ async function* doPlanJSONFormat(item, op, handler) {
313990
314565
  message: 'Planning process started',
313991
314566
  };
313992
314567
  const deps = await handler.resolveReferences();
313993
- src_logger.info('TERRAFORM_PROCESSOR_PLAN_ASSESS_DEPS', {
313994
- metadata: { item, deps },
313995
- });
314568
+ operator_src_logger.info(`The Terraform processor is planning to assess dependencies for item '${item.kind}/${item.metadata.name}' with dependencies: '${deps}'.`);
313996
314569
  const context = buildProvisionerContext(item, deps);
313997
314570
  let planType = 'plan-json';
313998
314571
  if ('deletionTimestamp' in item.metadata) {
@@ -314055,9 +314628,7 @@ async function* doPlanJSONFormat(item, op, handler) {
314055
314628
  }
314056
314629
  catch (e) {
314057
314630
  console.error(e);
314058
- src_logger.error('TERRAFORM_PROCESSOR_PLAN_OBSERVE_ERROR', {
314059
- metadata: { item, error: e },
314060
- });
314631
+ operator_src_logger.error(`The Terraform processor encountered an error while observing the plan for item '${item.kind}/${item.metadata.name}': '${e}'.`);
314061
314632
  yield {
314062
314633
  item,
314063
314634
  reason: op,
@@ -314143,9 +314714,7 @@ async function* process_operation_sync(item, op, handler, syncPolicy, generalPol
314143
314714
  message: 'Sync process started',
314144
314715
  };
314145
314716
  if (!syncPolicy) {
314146
- src_logger.debug('TERRAFORM_PROCESSOR_NO_SYNC_POLICY_ONLY_OBSERVE', {
314147
- metadata: { op, item },
314148
- });
314717
+ operator_src_logger.debug(`The Terraform processor is only observing item '${item.kind}/${item.metadata.name}' because no sync policy was found for operation '${op}'.`);
314149
314718
  yield* doPlanJSONFormat(item, op, handler);
314150
314719
  return;
314151
314720
  }
@@ -314164,9 +314733,7 @@ async function* process_operation_sync(item, op, handler, syncPolicy, generalPol
314164
314733
  break;
314165
314734
  }
314166
314735
  default: {
314167
- src_logger.debug('TERRAFORM_PROCESSOR_POLICY_NOT_SUPPORTED', {
314168
- metadata: { syncPolicy, item },
314169
- });
314736
+ operator_src_logger.debug(`The Terraform processor detected a sync policy '${syncPolicy}' for item '${item.kind}/${item.metadata.name}' that is not supported.`);
314170
314737
  yield* doPlanJSONFormat(item, op, handler);
314171
314738
  break;
314172
314739
  }
@@ -314291,6 +314858,7 @@ async function* process_operation_nothing(item, op, handler) {
314291
314858
  * @param handler -
314292
314859
  */
314293
314860
  async function* process_operation_doApply(item, op, handler) {
314861
+ const checkRunCtl = await TFCheckRun('apply', item);
314294
314862
  try {
314295
314863
  yield {
314296
314864
  item,
@@ -314337,14 +314905,9 @@ async function* process_operation_doApply(item, op, handler) {
314337
314905
  message: 'Provisioning process started',
314338
314906
  };
314339
314907
  const deps = await handler.resolveReferences();
314340
- src_logger.info('TERRAFORM_PROCESSOR_APPLY_ASSESS_DEPS', {
314341
- metadata: { item, deps },
314342
- });
314908
+ operator_src_logger.info(`The Terraform processor is applying and assessing dependencies for item '${item.kind}/${item.metadata.name}' with dependencies: '${deps}'.`);
314343
314909
  const context = buildProvisionerContext(item, deps);
314344
- if (item.metadata.annotations['firestartr.dev/last-state-pr'] || false) {
314345
- await addApplyCommitStatus(item, 'pending', catalog_common.generic.getPrLinkFromAnnotationValue(item.metadata.annotations['firestartr.dev/last-state-pr']), 'Performing apply operation...', `Terraform Apply ${item.metadata.name}`);
314346
- }
314347
- const applyOutput = await runTerraformProvisioner(context, 'apply');
314910
+ const applyOutput = await runTerraformProvisioner(context, 'apply', checkRunCtl);
314348
314911
  await tryPublishApply(item, applyOutput, 'TFWorkspace');
314349
314912
  const terraformOutputJson = await runTerraformProvisioner(context, 'output');
314350
314913
  if (!terraformOutputJson) {
@@ -314376,17 +314939,13 @@ async function* process_operation_doApply(item, op, handler) {
314376
314939
  message: 'doApply',
314377
314940
  };
314378
314941
  await handler.writeTerraformOutputInTfResult(item, output);
314379
- if (item.metadata.annotations['firestartr.dev/last-state-pr'] || false) {
314380
- await addApplyCommitStatus(item, 'success', catalog_common.generic.getPrLinkFromAnnotationValue(item.metadata.annotations['firestartr.dev/last-state-pr']), 'Apply operation completed', `Terraform Apply ${item.metadata.name}`);
314381
- }
314382
314942
  handler.success();
314383
314943
  }
314384
314944
  catch (e) {
314945
+ checkRunCtl.fnOnError(e);
314385
314946
  console.error(e);
314386
314947
  await tryPublishApply(item, e, 'TFWorkspace');
314387
- src_logger.error('TERRAFORM_PROCESSOR_APPLY_ERROR', {
314388
- metadata: { item, op, error: e },
314389
- });
314948
+ operator_src_logger.error(`The Terraform processor encountered an error during operation '${op}' for item '${item.kind}/${item.metadata.name}': '${e}'.`);
314390
314949
  yield {
314391
314950
  item,
314392
314951
  reason: op,
@@ -314408,9 +314967,6 @@ async function* process_operation_doApply(item, op, handler) {
314408
314967
  status: 'False',
314409
314968
  message: JSON.stringify(e),
314410
314969
  };
314411
- if (item.metadata.annotations['firestartr.dev/last-state-pr'] || false) {
314412
- await addApplyCommitStatus(item, 'failure', catalog_common.generic.getPrLinkFromAnnotationValue(item.metadata.annotations['firestartr.dev/last-state-pr']), 'Apply operation failed', `Terraform Apply ${item.metadata.name}`);
314413
- }
314414
314970
  handler.error();
314415
314971
  if (e) {
314416
314972
  await handler.writeTerraformOutputInTfResult(item, e);
@@ -314715,30 +315271,22 @@ async function tryAcquireOrRenewLease(namespace, leaseDurationSeconds) {
314715
315271
  const name = 'firestartr-lease';
314716
315272
  const currentPod = await getCurrentPod(namespace);
314717
315273
  try {
314718
- src_logger.debug('LEADER_ELECTION_TRYING_ACQUIRE_LEASE', {
314719
- metadata: { name, namespace },
314720
- });
315274
+ operator_src_logger.debug(`Attempting to acquire the leader election lease for '${name}' in namespace '${namespace}'.`);
314721
315275
  const lease = await k8sApi.readNamespacedLease(name, namespace);
314722
315276
  const weAreTheLeader = lease.body.metadata.ownerReferences[0].uid === currentPod.metadata.uid;
314723
315277
  if (!weAreTheLeader) {
314724
- src_logger.debug('LEADER_ELECTION_LEASE_ACQUIRED_BY_ANOTHER_POD', {
314725
- metadata: { name, namespace },
314726
- });
315278
+ operator_src_logger.debug(`Another pod has acquired the leader election lease for '${name}' in namespace '${namespace}'.`);
314727
315279
  throw new LeaseAcquisitionError('Lease already acquired by another pod');
314728
315280
  }
314729
315281
  lease.body.spec.acquireTime = new client_node_dist.V1MicroTime();
314730
315282
  lease.body.spec.renewTime = new client_node_dist.V1MicroTime();
314731
315283
  lease.body.spec.leaseDurationSeconds = 30;
314732
- src_logger.debug('LEADER_ELECTION_LEASE_RENEWING', {
314733
- metadata: { name, namespace },
314734
- });
315284
+ operator_src_logger.debug(`Renewing the leader election lease for '${name}' in namespace '${namespace}'.`);
314735
315285
  await k8sApi.replaceNamespacedLease(name, namespace, lease.body);
314736
315286
  }
314737
315287
  catch (err) {
314738
315288
  if (err.response && err.response.statusCode === 404) {
314739
- src_logger.debug('LEADER_ELECTION_LEASE_NOT_FOUND_CREATING', {
314740
- metadata: { name, namespace },
314741
- });
315289
+ operator_src_logger.debug(`The leader election lease for '${name}' in namespace '${namespace}' was not found. Creating a new one.`);
314742
315290
  const lease = {
314743
315291
  apiVersion: 'coordination.k8s.io/v1',
314744
315292
  kind: 'Lease',
@@ -314761,16 +315309,12 @@ async function tryAcquireOrRenewLease(namespace, leaseDurationSeconds) {
314761
315309
  },
314762
315310
  };
314763
315311
  await k8sApi.createNamespacedLease(namespace, lease);
314764
- src_logger.debug('LEADER_ELECTION_LEASE_CREATED', {
314765
- metadata: { name, namespace },
314766
- });
315312
+ operator_src_logger.debug(`A new leader election lease has been created for '${name}' in namespace '${namespace}'.`);
314767
315313
  }
314768
315314
  else {
314769
315315
  if (err.response)
314770
315316
  console.log(err.response);
314771
- src_logger.debug('LEADER_ELECTION_LEASE_RENEWAL_ERROR', {
314772
- metadata: { name, namespace, error: err },
314773
- });
315317
+ operator_src_logger.debug(`An error occurred while renewing the leader election lease for '${name}' in namespace '${namespace}': '${err}'.`);
314774
315318
  throw err;
314775
315319
  }
314776
315320
  }
@@ -314781,9 +315325,7 @@ async function tryAcquireOrRenewLease(namespace, leaseDurationSeconds) {
314781
315325
  async function acquireLease(namespace, cb, interval = 10000) {
314782
315326
  try {
314783
315327
  await tryAcquireOrRenewLease(namespace, interval / 1000);
314784
- src_logger.debug('LEADER_ELECTION_LEASE_ACQUIRED_EXEC_CALLBACK', {
314785
- metadata: { namespace },
314786
- });
315328
+ operator_src_logger.debug(`Successfully acquired the leader election lease in namespace '${namespace}'. Executing the callback.`);
314787
315329
  cb();
314788
315330
  }
314789
315331
  catch (err) {
@@ -314791,9 +315333,7 @@ async function acquireLease(namespace, cb, interval = 10000) {
314791
315333
  if (err instanceof LeaseAcquisitionError) {
314792
315334
  console.error(`Failed to acquire Lease, retrying in ${interval / 1000} seconds`);
314793
315335
  }
314794
- src_logger.silly('LEADER_ELECTION_LEASE_ACQUIRED_FAILED_RETRY', {
314795
- metadata: { retryIn: interval / 1000 },
314796
- });
315336
+ operator_src_logger.silly(`Failed to acquire the leader election lease; will retry in '${interval / 1000}' seconds.`);
314797
315337
  await setTimeout(() => acquireLease(namespace, cb), interval);
314798
315338
  }
314799
315339
  }
@@ -314822,7 +315362,7 @@ function processOperationPlan(item, op, handler) {
314822
315362
  }
314823
315363
  }
314824
315364
  catch (e) {
314825
- src_logger.error('TFWORKSPACE_PROCESSOR_PLAN_ERROR', {
315365
+ operator_src_logger.error('TFWORKSPACE_PROCESSOR_PLAN_ERROR', {
314826
315366
  metadata: { item, error: e, op },
314827
315367
  });
314828
315368
  throw e;
@@ -314873,7 +315413,7 @@ async function* doPlanPlainTextFormat(item, op, handler, action) {
314873
315413
  message: 'Planning process started',
314874
315414
  };
314875
315415
  const deps = await handler.resolveReferences();
314876
- src_logger.info('TFWORKSPACE_PROCESSOR_PLAN_ASSESS_DEPS', {
315416
+ operator_src_logger.info('TFWORKSPACE_PROCESSOR_PLAN_ASSESS_DEPS', {
314877
315417
  metadata: { item, deps },
314878
315418
  });
314879
315419
  const context = processOperationPlan_buildProvisionerContext(item, deps);
@@ -314909,7 +315449,7 @@ async function* doPlanPlainTextFormat(item, op, handler, action) {
314909
315449
  }
314910
315450
  catch (e) {
314911
315451
  await processOperationPlan_publishPlan(item, JSON.stringify(e));
314912
- src_logger.error('TFWORKSPACE_PROCESSOR_PLAN_OBSERVING_ERROR', {
315452
+ operator_src_logger.error('TFWORKSPACE_PROCESSOR_PLAN_OBSERVING_ERROR', {
314913
315453
  metadata: { item, error: e },
314914
315454
  });
314915
315455
  yield {
@@ -314967,7 +315507,7 @@ async function* processOperationPlan_doPlanJSONFormat(item, op, handler, action)
314967
315507
  message: 'Planning process started',
314968
315508
  };
314969
315509
  const deps = await handler.resolveReferences();
314970
- src_logger.info('TFWORKSPACE_PROCESSOR_PLAN_DO_PLAN_ASSESS_DEPS', {
315510
+ operator_src_logger.info('TFWORKSPACE_PROCESSOR_PLAN_DO_PLAN_ASSESS_DEPS', {
314971
315511
  metadata: { item, deps },
314972
315512
  });
314973
315513
  const context = processOperationPlan_buildProvisionerContext(item, deps);
@@ -315027,7 +315567,7 @@ async function* processOperationPlan_doPlanJSONFormat(item, op, handler, action)
315027
315567
  }
315028
315568
  catch (e) {
315029
315569
  console.error(e);
315030
- src_logger.error('TFWORKSPACE_PROCESSOR_PLAN_DO_PLAN_ERROR', {
315570
+ operator_src_logger.error('TFWORKSPACE_PROCESSOR_PLAN_DO_PLAN_ERROR', {
315031
315571
  metadata: { item, error: e },
315032
315572
  });
315033
315573
  yield {
@@ -315394,42 +315934,30 @@ async function ctx_buildContext(claim, namespace, command) {
315394
315934
  let cr = null;
315395
315935
  let deps = null;
315396
315936
  compute['resolveDeps'] = async () => {
315397
- src_logger.debug('TFWORKSPACE_RESOLVE_DEPS_FOR_CLAIM', {
315398
- metadata: { name: claim.name },
315399
- });
315937
+ operator_src_logger.debug(`The Terraform workspace is resolving dependencies for the claim '${claim.name}'.`);
315400
315938
  // First, we bring the previous CR, if any, to get the tfStateKey
315401
- src_logger.debug('TFWORKSPACE_RESOLVE_GET_PREVIOUS_CR', {
315402
- metadata: { name: claim.name },
315403
- });
315939
+ operator_src_logger.debug(`The Terraform workspace is resolving and getting the previous custom resource for claim '${claim.name}'.`);
315404
315940
  previousCR = await getCRfromClaimRef(claim.kind, claim.name, namespace);
315405
315941
  let tfStateKey = null;
315406
315942
  if (previousCR) {
315407
- src_logger.debug('TFWORKSPACE_RESOLVE_PREVIOUS_CR_FOUND', {
315408
- metadata: { name: claim.name },
315409
- });
315943
+ operator_src_logger.debug(`The Terraform workspace found a previous custom resource for claim '${claim.name}'.`);
315410
315944
  tfStateKey = previousCR.spec.firestartr.tfStateKey;
315411
315945
  }
315412
315946
  else
315413
- src_logger.debug('TFWORKSPACE_RESOLVE_PREVIOUS_CR_NOT_FOUND', {
315414
- metadata: { name: claim.name },
315415
- });
315947
+ operator_src_logger.debug(`The Terraform workspace did not find a previous custom resource for claim '${claim.name}'.`);
315416
315948
  // Then we render the claim passing a function to resolve the refs in the k8s API
315417
- src_logger.debug('TFWORKSPACE_RESOLVE_START_RENDERING', {
315418
- metadata: { name: claim.name },
315419
- });
315949
+ operator_src_logger.debug(`The Terraform workspace is starting the rendering process for claim '${claim.name}'.`);
315420
315950
  cr = await cdk8s_renderer.renderTfWorkspace(claim, tfStateKey, getTFWorkspaceRefs, namespace);
315421
315951
  cr['metadata']['namespace'] = namespace;
315422
- src_logger.debug('TFWORKSPACE_RESOLVE_CR_RENDERED', { metadata: { cr } });
315952
+ operator_src_logger.debug(`The Terraform workspace has finished rendering the custom resource '${cr.kind}/${cr.metadata.name}' in namespace '${cr.metadata.namespace}'.`);
315423
315953
  // Finally, we resolve the deps in the rendered CR
315424
315954
  deps = await resolve(cr, getItemByItemPath, getSecret, namespace);
315425
- src_logger.debug('TFWORKSPACE_RESOLVE_DEPS_RESOLVED', {
315426
- metadata: { name: claim.name },
315427
- });
315955
+ operator_src_logger.debug(`The Terraform workspace has finished resolving all dependencies for claim '${claim.name}'.`);
315428
315956
  };
315429
315957
  compute['dryRunExec'] = async () => {
315430
315958
  // We assume that if there is no previous CR, we are creating a new one
315431
315959
  // This will be preceeded by the resolveDeps function
315432
- src_logger.debug('TFWORKSPACE_DRY_RUN_VALIDATING_CR', { metadata: { cr } });
315960
+ operator_src_logger.debug(`The Terraform workspace is dry-running the validation for custom resource '${cr.kind}/${cr.metadata.name}' in namespace '${cr.metadata.namespace}'.`);
315433
315961
  if (!previousCR) {
315434
315962
  await createDryRun(cr, namespace);
315435
315963
  }
@@ -315437,17 +315965,15 @@ async function ctx_buildContext(claim, namespace, command) {
315437
315965
  cr.metadata.resourceVersion = previousCR.metadata.resourceVersion;
315438
315966
  await updateDryRun(cr, namespace);
315439
315967
  }
315440
- src_logger.debug('TFWORKSPACE_DRY_RUN_VALIDATED_CR', { metadata: { cr } });
315968
+ operator_src_logger.debug(`The Terraform workspace has finished validating the custom resource '${cr.kind}/${cr.metadata.name}' in namespace '${cr.metadata.namespace}'.`);
315441
315969
  };
315442
315970
  compute['runProvision'] = async () => {
315443
- src_logger.debug('TFWORKSPACE_RUN_PROVISION_INIT_TERRAFORM', {
315971
+ operator_src_logger.debug('TFWORKSPACE_RUN_PROVISION_INIT_TERRAFORM', {
315444
315972
  metadata: { cr, command },
315445
315973
  });
315446
315974
  const data = await buildProvisionerContext(cr, deps);
315447
315975
  const result = await runTerraformProvisioner(data, command);
315448
- src_logger.debug('TFWORKSPACE_RUN_PROVISION_FINISHED_TERRAFORM', {
315449
- metadata: { cr, command },
315450
- });
315976
+ operator_src_logger.debug(`The Terraform workspace has finished the '${command}' command for provisioning custom resource '${cr.kind}/${cr.metadata.name}' in namespace '${cr.metadata.namespace}'.`);
315451
315977
  return result;
315452
315978
  };
315453
315979
  return new Ctx({}, compute);
@@ -315600,27 +316126,28 @@ var sdk_metrics_build_src = __nccwpck_require__(84016);
315600
316126
  ;// CONCATENATED MODULE: ../operator/src/metrics/CRStates.ts
315601
316127
 
315602
316128
 
316129
+
315603
316130
  const INTERVAL_IN_SEGS = 60;
315604
316131
  class CRStateMetrics {
315605
316132
  constructor(kind, namespace, meter) {
315606
316133
  this.kind = kind;
315607
- this.provisionedGauge = meter.createGauge(`firestartr_${this.kind}_provisioned_total`, {
315608
- description: `Total number of ${this.kind} in PROVISIONED state`,
316134
+ this.provisionedGauge = meter.createGauge('firestartr_provisioned_total', {
316135
+ description: 'Total number of CRs in PROVISIONED state',
315609
316136
  });
315610
- this.provisioningGauge = meter.createGauge(`firestartr_${this.kind}_provisioning_total`, {
315611
- description: `Total number of ${this.kind} in PROVISIONING state`,
316137
+ this.provisioningGauge = meter.createGauge('firestartr_provisioning_total', {
316138
+ description: 'Total number of CRs in PROVISIONING state',
315612
316139
  });
315613
- this.outOfSyncGauge = meter.createGauge(`firestartr_${this.kind}_out_of_sync_total`, {
315614
- description: `Total number of ${this.kind} in OUT_OF_SYNC state`,
316140
+ this.outOfSyncGauge = meter.createGauge('firestartr_out_of_sync_total', {
316141
+ description: 'Total number of CRs in OUT_OF_SYNC state',
315615
316142
  });
315616
- this.errorGauge = meter.createGauge(`firestartr_${this.kind}_error_total`, {
315617
- description: `Total number of ${this.kind} in ERROR state`,
316143
+ this.errorGauge = meter.createGauge('firestartr_error_total', {
316144
+ description: 'Total number of CRs in ERROR state',
315618
316145
  });
315619
- this.planningGauge = meter.createGauge(`firestartr_${this.kind}_planning_total`, {
315620
- description: `Total number of ${this.kind} in PLANNING state`,
316146
+ this.planningGauge = meter.createGauge('firestartr_planning_total', {
316147
+ description: 'Total number of CRs in PLANNING state',
315621
316148
  });
315622
- this.deletedGauge = meter.createGauge(`firestartr_${this.kind}_deleted_total`, {
315623
- description: `Total number of ${this.kind} in DELETED state`,
316149
+ this.deletedGauge = meter.createGauge('firestartr_deleted_total', {
316150
+ description: 'Total number of CRs in DELETED state',
315624
316151
  });
315625
316152
  this.namespace = namespace;
315626
316153
  }
@@ -315677,19 +316204,33 @@ class CRStateMetrics {
315677
316204
  }
315678
316205
  this.provisionedGauge.record(provisionedCount, {
315679
316206
  namespace: this.namespace,
316207
+ kind: this.kind,
315680
316208
  });
315681
316209
  this.provisioningGauge.record(provisioningCount, {
315682
316210
  namespace: this.namespace,
316211
+ kind: this.kind,
316212
+ });
316213
+ this.planningGauge.record(planningCount, {
316214
+ namespace: this.namespace,
316215
+ kind: this.kind,
316216
+ });
316217
+ this.deletedGauge.record(deletedCount, {
316218
+ namespace: this.namespace,
316219
+ kind: this.kind,
316220
+ });
316221
+ this.outOfSyncGauge.record(outOfSyncCount, {
316222
+ namespace: this.namespace,
316223
+ kind: this.kind,
316224
+ });
316225
+ this.errorGauge.record(errorCount, {
316226
+ namespace: this.namespace,
316227
+ kind: this.kind,
315683
316228
  });
315684
- this.planningGauge.record(planningCount, { namespace: this.namespace });
315685
- this.deletedGauge.record(deletedCount, { namespace: this.namespace });
315686
- this.outOfSyncGauge.record(outOfSyncCount, { namespace: this.namespace });
315687
- this.errorGauge.record(errorCount, { namespace: this.namespace });
315688
316229
  }
315689
316230
  catch (err) {
315690
- console.log(err);
316231
+ console.log(`CRStateMetrics: update ${err}`);
315691
316232
  this.onUpdate = false;
315692
- throw new Error(`CRStateMetrics: update ${err}`);
316233
+ operator_src_logger.error('CR_METRICS_UPDATE', { error: err });
315693
316234
  }
315694
316235
  this.onUpdate = false;
315695
316236
  }
@@ -315783,7 +316324,7 @@ async function startCRStates(meter, kindList, namespace) {
315783
316324
 
315784
316325
  const deploymentName = catalog_common.environment.getFromEnvironment(catalog_common.types.envVars.operatorDeploymentName) || 'firestartr-firestartr-controller';
315785
316326
  const DEFAULT_OPERATOR_DEPLOY = (/* unused pure expression or super */ null && (deploymentName));
315786
- async function tfPlanner(claimFilePath, claim, namespace, debug, jobTtl, cmd = 'plan') {
316327
+ async function tfPlanner(claimFilePath, claim, namespace, debug, jobTtl = 300, cmd = 'plan') {
315787
316328
  const { kc } = await getConnection();
315788
316329
  const k8sApi = kc.makeApiClient(client.AppsV1Api);
315789
316330
  const batchV1Api = kc.makeApiClient(client.BatchV1Api);
@@ -315803,10 +316344,12 @@ async function tfPlanner(claimFilePath, claim, namespace, debug, jobTtl, cmd = '
315803
316344
  ? '/library/scripts/run.sh'
315804
316345
  : '/library/run.sh';
315805
316346
  job.spec = new client.V1JobSpec();
315806
- if (jobTtl)
315807
- job.spec.ttlSecondsAfterFinished = jobTtl;
316347
+ job.spec.ttlSecondsAfterFinished = jobTtl;
315808
316348
  job.spec.template = controllerDeploy.body.spec
315809
316349
  .template;
316350
+ // set activeDeadlineSeconds to force terminate jobs that exceed this time
316351
+ // see https://kubernetes.io/docs/concepts/workloads/controllers/job/#job-termination-and-cleanup
316352
+ job.spec.activeDeadlineSeconds = 3600;
315810
316353
  job.spec.template.spec.containers[0].command = [
315811
316354
  'sh',
315812
316355
  '-c',
@@ -315818,9 +316361,12 @@ async function tfPlanner(claimFilePath, claim, namespace, debug, jobTtl, cmd = '
315818
316361
  }
315819
316362
  job.spec.template.spec.restartPolicy = 'Never';
315820
316363
  job.metadata = metadata;
316364
+ // we exclude logs to be sent to datadog
316365
+ job.spec.template.metadata.annotations = {
316366
+ 'ad.datadoghq.com/logs_exclude': 'true',
316367
+ };
315821
316368
  await batchV1Api.createNamespacedJob(namespace, job);
315822
316369
  await copyClaimAndGetLogs(namespace, job.metadata.name, claimFilePath);
315823
- await batchV1Api.deleteNamespacedJob(job.metadata.name, namespace);
315824
316370
  }
315825
316371
  async function copyClaimAndGetLogs(namespace, jobName, sourcePath) {
315826
316372
  const { kc } = await getConnection();
@@ -315974,7 +316520,7 @@ function runOperator(opts) {
315974
316520
  importModeActive = importMode;
315975
316521
  if (importModeSkipPlan)
315976
316522
  importModeSkipPlanActive = importModeSkipPlan;
315977
- src_logger.info('START_OPERATOR', { ...opts });
316523
+ operator_src_logger.info(`started the operator with options ${JSON.stringify(opts)}`);
315978
316524
  const run = ignoreLease
315979
316525
  ? (_namespace, cb) => cb()
315980
316526
  : acquireLease;
@@ -315994,7 +316540,7 @@ function runOperator(opts) {
315994
316540
  .catch((e) => {
315995
316541
  console.log('exit catch kind', kind);
315996
316542
  console.error(e);
315997
- src_logger.error('CRASHED', { kind, error: e });
316543
+ operator_src_logger.error('CRASHED', { kind, error: e });
315998
316544
  })
315999
316545
  .finally(() => {
316000
316546
  console.log('kind', kind);
@@ -316026,7 +316572,7 @@ function getProvisionImplementation(plural) {
316026
316572
  }
316027
316573
  if (!implementation)
316028
316574
  throw new Error(`No implementation found for ${plural}`);
316029
- src_logger.info('GOT_PROVISION_IMPL', { kind: plural });
316575
+ operator_src_logger.info(`Retrieved the provision implementation for the kind '${plural}'`);
316030
316576
  return implementation;
316031
316577
  }
316032
316578