@probelabs/visor 0.1.158 → 0.1.159-ee

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (81) hide show
  1. package/defaults/assistant.yaml +9 -0
  2. package/dist/defaults/assistant.yaml +9 -0
  3. package/dist/index.js +1938 -228
  4. package/dist/sdk/{check-provider-registry-IRHRMYUJ.mjs → check-provider-registry-A32RRIIF.mjs} +5 -5
  5. package/dist/sdk/{check-provider-registry-LVXLROAJ.mjs → check-provider-registry-CZLHVHZZ.mjs} +5 -5
  6. package/dist/sdk/{chunk-FRTHLKIG.mjs → chunk-A3B4LEGZ.mjs} +15 -15
  7. package/dist/sdk/{chunk-ILIWDV37.mjs → chunk-ITZWA2RF.mjs} +2 -2
  8. package/dist/sdk/{chunk-FRVHW725.mjs → chunk-QSY3XLA3.mjs} +3 -3
  9. package/dist/sdk/{chunk-ZWPQV2VT.mjs → chunk-RHAKGIBI.mjs} +16 -16
  10. package/dist/sdk/{chunk-ZWPQV2VT.mjs.map → chunk-RHAKGIBI.mjs.map} +1 -1
  11. package/dist/sdk/{chunk-GQ7H7E4Y.mjs → chunk-WYD5ISLQ.mjs} +2 -2
  12. package/dist/sdk/{chunk-GQ7H7E4Y.mjs.map → chunk-WYD5ISLQ.mjs.map} +1 -1
  13. package/dist/sdk/{failure-condition-evaluator-ORMMBR23.mjs → failure-condition-evaluator-5DJZEGYT.mjs} +3 -3
  14. package/dist/sdk/{github-frontend-NNWL7IXV.mjs → github-frontend-TJ6TGLUW.mjs} +3 -3
  15. package/dist/sdk/{host-MICUIU7J.mjs → host-KO3WVPVP.mjs} +2 -2
  16. package/dist/sdk/{host-YKTAWW33.mjs → host-VJWSCD7X.mjs} +2 -2
  17. package/dist/sdk/knex-store-CRORFJE6.mjs +527 -0
  18. package/dist/sdk/knex-store-CRORFJE6.mjs.map +1 -0
  19. package/dist/sdk/loader-NJCF7DUS.mjs +89 -0
  20. package/dist/sdk/loader-NJCF7DUS.mjs.map +1 -0
  21. package/dist/sdk/opa-policy-engine-S2S2ULEI.mjs +655 -0
  22. package/dist/sdk/opa-policy-engine-S2S2ULEI.mjs.map +1 -0
  23. package/dist/sdk/{routing-L224WQSY.mjs → routing-7JFYIXHU.mjs} +4 -4
  24. package/dist/sdk/{schedule-tool-6KIQPMWM.mjs → schedule-tool-RF3GRKMG.mjs} +5 -5
  25. package/dist/sdk/{schedule-tool-JNL6TFP4.mjs → schedule-tool-Y2R3YZL6.mjs} +5 -5
  26. package/dist/sdk/{schedule-tool-handler-4SG6HYLF.mjs → schedule-tool-handler-5FGL2VQ5.mjs} +5 -5
  27. package/dist/sdk/{schedule-tool-handler-IYK54BWK.mjs → schedule-tool-handler-JRSULLPY.mjs} +5 -5
  28. package/dist/sdk/sdk.js +1634 -260
  29. package/dist/sdk/sdk.js.map +1 -1
  30. package/dist/sdk/sdk.mjs +4 -4
  31. package/dist/sdk/{trace-helpers-AWTAWKRA.mjs → trace-helpers-VPGAR5ZR.mjs} +2 -2
  32. package/dist/sdk/validator-XTZJZZJH.mjs +134 -0
  33. package/dist/sdk/validator-XTZJZZJH.mjs.map +1 -0
  34. package/dist/sdk/{workflow-check-provider-MVDV4U7F.mjs → workflow-check-provider-23RPALNF.mjs} +5 -5
  35. package/dist/sdk/{workflow-check-provider-V5QHQPTG.mjs → workflow-check-provider-LWJTZ5AI.mjs} +5 -5
  36. package/package.json +2 -2
  37. package/dist/output/traces/run-2026-03-05T09-35-31-694Z.ndjson +0 -138
  38. package/dist/output/traces/run-2026-03-05T09-36-16-833Z.ndjson +0 -2197
  39. package/dist/sdk/check-provider-registry-2GTN7M37.mjs +0 -29
  40. package/dist/sdk/chunk-IJGA5TFA.mjs +0 -739
  41. package/dist/sdk/chunk-ILIWDV37.mjs.map +0 -1
  42. package/dist/sdk/chunk-QRTO7XNW.mjs +0 -43780
  43. package/dist/sdk/chunk-QRTO7XNW.mjs.map +0 -1
  44. package/dist/sdk/chunk-SPCGI24K.mjs +0 -1502
  45. package/dist/sdk/chunk-SPCGI24K.mjs.map +0 -1
  46. package/dist/sdk/chunk-TLXAIQLH.mjs +0 -443
  47. package/dist/sdk/chunk-TLXAIQLH.mjs.map +0 -1
  48. package/dist/sdk/failure-condition-evaluator-LYFZMQ6Y.mjs +0 -17
  49. package/dist/sdk/github-frontend-XKPAYXOT.mjs +0 -1368
  50. package/dist/sdk/github-frontend-XKPAYXOT.mjs.map +0 -1
  51. package/dist/sdk/routing-SVLHRQEW.mjs +0 -25
  52. package/dist/sdk/schedule-tool-526VUPMF.mjs +0 -35
  53. package/dist/sdk/schedule-tool-handler-IYK54BWK.mjs.map +0 -1
  54. package/dist/sdk/schedule-tool-handler-Z5VWUB76.mjs +0 -39
  55. package/dist/sdk/schedule-tool-handler-Z5VWUB76.mjs.map +0 -1
  56. package/dist/sdk/trace-helpers-AWTAWKRA.mjs.map +0 -1
  57. package/dist/sdk/trace-helpers-TOEA67GA.mjs +0 -25
  58. package/dist/sdk/trace-helpers-TOEA67GA.mjs.map +0 -1
  59. package/dist/sdk/workflow-check-provider-5G6BGZEX.mjs +0 -29
  60. package/dist/sdk/workflow-check-provider-5G6BGZEX.mjs.map +0 -1
  61. package/dist/sdk/workflow-check-provider-MVDV4U7F.mjs.map +0 -1
  62. package/dist/sdk/workflow-check-provider-V5QHQPTG.mjs.map +0 -1
  63. package/dist/traces/run-2026-03-05T09-35-31-694Z.ndjson +0 -138
  64. package/dist/traces/run-2026-03-05T09-36-16-833Z.ndjson +0 -2197
  65. /package/dist/sdk/{check-provider-registry-2GTN7M37.mjs.map → check-provider-registry-A32RRIIF.mjs.map} +0 -0
  66. /package/dist/sdk/{check-provider-registry-IRHRMYUJ.mjs.map → check-provider-registry-CZLHVHZZ.mjs.map} +0 -0
  67. /package/dist/sdk/{chunk-FRTHLKIG.mjs.map → chunk-A3B4LEGZ.mjs.map} +0 -0
  68. /package/dist/sdk/{chunk-IJGA5TFA.mjs.map → chunk-ITZWA2RF.mjs.map} +0 -0
  69. /package/dist/sdk/{chunk-FRVHW725.mjs.map → chunk-QSY3XLA3.mjs.map} +0 -0
  70. /package/dist/sdk/{check-provider-registry-LVXLROAJ.mjs.map → failure-condition-evaluator-5DJZEGYT.mjs.map} +0 -0
  71. /package/dist/sdk/{github-frontend-NNWL7IXV.mjs.map → github-frontend-TJ6TGLUW.mjs.map} +0 -0
  72. /package/dist/sdk/{host-MICUIU7J.mjs.map → host-KO3WVPVP.mjs.map} +0 -0
  73. /package/dist/sdk/{host-YKTAWW33.mjs.map → host-VJWSCD7X.mjs.map} +0 -0
  74. /package/dist/sdk/{failure-condition-evaluator-LYFZMQ6Y.mjs.map → routing-7JFYIXHU.mjs.map} +0 -0
  75. /package/dist/sdk/{failure-condition-evaluator-ORMMBR23.mjs.map → schedule-tool-RF3GRKMG.mjs.map} +0 -0
  76. /package/dist/sdk/{routing-L224WQSY.mjs.map → schedule-tool-Y2R3YZL6.mjs.map} +0 -0
  77. /package/dist/sdk/{routing-SVLHRQEW.mjs.map → schedule-tool-handler-5FGL2VQ5.mjs.map} +0 -0
  78. /package/dist/sdk/{schedule-tool-526VUPMF.mjs.map → schedule-tool-handler-JRSULLPY.mjs.map} +0 -0
  79. /package/dist/sdk/{schedule-tool-6KIQPMWM.mjs.map → trace-helpers-VPGAR5ZR.mjs.map} +0 -0
  80. /package/dist/sdk/{schedule-tool-JNL6TFP4.mjs.map → workflow-check-provider-23RPALNF.mjs.map} +0 -0
  81. /package/dist/sdk/{schedule-tool-handler-4SG6HYLF.mjs.map → workflow-check-provider-LWJTZ5AI.mjs.map} +0 -0
package/dist/index.js CHANGED
@@ -1,8 +1,8 @@
1
1
  #!/usr/bin/env node
2
- process.env.VISOR_VERSION = '0.1.158';
3
- process.env.PROBE_VERSION = '0.6.0-rc271';
4
- process.env.VISOR_COMMIT_SHA = '1e3454d9f5650edec0c5c1cb982653fb1e3ae7c5';
5
- process.env.VISOR_COMMIT_SHORT = '1e3454d9';
2
+ process.env.VISOR_VERSION = '0.1.159';
3
+ process.env.PROBE_VERSION = '0.6.0-rc274';
4
+ process.env.VISOR_COMMIT_SHA = '345101301086b988952a5425033fc0ca9cbaafe1';
5
+ process.env.VISOR_COMMIT_SHORT = '3451013';
6
6
  /******/ (() => { // webpackBootstrap
7
7
  /******/ var __webpack_modules__ = ({
8
8
 
@@ -161210,7 +161210,7 @@ async function handleDumpPolicyInput(checkId, argv) {
161210
161210
  let PolicyInputBuilder;
161211
161211
  try {
161212
161212
  // @ts-ignore — enterprise/ may not exist in OSS builds (caught at runtime)
161213
- const mod = await Promise.resolve().then(() => __importStar(__nccwpck_require__(71370)));
161213
+ const mod = await Promise.resolve().then(() => __importStar(__nccwpck_require__(17117)));
161214
161214
  PolicyInputBuilder = mod.PolicyInputBuilder;
161215
161215
  }
161216
161216
  catch {
@@ -167097,6 +167097,1810 @@ class DependencyResolver {
167097
167097
  exports.DependencyResolver = DependencyResolver;
167098
167098
 
167099
167099
 
167100
+ /***/ }),
167101
+
167102
+ /***/ 50069:
167103
+ /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
167104
+
167105
+ "use strict";
167106
+
167107
+ /**
167108
+ * Copyright (c) ProbeLabs. All rights reserved.
167109
+ * Licensed under the Elastic License 2.0; you may not use this file except
167110
+ * in compliance with the Elastic License 2.0.
167111
+ */
167112
+ var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
167113
+ if (k2 === undefined) k2 = k;
167114
+ var desc = Object.getOwnPropertyDescriptor(m, k);
167115
+ if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
167116
+ desc = { enumerable: true, get: function() { return m[k]; } };
167117
+ }
167118
+ Object.defineProperty(o, k2, desc);
167119
+ }) : (function(o, m, k, k2) {
167120
+ if (k2 === undefined) k2 = k;
167121
+ o[k2] = m[k];
167122
+ }));
167123
+ var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
167124
+ Object.defineProperty(o, "default", { enumerable: true, value: v });
167125
+ }) : function(o, v) {
167126
+ o["default"] = v;
167127
+ });
167128
+ var __importStar = (this && this.__importStar) || (function () {
167129
+ var ownKeys = function(o) {
167130
+ ownKeys = Object.getOwnPropertyNames || function (o) {
167131
+ var ar = [];
167132
+ for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
167133
+ return ar;
167134
+ };
167135
+ return ownKeys(o);
167136
+ };
167137
+ return function (mod) {
167138
+ if (mod && mod.__esModule) return mod;
167139
+ var result = {};
167140
+ if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
167141
+ __setModuleDefault(result, mod);
167142
+ return result;
167143
+ };
167144
+ })();
167145
+ Object.defineProperty(exports, "__esModule", ({ value: true }));
167146
+ exports.LicenseValidator = void 0;
167147
+ const crypto = __importStar(__nccwpck_require__(76982));
167148
+ const fs = __importStar(__nccwpck_require__(79896));
167149
+ const path = __importStar(__nccwpck_require__(16928));
167150
+ class LicenseValidator {
167151
+ /** Ed25519 public key for license verification (PEM format). */
167152
+ static PUBLIC_KEY = '-----BEGIN PUBLIC KEY-----\n' +
167153
+ 'MCowBQYDK2VwAyEAI/Zd08EFmgIdrDm/HXd0l3/5GBt7R1PrdvhdmEXhJlU=\n' +
167154
+ '-----END PUBLIC KEY-----\n';
167155
+ cache = null;
167156
+ static CACHE_TTL = 5 * 60 * 1000; // 5 minutes
167157
+ static GRACE_PERIOD = 72 * 3600 * 1000; // 72 hours after expiry
167158
+ /**
167159
+ * Load and validate license from environment or file.
167160
+ *
167161
+ * Resolution order:
167162
+ * 1. VISOR_LICENSE env var (JWT string)
167163
+ * 2. VISOR_LICENSE_FILE env var (path to file)
167164
+ * 3. .visor-license in project root (cwd)
167165
+ * 4. .visor-license in ~/.config/visor/
167166
+ */
167167
+ async loadAndValidate() {
167168
+ // Return cached result if still fresh
167169
+ if (this.cache && Date.now() - this.cache.validatedAt < LicenseValidator.CACHE_TTL) {
167170
+ return this.cache.payload;
167171
+ }
167172
+ const token = this.resolveToken();
167173
+ if (!token)
167174
+ return null;
167175
+ const payload = this.verifyAndDecode(token);
167176
+ if (!payload)
167177
+ return null;
167178
+ this.cache = { payload, validatedAt: Date.now() };
167179
+ return payload;
167180
+ }
167181
+ /** Check if a specific feature is licensed */
167182
+ hasFeature(feature) {
167183
+ if (!this.cache)
167184
+ return false;
167185
+ return this.cache.payload.features.includes(feature);
167186
+ }
167187
+ /** Check if license is valid (with grace period) */
167188
+ isValid() {
167189
+ if (!this.cache)
167190
+ return false;
167191
+ const now = Date.now();
167192
+ const expiryMs = this.cache.payload.exp * 1000;
167193
+ return now < expiryMs + LicenseValidator.GRACE_PERIOD;
167194
+ }
167195
+ /** Check if the license is within its grace period (expired but still valid) */
167196
+ isInGracePeriod() {
167197
+ if (!this.cache)
167198
+ return false;
167199
+ const now = Date.now();
167200
+ const expiryMs = this.cache.payload.exp * 1000;
167201
+ return now >= expiryMs && now < expiryMs + LicenseValidator.GRACE_PERIOD;
167202
+ }
167203
+ resolveToken() {
167204
+ // 1. Direct env var
167205
+ if (process.env.VISOR_LICENSE) {
167206
+ return process.env.VISOR_LICENSE.trim();
167207
+ }
167208
+ // 2. File path from env (validate against path traversal)
167209
+ if (process.env.VISOR_LICENSE_FILE) {
167210
+ // path.resolve() produces an absolute path with all '..' segments resolved,
167211
+ // so a separate resolved.includes('..') check is unnecessary.
167212
+ const resolved = path.resolve(process.env.VISOR_LICENSE_FILE);
167213
+ const home = process.env.HOME || process.env.USERPROFILE || '';
167214
+ const allowedPrefixes = [path.normalize(process.cwd())];
167215
+ if (home)
167216
+ allowedPrefixes.push(path.normalize(path.join(home, '.config', 'visor')));
167217
+ // Resolve symlinks so an attacker cannot create a symlink inside an
167218
+ // allowed prefix that points to an arbitrary file outside it.
167219
+ let realPath;
167220
+ try {
167221
+ realPath = fs.realpathSync(resolved);
167222
+ }
167223
+ catch {
167224
+ return null; // File doesn't exist or isn't accessible
167225
+ }
167226
+ const isSafe = allowedPrefixes.some(prefix => realPath === prefix || realPath.startsWith(prefix + path.sep));
167227
+ if (!isSafe)
167228
+ return null;
167229
+ return this.readFile(realPath);
167230
+ }
167231
+ // 3. .visor-license in cwd
167232
+ const cwdPath = path.join(process.cwd(), '.visor-license');
167233
+ const cwdToken = this.readFile(cwdPath);
167234
+ if (cwdToken)
167235
+ return cwdToken;
167236
+ // 4. ~/.config/visor/.visor-license
167237
+ const home = process.env.HOME || process.env.USERPROFILE || '';
167238
+ if (home) {
167239
+ const configPath = path.join(home, '.config', 'visor', '.visor-license');
167240
+ const configToken = this.readFile(configPath);
167241
+ if (configToken)
167242
+ return configToken;
167243
+ }
167244
+ return null;
167245
+ }
167246
+ readFile(filePath) {
167247
+ try {
167248
+ return fs.readFileSync(filePath, 'utf-8').trim();
167249
+ }
167250
+ catch {
167251
+ return null;
167252
+ }
167253
+ }
167254
+ verifyAndDecode(token) {
167255
+ try {
167256
+ const parts = token.split('.');
167257
+ if (parts.length !== 3)
167258
+ return null;
167259
+ const [headerB64, payloadB64, signatureB64] = parts;
167260
+ // Decode header to verify algorithm
167261
+ const header = JSON.parse(Buffer.from(headerB64, 'base64url').toString());
167262
+ if (header.alg !== 'EdDSA')
167263
+ return null;
167264
+ // Verify signature
167265
+ const data = `${headerB64}.${payloadB64}`;
167266
+ const signature = Buffer.from(signatureB64, 'base64url');
167267
+ const publicKey = crypto.createPublicKey(LicenseValidator.PUBLIC_KEY);
167268
+ // Validate that the loaded public key is actually Ed25519 (OID 1.3.101.112).
167269
+ // This prevents algorithm-confusion attacks if the embedded key were ever
167270
+ // swapped to a different type.
167271
+ if (publicKey.asymmetricKeyType !== 'ed25519') {
167272
+ return null;
167273
+ }
167274
+ // Ed25519 verification: algorithm must be null because EdDSA performs its
167275
+ // own internal hashing (SHA-512) — passing a digest algorithm here would
167276
+ // cause Node.js to throw. The key type is validated above.
167277
+ const isValid = crypto.verify(null, Buffer.from(data), publicKey, signature);
167278
+ if (!isValid)
167279
+ return null;
167280
+ // Decode payload
167281
+ const payload = JSON.parse(Buffer.from(payloadB64, 'base64url').toString());
167282
+ // Validate required fields
167283
+ if (!payload.org ||
167284
+ !Array.isArray(payload.features) ||
167285
+ typeof payload.exp !== 'number' ||
167286
+ typeof payload.iat !== 'number' ||
167287
+ !payload.sub) {
167288
+ return null;
167289
+ }
167290
+ // Check expiry (with grace period)
167291
+ const now = Date.now();
167292
+ const expiryMs = payload.exp * 1000;
167293
+ if (now >= expiryMs + LicenseValidator.GRACE_PERIOD) {
167294
+ return null;
167295
+ }
167296
+ return payload;
167297
+ }
167298
+ catch {
167299
+ return null;
167300
+ }
167301
+ }
167302
+ }
167303
+ exports.LicenseValidator = LicenseValidator;
167304
+
167305
+
167306
+ /***/ }),
167307
+
167308
+ /***/ 87068:
167309
+ /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
167310
+
167311
+ "use strict";
167312
+
167313
+ /**
167314
+ * Copyright (c) ProbeLabs. All rights reserved.
167315
+ * Licensed under the Elastic License 2.0; you may not use this file except
167316
+ * in compliance with the Elastic License 2.0.
167317
+ */
167318
+ var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
167319
+ if (k2 === undefined) k2 = k;
167320
+ var desc = Object.getOwnPropertyDescriptor(m, k);
167321
+ if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
167322
+ desc = { enumerable: true, get: function() { return m[k]; } };
167323
+ }
167324
+ Object.defineProperty(o, k2, desc);
167325
+ }) : (function(o, m, k, k2) {
167326
+ if (k2 === undefined) k2 = k;
167327
+ o[k2] = m[k];
167328
+ }));
167329
+ var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
167330
+ Object.defineProperty(o, "default", { enumerable: true, value: v });
167331
+ }) : function(o, v) {
167332
+ o["default"] = v;
167333
+ });
167334
+ var __importStar = (this && this.__importStar) || (function () {
167335
+ var ownKeys = function(o) {
167336
+ ownKeys = Object.getOwnPropertyNames || function (o) {
167337
+ var ar = [];
167338
+ for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
167339
+ return ar;
167340
+ };
167341
+ return ownKeys(o);
167342
+ };
167343
+ return function (mod) {
167344
+ if (mod && mod.__esModule) return mod;
167345
+ var result = {};
167346
+ if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
167347
+ __setModuleDefault(result, mod);
167348
+ return result;
167349
+ };
167350
+ })();
167351
+ Object.defineProperty(exports, "__esModule", ({ value: true }));
167352
+ exports.loadEnterprisePolicyEngine = loadEnterprisePolicyEngine;
167353
+ exports.loadEnterpriseStoreBackend = loadEnterpriseStoreBackend;
167354
+ const default_engine_1 = __nccwpck_require__(93866);
167355
+ /**
167356
+ * Load the enterprise policy engine if licensed, otherwise return the default no-op engine.
167357
+ *
167358
+ * This is the sole import boundary between OSS and enterprise code. Core code
167359
+ * must only import from this module (via dynamic `await import()`), never from
167360
+ * individual enterprise submodules.
167361
+ */
167362
+ async function loadEnterprisePolicyEngine(config) {
167363
+ try {
167364
+ const { LicenseValidator } = await Promise.resolve().then(() => __importStar(__nccwpck_require__(50069)));
167365
+ const validator = new LicenseValidator();
167366
+ const license = await validator.loadAndValidate();
167367
+ if (!license || !validator.hasFeature('policy')) {
167368
+ return new default_engine_1.DefaultPolicyEngine();
167369
+ }
167370
+ if (validator.isInGracePeriod()) {
167371
+ // eslint-disable-next-line no-console
167372
+ console.warn('[visor:enterprise] License has expired but is within the 72-hour grace period. ' +
167373
+ 'Please renew your license.');
167374
+ }
167375
+ const { OpaPolicyEngine } = await Promise.resolve().then(() => __importStar(__nccwpck_require__(39530)));
167376
+ const engine = new OpaPolicyEngine(config);
167377
+ await engine.initialize(config);
167378
+ return engine;
167379
+ }
167380
+ catch (err) {
167381
+ // Enterprise code not available or initialization failed
167382
+ const msg = err instanceof Error ? err.message : String(err);
167383
+ try {
167384
+ const { logger } = __nccwpck_require__(86999);
167385
+ logger.warn(`[PolicyEngine] Enterprise policy init failed, falling back to default: ${msg}`);
167386
+ }
167387
+ catch {
167388
+ // silent
167389
+ }
167390
+ return new default_engine_1.DefaultPolicyEngine();
167391
+ }
167392
+ }
167393
+ /**
167394
+ * Load the enterprise schedule store backend if licensed.
167395
+ *
167396
+ * @param driver Database driver ('postgresql', 'mysql', or 'mssql')
167397
+ * @param storageConfig Storage configuration with connection details
167398
+ * @param haConfig Optional HA configuration
167399
+ * @throws Error if enterprise license is not available or missing 'scheduler-sql' feature
167400
+ */
167401
+ async function loadEnterpriseStoreBackend(driver, storageConfig, haConfig) {
167402
+ const { LicenseValidator } = await Promise.resolve().then(() => __importStar(__nccwpck_require__(50069)));
167403
+ const validator = new LicenseValidator();
167404
+ const license = await validator.loadAndValidate();
167405
+ if (!license || !validator.hasFeature('scheduler-sql')) {
167406
+ throw new Error(`The ${driver} schedule storage driver requires a Visor Enterprise license ` +
167407
+ `with the 'scheduler-sql' feature. Please upgrade or use driver: 'sqlite' (default).`);
167408
+ }
167409
+ if (validator.isInGracePeriod()) {
167410
+ // eslint-disable-next-line no-console
167411
+ console.warn('[visor:enterprise] License has expired but is within the 72-hour grace period. ' +
167412
+ 'Please renew your license.');
167413
+ }
167414
+ const { KnexStoreBackend } = await Promise.resolve().then(() => __importStar(__nccwpck_require__(63737)));
167415
+ return new KnexStoreBackend(driver, storageConfig, haConfig);
167416
+ }
167417
+
167418
+
167419
+ /***/ }),
167420
+
167421
+ /***/ 628:
167422
+ /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
167423
+
167424
+ "use strict";
167425
+
167426
+ /**
167427
+ * Copyright (c) ProbeLabs. All rights reserved.
167428
+ * Licensed under the Elastic License 2.0; you may not use this file except
167429
+ * in compliance with the Elastic License 2.0.
167430
+ */
167431
+ var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
167432
+ if (k2 === undefined) k2 = k;
167433
+ var desc = Object.getOwnPropertyDescriptor(m, k);
167434
+ if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
167435
+ desc = { enumerable: true, get: function() { return m[k]; } };
167436
+ }
167437
+ Object.defineProperty(o, k2, desc);
167438
+ }) : (function(o, m, k, k2) {
167439
+ if (k2 === undefined) k2 = k;
167440
+ o[k2] = m[k];
167441
+ }));
167442
+ var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
167443
+ Object.defineProperty(o, "default", { enumerable: true, value: v });
167444
+ }) : function(o, v) {
167445
+ o["default"] = v;
167446
+ });
167447
+ var __importStar = (this && this.__importStar) || (function () {
167448
+ var ownKeys = function(o) {
167449
+ ownKeys = Object.getOwnPropertyNames || function (o) {
167450
+ var ar = [];
167451
+ for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
167452
+ return ar;
167453
+ };
167454
+ return ownKeys(o);
167455
+ };
167456
+ return function (mod) {
167457
+ if (mod && mod.__esModule) return mod;
167458
+ var result = {};
167459
+ if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
167460
+ __setModuleDefault(result, mod);
167461
+ return result;
167462
+ };
167463
+ })();
167464
+ Object.defineProperty(exports, "__esModule", ({ value: true }));
167465
+ exports.OpaCompiler = void 0;
167466
+ const fs = __importStar(__nccwpck_require__(79896));
167467
+ const path = __importStar(__nccwpck_require__(16928));
167468
+ const os = __importStar(__nccwpck_require__(70857));
167469
+ const crypto = __importStar(__nccwpck_require__(76982));
167470
+ const child_process_1 = __nccwpck_require__(35317);
167471
+ /**
167472
+ * OPA Rego Compiler - compiles .rego policy files to WASM bundles using the `opa` CLI.
167473
+ *
167474
+ * Handles:
167475
+ * - Resolving input paths to WASM bytes (direct .wasm, directory with policy.wasm, or .rego files)
167476
+ * - Compiling .rego files to WASM via `opa build`
167477
+ * - Caching compiled bundles based on content hashes
167478
+ * - Extracting policy.wasm from OPA tar.gz bundles
167479
+ *
167480
+ * Requires:
167481
+ * - `opa` CLI on PATH (only when auto-compiling .rego files)
167482
+ */
167483
+ class OpaCompiler {
167484
+ static CACHE_DIR = path.join(os.tmpdir(), 'visor-opa-cache');
167485
+ /**
167486
+ * Resolve the input paths to WASM bytes.
167487
+ *
167488
+ * Strategy:
167489
+ * 1. If any path is a .wasm file, read it directly
167490
+ * 2. If a directory contains policy.wasm, read it
167491
+ * 3. Otherwise, collect all .rego files and auto-compile via `opa build`
167492
+ */
167493
+ async resolveWasmBytes(paths) {
167494
+ // Collect .rego files and check for existing .wasm
167495
+ const regoFiles = [];
167496
+ for (const p of paths) {
167497
+ const resolved = path.resolve(p);
167498
+ // Reject paths containing '..' after resolution (path traversal)
167499
+ if (path.normalize(resolved).includes('..')) {
167500
+ throw new Error(`Policy path contains traversal sequences: ${p}`);
167501
+ }
167502
+ // Direct .wasm file
167503
+ if (resolved.endsWith('.wasm') && fs.existsSync(resolved)) {
167504
+ return fs.readFileSync(resolved);
167505
+ }
167506
+ if (!fs.existsSync(resolved))
167507
+ continue;
167508
+ const stat = fs.statSync(resolved);
167509
+ if (stat.isDirectory()) {
167510
+ // Check for pre-compiled policy.wasm in directory
167511
+ const wasmCandidate = path.join(resolved, 'policy.wasm');
167512
+ if (fs.existsSync(wasmCandidate)) {
167513
+ return fs.readFileSync(wasmCandidate);
167514
+ }
167515
+ // Collect all .rego files from directory
167516
+ const files = fs.readdirSync(resolved);
167517
+ for (const f of files) {
167518
+ if (f.endsWith('.rego')) {
167519
+ regoFiles.push(path.join(resolved, f));
167520
+ }
167521
+ }
167522
+ }
167523
+ else if (resolved.endsWith('.rego')) {
167524
+ regoFiles.push(resolved);
167525
+ }
167526
+ }
167527
+ if (regoFiles.length === 0) {
167528
+ throw new Error(`OPA WASM evaluator: no .wasm bundle or .rego files found in: ${paths.join(', ')}`);
167529
+ }
167530
+ // Auto-compile .rego -> .wasm
167531
+ return this.compileRego(regoFiles);
167532
+ }
167533
+ /**
167534
+ * Auto-compile .rego files to a WASM bundle using the `opa` CLI.
167535
+ *
167536
+ * Caches the compiled bundle based on a content hash of all input .rego files
167537
+ * so subsequent runs skip compilation if policies haven't changed.
167538
+ */
167539
+ compileRego(regoFiles) {
167540
+ // Check that `opa` CLI is available
167541
+ try {
167542
+ (0, child_process_1.execFileSync)('opa', ['version'], { stdio: 'pipe' });
167543
+ }
167544
+ catch {
167545
+ throw new Error('OPA CLI (`opa`) not found on PATH. Install it from https://www.openpolicyagent.org/docs/latest/#running-opa\n' +
167546
+ 'Or pre-compile your .rego files: opa build -t wasm -e visor -o bundle.tar.gz ' +
167547
+ regoFiles.join(' '));
167548
+ }
167549
+ // Compute content hash for cache key
167550
+ const hash = crypto.createHash('sha256');
167551
+ for (const f of regoFiles.sort()) {
167552
+ hash.update(fs.readFileSync(f));
167553
+ hash.update(f); // include filename for disambiguation
167554
+ }
167555
+ const cacheKey = hash.digest('hex').slice(0, 16);
167556
+ const cacheDir = OpaCompiler.CACHE_DIR;
167557
+ const cachedWasm = path.join(cacheDir, `${cacheKey}.wasm`);
167558
+ // Return cached bundle if still valid
167559
+ if (fs.existsSync(cachedWasm)) {
167560
+ return fs.readFileSync(cachedWasm);
167561
+ }
167562
+ // Compile to WASM via opa build
167563
+ fs.mkdirSync(cacheDir, { recursive: true });
167564
+ const bundleTar = path.join(cacheDir, `${cacheKey}-bundle.tar.gz`);
167565
+ try {
167566
+ const args = [
167567
+ 'build',
167568
+ '-t',
167569
+ 'wasm',
167570
+ '-e',
167571
+ 'visor', // entrypoint: the visor package tree
167572
+ '-o',
167573
+ bundleTar,
167574
+ ...regoFiles,
167575
+ ];
167576
+ (0, child_process_1.execFileSync)('opa', args, {
167577
+ stdio: 'pipe',
167578
+ timeout: 30000,
167579
+ });
167580
+ }
167581
+ catch (err) {
167582
+ const stderr = err?.stderr?.toString() || '';
167583
+ throw new Error(`Failed to compile .rego files to WASM:\n${stderr}\n` +
167584
+ 'Ensure your .rego files are valid and the `opa` CLI is installed.');
167585
+ }
167586
+ // Extract policy.wasm from the tar.gz bundle
167587
+ // OPA bundles are tar.gz with /policy.wasm inside
167588
+ try {
167589
+ (0, child_process_1.execFileSync)('tar', ['-xzf', bundleTar, '-C', cacheDir, '/policy.wasm'], {
167590
+ stdio: 'pipe',
167591
+ });
167592
+ const extractedWasm = path.join(cacheDir, 'policy.wasm');
167593
+ if (fs.existsSync(extractedWasm)) {
167594
+ // Move to cache-key named file
167595
+ fs.renameSync(extractedWasm, cachedWasm);
167596
+ }
167597
+ }
167598
+ catch {
167599
+ // Some tar implementations don't like leading /
167600
+ try {
167601
+ (0, child_process_1.execFileSync)('tar', ['-xzf', bundleTar, '-C', cacheDir, 'policy.wasm'], {
167602
+ stdio: 'pipe',
167603
+ });
167604
+ const extractedWasm = path.join(cacheDir, 'policy.wasm');
167605
+ if (fs.existsSync(extractedWasm)) {
167606
+ fs.renameSync(extractedWasm, cachedWasm);
167607
+ }
167608
+ }
167609
+ catch (err2) {
167610
+ throw new Error(`Failed to extract policy.wasm from OPA bundle: ${err2?.message || err2}`);
167611
+ }
167612
+ }
167613
+ // Clean up tar
167614
+ try {
167615
+ fs.unlinkSync(bundleTar);
167616
+ }
167617
+ catch { }
167618
+ if (!fs.existsSync(cachedWasm)) {
167619
+ throw new Error('OPA build succeeded but policy.wasm was not found in the bundle');
167620
+ }
167621
+ return fs.readFileSync(cachedWasm);
167622
+ }
167623
+ }
167624
+ exports.OpaCompiler = OpaCompiler;
167625
+
167626
+
167627
+ /***/ }),
167628
+
167629
+ /***/ 44693:
167630
+ /***/ ((__unused_webpack_module, exports) => {
167631
+
167632
+ "use strict";
167633
+
167634
+ /**
167635
+ * Copyright (c) ProbeLabs. All rights reserved.
167636
+ * Licensed under the Elastic License 2.0; you may not use this file except
167637
+ * in compliance with the Elastic License 2.0.
167638
+ */
167639
+ Object.defineProperty(exports, "__esModule", ({ value: true }));
167640
+ exports.OpaHttpEvaluator = void 0;
167641
+ /**
167642
+ * OPA HTTP Evaluator - evaluates policies via an external OPA server's REST API.
167643
+ *
167644
+ * Uses the built-in `fetch` API (Node 18+), so no extra dependencies are needed.
167645
+ */
167646
/**
 * OPA HTTP Evaluator - evaluates policies via an external OPA server's REST API.
 *
 * Uses the built-in `fetch` API (Node 18+), so no extra dependencies are needed.
 */
class OpaHttpEvaluator {
    baseUrl;
    timeout;
    /**
     * @param baseUrl Base URL of the OPA server; must use http(s) and must not
     *   target loopback, link-local, private, or cloud-metadata addresses.
     * @param timeout Per-request timeout in milliseconds (default 5000).
     * @throws {Error} On malformed URLs, disallowed protocols, or blocked hosts.
     */
    constructor(baseUrl, timeout = 5000) {
        // Validate URL format and protocol
        let parsed;
        try {
            parsed = new URL(baseUrl);
        }
        catch {
            throw new Error(`OPA HTTP evaluator: invalid URL: ${baseUrl}`);
        }
        if (!['http:', 'https:'].includes(parsed.protocol)) {
            throw new Error(`OPA HTTP evaluator: url must use http:// or https:// protocol, got: ${baseUrl}`);
        }
        // Block cloud metadata, loopback, link-local, and private network addresses
        if (this.isBlockedHostname(parsed.hostname)) {
            throw new Error(`OPA HTTP evaluator: url must not point to internal, loopback, or private network addresses`);
        }
        // Normalize: strip trailing slash
        this.baseUrl = baseUrl.replace(/\/+$/, '');
        this.timeout = timeout;
    }
    /**
     * Check if a hostname is blocked due to SSRF concerns.
     *
     * Blocks:
     * - Loopback addresses (127.x.x.x, localhost, 0.0.0.0, ::1)
     * - Link-local addresses (169.254.x.x, fe80::/10)
     * - Private networks (10.x.x.x, 172.16-31.x.x, 192.168.x.x)
     * - IPv6 unique local addresses (fc00::/7)
     * - Cloud metadata services (*.internal)
     */
    isBlockedHostname(hostname) {
        if (!hostname)
            return true; // block empty hostnames
        // Normalize hostname: lowercase and remove brackets for IPv6
        const normalized = hostname.toLowerCase().replace(/^\[|\]$/g, '');
        // Block .internal domains (cloud metadata services)
        if (normalized === 'metadata.google.internal' || normalized.endsWith('.internal')) {
            return true;
        }
        // Block localhost variants
        if (normalized === 'localhost' || normalized === 'localhost.localdomain') {
            return true;
        }
        // Check IPv4 literals
        const ipv4Pattern = /^(\d{1,3})\.(\d{1,3})\.(\d{1,3})\.(\d{1,3})$/;
        const ipv4Match = normalized.match(ipv4Pattern);
        if (ipv4Match) {
            const octets = ipv4Match.slice(1, 5).map(Number);
            // Out-of-range octets mean this is not a valid IPv4 literal;
            // treat it as a DNS name and allow it through.
            if (octets.some(octet => octet > 255)) {
                return false;
            }
            const [a, b] = octets;
            if (a === 127)
                return true; // loopback: 127.0.0.0/8
            if (a === 0)
                return true; // "this host": 0.0.0.0/8
            if (a === 169 && b === 254)
                return true; // link-local: 169.254.0.0/16
            if (a === 10)
                return true; // private: 10.0.0.0/8
            if (a === 172 && b >= 16 && b <= 31)
                return true; // private: 172.16.0.0/12
            if (a === 192 && b === 168)
                return true; // private: 192.168.0.0/16
            return false;
        }
        // IPv6 literals always contain ':'. Only apply the IPv6 prefix checks
        // to them — previously, plain DNS names that merely start with "fc" or
        // "fd" (e.g. fda.gov) were incorrectly blocked.
        if (normalized.includes(':')) {
            // Loopback
            if (normalized === '::1' || normalized === '0:0:0:0:0:0:0:1') {
                return true;
            }
            // Unique local addresses: fc00::/7
            if (normalized.startsWith('fd') || normalized.startsWith('fc')) {
                return true;
            }
            // Link-local: fe80::/10
            if (normalized.startsWith('fe80:')) {
                return true;
            }
        }
        return false;
    }
    /**
     * Evaluate a policy rule against an input document via OPA REST API.
     *
     * @param input - The input document to evaluate
     * @param rulePath - OPA rule path (e.g., 'visor/check/execute')
     * @returns The `result` object from OPA, or undefined when OPA returned no
     *   result; throws on HTTP errors, JSON parse failures, or timeout abort.
     */
    async evaluate(input, rulePath) {
        // OPA Data API: POST /v1/data/<path>
        const encodedPath = rulePath
            .split('/')
            .map(s => encodeURIComponent(s))
            .join('/');
        const url = `${this.baseUrl}/v1/data/${encodedPath}`;
        const controller = new AbortController();
        const timer = setTimeout(() => controller.abort(), this.timeout);
        try {
            const response = await fetch(url, {
                method: 'POST',
                headers: { 'Content-Type': 'application/json' },
                body: JSON.stringify({ input }),
                signal: controller.signal,
            });
            if (!response.ok) {
                throw new Error(`OPA HTTP ${response.status}: ${response.statusText}`);
            }
            let body;
            try {
                body = await response.json();
            }
            catch (jsonErr) {
                throw new Error(`OPA HTTP evaluator: failed to parse JSON response: ${jsonErr instanceof Error ? jsonErr.message : String(jsonErr)}`);
            }
            // OPA returns { result: { ... } }
            return body?.result;
        }
        finally {
            clearTimeout(timer);
        }
    }
    /** Nothing to release: fetch keeps no persistent connections here. */
    async shutdown() {
        // No persistent connections to close
    }
}
167788
+ exports.OpaHttpEvaluator = OpaHttpEvaluator;
167789
+
167790
+
167791
+ /***/ }),
167792
+
167793
+ /***/ 39530:
167794
+ /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
167795
+
167796
+ "use strict";
167797
+
167798
+ /**
167799
+ * Copyright (c) ProbeLabs. All rights reserved.
167800
+ * Licensed under the Elastic License 2.0; you may not use this file except
167801
+ * in compliance with the Elastic License 2.0.
167802
+ */
167803
+ Object.defineProperty(exports, "__esModule", ({ value: true }));
167804
+ exports.OpaPolicyEngine = void 0;
167805
+ const opa_wasm_evaluator_1 = __nccwpck_require__(8613);
167806
+ const opa_http_evaluator_1 = __nccwpck_require__(44693);
167807
+ const policy_input_builder_1 = __nccwpck_require__(17117);
167808
+ /**
167809
+ * Enterprise OPA Policy Engine.
167810
+ *
167811
+ * Wraps both WASM (local) and HTTP (remote) OPA evaluators behind the
167812
+ * OSS PolicyEngine interface. All OPA input building and role resolution
167813
+ * is handled internally — the OSS call sites pass only plain types.
167814
+ */
167815
/**
 * Enterprise OPA Policy Engine.
 *
 * Wraps both WASM (local) and HTTP (remote) OPA evaluators behind the
 * OSS PolicyEngine interface. All OPA input building and role resolution
 * is handled internally — the OSS call sites pass only plain types.
 */
class OpaPolicyEngine {
    evaluator = null;
    fallback;
    timeout;
    config;
    inputBuilder = null;
    logger = null;
    /**
     * @param config Policy configuration. `fallback` defaults to 'deny' and
     *   `timeout` to 5000ms when omitted.
     */
    constructor(config) {
        this.config = config;
        this.fallback = config.fallback || 'deny';
        this.timeout = config.timeout || 5000;
    }
    /**
     * Initialize the engine: resolve the logger, build actor/repo/PR context
     * from the environment, and construct the configured evaluator
     * ('local' → WASM, 'remote' → HTTP, anything else → disabled).
     */
    async initialize(config) {
        // Resolve logger once at initialization
        try {
            this.logger = (__nccwpck_require__(86999).logger);
        }
        catch {
            // logger not available in this context
        }
        // Build actor/repo context from environment (available at engine init time)
        const actor = {
            authorAssociation: process.env.VISOR_AUTHOR_ASSOCIATION,
            login: process.env.VISOR_AUTHOR_LOGIN || process.env.GITHUB_ACTOR,
            isLocalMode: !process.env.GITHUB_ACTIONS,
        };
        const repo = {
            owner: process.env.GITHUB_REPOSITORY_OWNER,
            name: process.env.GITHUB_REPOSITORY?.split('/')[1],
            branch: process.env.GITHUB_HEAD_REF,
            baseBranch: process.env.GITHUB_BASE_REF,
            event: process.env.GITHUB_EVENT_NAME,
        };
        const prNum = process.env.GITHUB_PR_NUMBER
            ? parseInt(process.env.GITHUB_PR_NUMBER, 10)
            : undefined;
        const pullRequest = {
            number: prNum !== undefined && Number.isFinite(prNum) ? prNum : undefined,
        };
        this.inputBuilder = new policy_input_builder_1.PolicyInputBuilder(config, actor, repo, pullRequest);
        if (config.engine === 'local') {
            if (!config.rules) {
                throw new Error('OPA local mode requires `policy.rules` path to .wasm or .rego files');
            }
            const wasm = new opa_wasm_evaluator_1.OpaWasmEvaluator();
            await wasm.initialize(config.rules);
            if (config.data) {
                wasm.loadData(config.data);
            }
            this.evaluator = wasm;
        }
        else if (config.engine === 'remote') {
            if (!config.url) {
                throw new Error('OPA remote mode requires `policy.url` pointing to OPA server');
            }
            this.evaluator = new opa_http_evaluator_1.OpaHttpEvaluator(config.url, this.timeout);
        }
        else {
            this.evaluator = null;
        }
    }
    /**
     * Update actor/repo/PR context (e.g., after PR info becomes available).
     * Called by the enterprise loader when engine context is enriched.
     */
    setActorContext(actor, repo, pullRequest) {
        this.inputBuilder = new policy_input_builder_1.PolicyInputBuilder(this.config, actor, repo, pullRequest);
    }
    /** Decide whether a check may execute. Allows when policy is disabled. */
    async evaluateCheckExecution(checkId, checkConfig) {
        if (!this.evaluator || !this.inputBuilder)
            return { allowed: true };
        const cfg = checkConfig && typeof checkConfig === 'object'
            ? checkConfig
            : {};
        const policyOverride = cfg.policy;
        const input = this.inputBuilder.forCheckExecution({
            id: checkId,
            type: cfg.type || 'ai',
            group: cfg.group,
            tags: cfg.tags,
            criticality: cfg.criticality,
            sandbox: cfg.sandbox,
            policy: policyOverride,
        });
        return this.doEvaluate(input, this.resolveRulePath('check.execute', policyOverride?.rule));
    }
    /** Decide whether a tool method may be invoked. Allows when disabled. */
    async evaluateToolInvocation(serverName, methodName, transport) {
        if (!this.evaluator || !this.inputBuilder)
            return { allowed: true };
        const input = this.inputBuilder.forToolInvocation(serverName, methodName, transport);
        return this.doEvaluate(input, 'visor/tool/invoke');
    }
    /** Decide which capabilities a check may resolve. Allows when disabled. */
    async evaluateCapabilities(checkId, capabilities) {
        if (!this.evaluator || !this.inputBuilder)
            return { allowed: true };
        const input = this.inputBuilder.forCapabilityResolve(checkId, capabilities);
        return this.doEvaluate(input, 'visor/capability/resolve');
    }
    /** Release the underlying evaluator and drop cached context. */
    async shutdown() {
        if (this.evaluator && 'shutdown' in this.evaluator) {
            await this.evaluator.shutdown();
        }
        this.evaluator = null;
        this.inputBuilder = null;
    }
    /**
     * Resolve the OPA rule path for a scope, honoring per-check overrides.
     * Overrides missing the `visor/` package prefix get it prepended.
     */
    resolveRulePath(defaultScope, override) {
        if (override) {
            return override.startsWith('visor/') ? override : `visor/${override}`;
        }
        return `visor/${defaultScope.replace(/\./g, '/')}`;
    }
    /**
     * Decision applied when evaluation fails or yields no result:
     * honor the configured fallback ('allow'/'warn' permit, 'deny' blocks).
     */
    fallbackDecision(reason) {
        return {
            allowed: this.fallback === 'allow' || this.fallback === 'warn',
            warn: this.fallback === 'warn' ? true : undefined,
            reason,
        };
    }
    /**
     * Evaluate `input` against `rulePath` with a timeout, mapping the raw OPA
     * result to a decision. In 'warn' mode, denied decisions are downgraded to
     * allowed-with-warning; any error falls back per configuration.
     */
    async doEvaluate(input, rulePath) {
        try {
            this.logger?.debug(`[PolicyEngine] Evaluating ${rulePath}`, JSON.stringify(input));
            let timer;
            const timeoutPromise = new Promise((_resolve, reject) => {
                timer = setTimeout(() => reject(new Error('policy evaluation timeout')), this.timeout);
            });
            try {
                const evaluation = this.rawEvaluate(input, rulePath);
                // If the timeout wins the race, the still-pending evaluation could
                // reject later with no handler attached — an unhandled promise
                // rejection (fatal under Node's strict unhandled-rejections mode).
                // Pre-attach a no-op handler; the race below still observes the
                // rejection whenever the evaluation settles first.
                evaluation.catch(() => { });
                const result = await Promise.race([evaluation, timeoutPromise]);
                const decision = this.parseDecision(result);
                // In warn mode, override denied decisions to allowed but flag as warn
                if (!decision.allowed && this.fallback === 'warn') {
                    decision.allowed = true;
                    decision.warn = true;
                    decision.reason = `audit: ${decision.reason || 'policy denied'}`;
                }
                this.logger?.debug(`[PolicyEngine] Decision for ${rulePath}: allowed=${decision.allowed}, warn=${decision.warn || false}, reason=${decision.reason || 'none'}`);
                return decision;
            }
            finally {
                if (timer)
                    clearTimeout(timer);
            }
        }
        catch (err) {
            const msg = err instanceof Error ? err.message : String(err);
            this.logger?.warn(`[PolicyEngine] Evaluation failed for ${rulePath}: ${msg}`);
            return this.fallbackDecision(`policy evaluation failed, fallback=${this.fallback}`);
        }
    }
    /**
     * Run the underlying evaluator. For WASM, navigate the full package tree
     * returned by the `-e visor` entrypoint down to the requested rule.
     */
    async rawEvaluate(input, rulePath) {
        if (this.evaluator instanceof opa_wasm_evaluator_1.OpaWasmEvaluator) {
            const result = await this.evaluator.evaluate(input);
            // e.g., 'visor/check/execute' → result.check.execute
            return this.navigateWasmResult(result, rulePath);
        }
        return this.evaluator.evaluate(input, rulePath);
    }
    /**
     * Navigate nested OPA WASM result tree to reach the specific rule's output.
     * The WASM entrypoint `-e visor` means the result root IS the visor package,
     * so we strip the `visor/` prefix and walk the remaining segments.
     */
    navigateWasmResult(result, rulePath) {
        if (!result || typeof result !== 'object')
            return result;
        const segments = rulePath.replace(/^visor\//, '').split('/');
        let current = result;
        for (const seg of segments) {
            if (current && typeof current === 'object' && seg in current) {
                current = current[seg];
            }
            else {
                return undefined; // path not found in result tree
            }
        }
        return current;
    }
    /**
     * Map a raw OPA result to a decision. A missing result defers to the
     * configured fallback; otherwise `allowed` is true unless explicitly false.
     */
    parseDecision(result) {
        if (result === undefined || result === null) {
            return this.fallbackDecision(this.fallback === 'warn' ? 'audit: no policy result' : 'no policy result');
        }
        const allowed = result.allowed !== false;
        const decision = {
            allowed,
            reason: result.reason,
        };
        if (result.capabilities) {
            decision.capabilities = result.capabilities;
        }
        return decision;
    }
}
168010
+ exports.OpaPolicyEngine = OpaPolicyEngine;
168011
+
168012
+
168013
+ /***/ }),
168014
+
168015
+ /***/ 8613:
168016
+ /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
168017
+
168018
+ "use strict";
168019
+
168020
+ /**
168021
+ * Copyright (c) ProbeLabs. All rights reserved.
168022
+ * Licensed under the Elastic License 2.0; you may not use this file except
168023
+ * in compliance with the Elastic License 2.0.
168024
+ */
168025
+ var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
168026
+ if (k2 === undefined) k2 = k;
168027
+ var desc = Object.getOwnPropertyDescriptor(m, k);
168028
+ if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
168029
+ desc = { enumerable: true, get: function() { return m[k]; } };
168030
+ }
168031
+ Object.defineProperty(o, k2, desc);
168032
+ }) : (function(o, m, k, k2) {
168033
+ if (k2 === undefined) k2 = k;
168034
+ o[k2] = m[k];
168035
+ }));
168036
+ var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
168037
+ Object.defineProperty(o, "default", { enumerable: true, value: v });
168038
+ }) : function(o, v) {
168039
+ o["default"] = v;
168040
+ });
168041
+ var __importStar = (this && this.__importStar) || (function () {
168042
+ var ownKeys = function(o) {
168043
+ ownKeys = Object.getOwnPropertyNames || function (o) {
168044
+ var ar = [];
168045
+ for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
168046
+ return ar;
168047
+ };
168048
+ return ownKeys(o);
168049
+ };
168050
+ return function (mod) {
168051
+ if (mod && mod.__esModule) return mod;
168052
+ var result = {};
168053
+ if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
168054
+ __setModuleDefault(result, mod);
168055
+ return result;
168056
+ };
168057
+ })();
168058
+ Object.defineProperty(exports, "__esModule", ({ value: true }));
168059
+ exports.OpaWasmEvaluator = void 0;
168060
+ const fs = __importStar(__nccwpck_require__(79896));
168061
+ const path = __importStar(__nccwpck_require__(16928));
168062
+ const opa_compiler_1 = __nccwpck_require__(628);
168063
+ /**
168064
+ * OPA WASM Evaluator - loads and evaluates OPA policies locally.
168065
+ *
168066
+ * Supports three input formats:
168067
+ * 1. Pre-compiled `.wasm` bundle — loaded directly (fastest startup)
168068
+ * 2. `.rego` files or directory — auto-compiled to WASM via `opa build` CLI
168069
+ * 3. Directory with `policy.wasm` inside — loaded directly
168070
+ *
168071
+ * Compilation and caching of .rego files is delegated to {@link OpaCompiler}.
168072
+ *
168073
+ * Requires:
168074
+ * - `@open-policy-agent/opa-wasm` npm package (optional dep)
168075
+ * - `opa` CLI on PATH (only when auto-compiling .rego files)
168076
+ */
168077
/**
 * OPA WASM Evaluator - loads and evaluates OPA policies locally.
 *
 * Supports three input formats:
 * 1. Pre-compiled `.wasm` bundle — loaded directly (fastest startup)
 * 2. `.rego` files or directory — auto-compiled to WASM via `opa build` CLI
 * 3. Directory with `policy.wasm` inside — loaded directly
 *
 * Compilation and caching of .rego files is delegated to {@link OpaCompiler}.
 *
 * Requires:
 * - `@open-policy-agent/opa-wasm` npm package (optional dep)
 * - `opa` CLI on PATH (only when auto-compiling .rego files)
 */
class OpaWasmEvaluator {
    policy = null;
    dataDocument = {};
    compiler = new opa_compiler_1.OpaCompiler();
    /**
     * Resolve the policy WASM bytes from the given path(s) and instantiate the
     * policy via `@open-policy-agent/opa-wasm`.
     * @throws {Error} With an install hint when the optional dep is missing.
     */
    async initialize(rulesPath) {
        const paths = Array.isArray(rulesPath) ? rulesPath : [rulesPath];
        const wasmBytes = await this.compiler.resolveWasmBytes(paths);
        try {
            // Use createRequire to load the optional dep at runtime without ncc bundling it.
            // `new Function('id', 'return require(id)')` fails in ncc bundles because
            // `require` is not in the `new Function` scope. `createRequire` works correctly
            // because it creates a real Node.js require rooted at the given path.
            // eslint-disable-next-line @typescript-eslint/no-var-requires
            const { createRequire } = __nccwpck_require__(73339);
            const runtimeRequire = createRequire(__filename);
            const opaWasm = runtimeRequire('@open-policy-agent/opa-wasm');
            const loadPolicy = opaWasm.loadPolicy || opaWasm.default?.loadPolicy;
            if (!loadPolicy) {
                throw new Error('loadPolicy not found in @open-policy-agent/opa-wasm');
            }
            this.policy = await loadPolicy(wasmBytes);
        }
        catch (err) {
            if (err?.code === 'MODULE_NOT_FOUND' || err?.code === 'ERR_MODULE_NOT_FOUND') {
                throw new Error('OPA WASM evaluator requires @open-policy-agent/opa-wasm. ' +
                    'Install it with: npm install @open-policy-agent/opa-wasm');
            }
            throw err;
        }
    }
    /**
     * Load external data from a JSON file to use as the OPA data document.
     * The loaded data will be passed to `policy.setData()` during evaluation,
     * making it available in Rego via `data.<key>`.
     * @throws {Error} On traversal sequences, missing files, files over 10MB,
     *   invalid JSON, or JSON that is not a plain object.
     */
    loadData(dataPath) {
        // Reject explicit `..` path segments in the *configured* path.
        // (The previous check ran `includes('..')` on the already-resolved
        // absolute path — which never contains a `..` segment after
        // `path.resolve` — so it blocked nothing real while false-positiving
        // on legitimate file names such as `my..data.json`.)
        if (String(dataPath).split(/[\\/]+/).includes('..')) {
            throw new Error(`Data path contains traversal sequences: ${dataPath}`);
        }
        const resolved = path.resolve(dataPath);
        if (!fs.existsSync(resolved)) {
            throw new Error(`OPA data file not found: ${resolved}`);
        }
        // Cap the file size to avoid loading unbounded data into memory
        const stat = fs.statSync(resolved);
        if (stat.size > 10 * 1024 * 1024) {
            throw new Error(`OPA data file exceeds 10MB limit: ${resolved} (${stat.size} bytes)`);
        }
        const raw = fs.readFileSync(resolved, 'utf-8');
        let parsed;
        try {
            parsed = JSON.parse(raw);
        }
        catch (err) {
            throw new Error(`Failed to parse OPA data file ${resolved}: ${err.message}`);
        }
        // Validate the shape outside the parse try so our own validation error
        // is not re-wrapped (replaces the fragile `err.message.startsWith(...)`
        // rethrow discrimination).
        if (typeof parsed !== 'object' || parsed === null || Array.isArray(parsed)) {
            throw new Error('OPA data file must contain a JSON object (not an array or primitive)');
        }
        this.dataDocument = parsed;
    }
    /**
     * Evaluate the loaded policy against `input`.
     * @returns The first result in the OPA result set, or undefined when the
     *   policy produced no results.
     * @throws {Error} When called before a successful initialize().
     */
    async evaluate(input) {
        if (!this.policy) {
            throw new Error('OPA WASM evaluator not initialized');
        }
        this.policy.setData(this.dataDocument);
        const resultSet = this.policy.evaluate(input);
        if (Array.isArray(resultSet) && resultSet.length > 0) {
            return resultSet[0].result;
        }
        return undefined;
    }
    /** Best-effort release of the WASM policy instance. */
    async shutdown() {
        if (this.policy) {
            // opa-wasm policy objects may have a close/free method for WASM cleanup
            if (typeof this.policy.close === 'function') {
                try {
                    this.policy.close();
                }
                catch { }
            }
            else if (typeof this.policy.free === 'function') {
                try {
                    this.policy.free();
                }
                catch { }
            }
        }
        this.policy = null;
    }
}
168169
+ exports.OpaWasmEvaluator = OpaWasmEvaluator;
168170
+
168171
+
168172
+ /***/ }),
168173
+
168174
+ /***/ 17117:
168175
+ /***/ ((__unused_webpack_module, exports) => {
168176
+
168177
+ "use strict";
168178
+
168179
+ /**
168180
+ * Copyright (c) ProbeLabs. All rights reserved.
168181
+ * Licensed under the Elastic License 2.0; you may not use this file except
168182
+ * in compliance with the Elastic License 2.0.
168183
+ */
168184
+ Object.defineProperty(exports, "__esModule", ({ value: true }));
168185
+ exports.PolicyInputBuilder = void 0;
168186
+ /**
168187
+ * Builds OPA-compatible input documents from engine context.
168188
+ *
168189
+ * Resolves actor roles from the `policy.roles` config section by matching
168190
+ * the actor's authorAssociation and login against role definitions.
168191
+ */
168192
/**
 * Builds OPA-compatible input documents from engine context.
 *
 * Resolves actor roles from the `policy.roles` config section by matching
 * the actor's authorAssociation and login against role definitions.
 */
class PolicyInputBuilder {
    roles;
    actor;
    repository;
    pullRequest;
    constructor(policyConfig, actor, repository, pullRequest) {
        this.roles = policyConfig.roles || {};
        this.actor = actor;
        this.repository = repository;
        this.pullRequest = pullRequest;
    }
    /** Resolve which roles apply to the current actor. */
    resolveRoles() {
        const matched = [];
        for (const [roleName, roleConfig] of Object.entries(this.roles)) {
            if (this.matchesIdentity(roleConfig) && this.passesChannelGate(roleConfig)) {
                matched.push(roleName);
            }
        }
        return matched;
    }
    /** True when the actor satisfies any identity criterion of the role. */
    matchesIdentity(roleConfig) {
        const { authorAssociation, login, slack } = this.actor;
        if (authorAssociation && roleConfig.author_association?.includes(authorAssociation)) {
            return true;
        }
        if (login && roleConfig.users?.includes(login)) {
            return true;
        }
        // Slack user ID match
        if (slack?.userId && roleConfig.slack_users?.includes(slack.userId)) {
            return true;
        }
        // Email match (case-insensitive)
        if (slack?.email && roleConfig.emails) {
            const wanted = slack.email.toLowerCase();
            if (roleConfig.emails.some(e => e.toLowerCase() === wanted)) {
                return true;
            }
        }
        // Note: teams-based role resolution requires GitHub API access (read:org scope)
        // and is not yet implemented. If configured, the role will not match via teams.
        return false;
    }
    /**
     * slack_channels gate: when set, the role only applies if the trigger
     * originated from one of the listed channels.
     */
    passesChannelGate(roleConfig) {
        const channels = roleConfig.slack_channels;
        if (!channels || channels.length === 0) {
            return true;
        }
        const channelId = this.actor.slack?.channelId;
        return Boolean(channelId) && channels.includes(channelId);
    }
    /** Snapshot of the actor with resolved roles, embedded into every input. */
    buildActor() {
        const actorDoc = {
            authorAssociation: this.actor.authorAssociation,
            login: this.actor.login,
            roles: this.resolveRoles(),
            isLocalMode: this.actor.isLocalMode,
        };
        if (this.actor.slack) {
            actorDoc.slack = this.actor.slack;
        }
        return actorDoc;
    }
    /** Input document for a `check.execute` policy decision. */
    forCheckExecution(check) {
        const { id, type, group, tags, criticality, sandbox, policy } = check;
        return {
            scope: 'check.execute',
            check: { id, type, group, tags, criticality, sandbox, policy },
            ...this.commonContext(),
        };
    }
    /** Input document for a `tool.invoke` policy decision. */
    forToolInvocation(serverName, methodName, transport) {
        return {
            scope: 'tool.invoke',
            tool: { serverName, methodName, transport },
            ...this.commonContext(),
        };
    }
    /** Input document for a `capability.resolve` policy decision. */
    forCapabilityResolve(checkId, capabilities) {
        return {
            scope: 'capability.resolve',
            check: { id: checkId, type: 'ai' },
            capability: capabilities,
            ...this.commonContext(),
        };
    }
    /** Shared actor/repository/pullRequest tail appended to every input. */
    commonContext() {
        return {
            actor: this.buildActor(),
            repository: this.repository,
            pullRequest: this.pullRequest,
        };
    }
}
168294
+ exports.PolicyInputBuilder = PolicyInputBuilder;
168295
+
168296
+
168297
+ /***/ }),
168298
+
168299
+ /***/ 63737:
168300
+ /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
168301
+
168302
+ "use strict";
168303
+
168304
+ /**
168305
+ * Copyright (c) ProbeLabs. All rights reserved.
168306
+ * Licensed under the Elastic License 2.0; you may not use this file except
168307
+ * in compliance with the Elastic License 2.0.
168308
+ */
168309
+ var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
168310
+ if (k2 === undefined) k2 = k;
168311
+ var desc = Object.getOwnPropertyDescriptor(m, k);
168312
+ if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
168313
+ desc = { enumerable: true, get: function() { return m[k]; } };
168314
+ }
168315
+ Object.defineProperty(o, k2, desc);
168316
+ }) : (function(o, m, k, k2) {
168317
+ if (k2 === undefined) k2 = k;
168318
+ o[k2] = m[k];
168319
+ }));
168320
+ var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
168321
+ Object.defineProperty(o, "default", { enumerable: true, value: v });
168322
+ }) : function(o, v) {
168323
+ o["default"] = v;
168324
+ });
168325
+ var __importStar = (this && this.__importStar) || (function () {
168326
+ var ownKeys = function(o) {
168327
+ ownKeys = Object.getOwnPropertyNames || function (o) {
168328
+ var ar = [];
168329
+ for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
168330
+ return ar;
168331
+ };
168332
+ return ownKeys(o);
168333
+ };
168334
+ return function (mod) {
168335
+ if (mod && mod.__esModule) return mod;
168336
+ var result = {};
168337
+ if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
168338
+ __setModuleDefault(result, mod);
168339
+ return result;
168340
+ };
168341
+ })();
168342
+ Object.defineProperty(exports, "__esModule", ({ value: true }));
168343
+ exports.KnexStoreBackend = void 0;
168344
+ /**
168345
+ * Knex-backed schedule store for PostgreSQL, MySQL, and MSSQL (Enterprise)
168346
+ *
168347
+ * Uses Knex query builder for database-agnostic SQL. Same schema as SQLite backend
168348
+ * but with real distributed locking via row-level claims (claimed_by/claimed_at/lock_token).
168349
+ */
168350
+ const fs = __importStar(__nccwpck_require__(79896));
168351
+ const path = __importStar(__nccwpck_require__(16928));
168352
+ const uuid_1 = __nccwpck_require__(31914);
168353
+ const logger_1 = __nccwpck_require__(86999);
168354
/**
 * Normalize a numeric DB column to a JS number.
 * Returns undefined for null/undefined; parses base-10 strings (BIGINT columns
 * may arrive as strings depending on the driver); numbers pass through as-is.
 */
function toNum(val) {
    if (val == null) {
        return undefined;
    }
    if (typeof val === 'string') {
        return parseInt(val, 10);
    }
    return val;
}
168359
/**
 * Parse a JSON text column, returning undefined instead of throwing when the
 * value is empty/null or contains invalid JSON (stale or hand-edited rows).
 */
function safeJsonParse(value) {
    if (!value) {
        return undefined;
    }
    let parsed;
    try {
        parsed = JSON.parse(value);
    }
    catch {
        parsed = undefined;
    }
    return parsed;
}
168369
/**
 * Map a trigger table row (snake_case columns) to a camelCase trigger record.
 *
 * Column handling:
 * - JSON text columns (channels/from_users/contains/inputs/output_context)
 *   are parsed leniently via safeJsonParse (undefined on null or bad JSON).
 * - Boolean columns accept native booleans or 0/1 integers (driver-dependent
 *   encodings — see the `=== true || === 1` checks).
 * - created_at is normalized through toNum (may arrive as a string).
 * - Nullable text columns map NULL to undefined via `?? undefined`.
 */
function fromTriggerRow(row) {
    return {
        id: row.id,
        creatorId: row.creator_id,
        creatorContext: row.creator_context ?? undefined,
        creatorName: row.creator_name ?? undefined,
        description: row.description ?? undefined,
        channels: safeJsonParse(row.channels),
        fromUsers: safeJsonParse(row.from_users),
        fromBots: row.from_bots === true || row.from_bots === 1,
        contains: safeJsonParse(row.contains),
        matchPattern: row.match_pattern ?? undefined,
        threads: row.threads,
        workflow: row.workflow,
        inputs: safeJsonParse(row.inputs),
        outputContext: safeJsonParse(row.output_context),
        status: row.status,
        enabled: row.enabled === true || row.enabled === 1,
        createdAt: toNum(row.created_at),
    };
}
168390
/**
 * Map a camelCase trigger record to a trigger table insert row
 * (snake_case columns; absent optionals stored as NULL, collections as JSON text).
 */
function toTriggerInsertRow(trigger) {
    // Serialize an optional collection to JSON text, or NULL when absent.
    const json = (v) => (v ? JSON.stringify(v) : null);
    return {
        id: trigger.id,
        creator_id: trigger.creatorId,
        creator_context: trigger.creatorContext ?? null,
        creator_name: trigger.creatorName ?? null,
        description: trigger.description ?? null,
        channels: json(trigger.channels),
        from_users: json(trigger.fromUsers),
        from_bots: trigger.fromBots,
        contains: json(trigger.contains),
        match_pattern: trigger.matchPattern ?? null,
        threads: trigger.threads,
        workflow: trigger.workflow,
        inputs: json(trigger.inputs),
        output_context: json(trigger.outputContext),
        status: trigger.status,
        enabled: trigger.enabled,
        created_at: trigger.createdAt,
    };
}
168411
/**
 * Map a schedule table row (snake_case columns) to a camelCase schedule record.
 *
 * Column handling:
 * - Timestamp columns (run_at/created_at/last_run_at/next_run_at) go through
 *   toNum since some drivers return BIGINT columns as strings.
 * - is_recurring accepts native booleans or 0/1 integers (driver-dependent).
 * - JSON text columns (workflow_inputs/output_context) are parsed leniently
 *   via safeJsonParse (undefined on null or invalid JSON).
 * - Nullable text columns map NULL to undefined via `?? undefined`.
 * - The `schedule_expr` column feeds the `schedule` field (the column is
 *   renamed to avoid clashing with SQL keywords).
 */
function fromDbRow(row) {
    return {
        id: row.id,
        creatorId: row.creator_id,
        creatorContext: row.creator_context ?? undefined,
        creatorName: row.creator_name ?? undefined,
        timezone: row.timezone,
        schedule: row.schedule_expr,
        runAt: toNum(row.run_at),
        isRecurring: row.is_recurring === true || row.is_recurring === 1,
        originalExpression: row.original_expression,
        workflow: row.workflow ?? undefined,
        workflowInputs: safeJsonParse(row.workflow_inputs),
        outputContext: safeJsonParse(row.output_context),
        status: row.status,
        createdAt: toNum(row.created_at),
        lastRunAt: toNum(row.last_run_at),
        nextRunAt: toNum(row.next_run_at),
        runCount: row.run_count,
        failureCount: row.failure_count,
        lastError: row.last_error ?? undefined,
        previousResponse: row.previous_response ?? undefined,
    };
}
168435
/**
 * Map a camelCase schedule record to a schedule table insert row
 * (snake_case columns; absent optionals stored as NULL, objects as JSON text).
 */
function toInsertRow(schedule) {
    // Serialize an optional object to JSON text, or NULL when absent.
    const json = (v) => (v ? JSON.stringify(v) : null);
    return {
        id: schedule.id,
        creator_id: schedule.creatorId,
        creator_context: schedule.creatorContext ?? null,
        creator_name: schedule.creatorName ?? null,
        timezone: schedule.timezone,
        schedule_expr: schedule.schedule,
        run_at: schedule.runAt ?? null,
        is_recurring: schedule.isRecurring,
        original_expression: schedule.originalExpression,
        workflow: schedule.workflow ?? null,
        workflow_inputs: json(schedule.workflowInputs),
        output_context: json(schedule.outputContext),
        status: schedule.status,
        created_at: schedule.createdAt,
        last_run_at: schedule.lastRunAt ?? null,
        next_run_at: schedule.nextRunAt ?? null,
        run_count: schedule.runCount,
        failure_count: schedule.failureCount,
        last_error: schedule.lastError ?? null,
        previous_response: schedule.previousResponse ?? null,
    };
}
168459
+ /**
168460
+ * Enterprise Knex-backed store for PostgreSQL, MySQL, and MSSQL
168461
+ */
168462
+ class KnexStoreBackend {
168463
+ knex = null;
168464
+ driver;
168465
+ connection;
168466
+ constructor(driver, storageConfig, _haConfig) {
168467
+ this.driver = driver;
168468
+ this.connection = (storageConfig.connection || {});
168469
+ }
168470
+ async initialize() {
168471
+ // Load knex dynamically
168472
+ const { createRequire } = __nccwpck_require__(73339);
168473
+ const runtimeRequire = createRequire(__filename);
168474
+ let knexFactory;
168475
+ try {
168476
+ knexFactory = runtimeRequire('knex');
168477
+ }
168478
+ catch (err) {
168479
+ const code = err?.code;
168480
+ if (code === 'MODULE_NOT_FOUND' || code === 'ERR_MODULE_NOT_FOUND') {
168481
+ throw new Error('knex is required for PostgreSQL/MySQL/MSSQL schedule storage. ' +
168482
+ 'Install it with: npm install knex');
168483
+ }
168484
+ throw err;
168485
+ }
168486
+ const clientMap = {
168487
+ postgresql: 'pg',
168488
+ mysql: 'mysql2',
168489
+ mssql: 'tedious',
168490
+ };
168491
+ const client = clientMap[this.driver];
168492
+ // Build connection config
168493
+ let connection;
168494
+ if (this.connection.connection_string) {
168495
+ connection = this.connection.connection_string;
168496
+ }
168497
+ else if (this.driver === 'mssql') {
168498
+ connection = this.buildMssqlConnection();
168499
+ }
168500
+ else {
168501
+ connection = this.buildStandardConnection();
168502
+ }
168503
+ this.knex = knexFactory({
168504
+ client,
168505
+ connection,
168506
+ pool: {
168507
+ min: this.connection.pool?.min ?? 0,
168508
+ max: this.connection.pool?.max ?? 10,
168509
+ },
168510
+ });
168511
+ // Run schema migration
168512
+ await this.migrateSchema();
168513
+ logger_1.logger.info(`[KnexStore] Initialized (${this.driver})`);
168514
+ }
168515
+ buildStandardConnection() {
168516
+ return {
168517
+ host: this.connection.host || 'localhost',
168518
+ port: this.connection.port,
168519
+ database: this.connection.database || 'visor',
168520
+ user: this.connection.user,
168521
+ password: this.connection.password,
168522
+ ssl: this.resolveSslConfig(),
168523
+ };
168524
+ }
168525
+ buildMssqlConnection() {
168526
+ const ssl = this.connection.ssl;
168527
+ const sslEnabled = ssl === true || (typeof ssl === 'object' && ssl.enabled !== false);
168528
+ return {
168529
+ server: this.connection.host || 'localhost',
168530
+ port: this.connection.port,
168531
+ database: this.connection.database || 'visor',
168532
+ user: this.connection.user,
168533
+ password: this.connection.password,
168534
+ options: {
168535
+ encrypt: sslEnabled,
168536
+ trustServerCertificate: typeof ssl === 'object' ? ssl.reject_unauthorized === false : !sslEnabled,
168537
+ },
168538
+ };
168539
+ }
168540
+ resolveSslConfig() {
168541
+ const ssl = this.connection.ssl;
168542
+ if (ssl === false || ssl === undefined)
168543
+ return false;
168544
+ if (ssl === true)
168545
+ return { rejectUnauthorized: true };
168546
+ // Object config
168547
+ if (ssl.enabled === false)
168548
+ return false;
168549
+ const result = {
168550
+ rejectUnauthorized: ssl.reject_unauthorized !== false,
168551
+ };
168552
+ if (ssl.ca) {
168553
+ const caPath = this.validateSslPath(ssl.ca, 'CA certificate');
168554
+ result.ca = fs.readFileSync(caPath, 'utf8');
168555
+ }
168556
+ if (ssl.cert) {
168557
+ const certPath = this.validateSslPath(ssl.cert, 'client certificate');
168558
+ result.cert = fs.readFileSync(certPath, 'utf8');
168559
+ }
168560
+ if (ssl.key) {
168561
+ const keyPath = this.validateSslPath(ssl.key, 'client key');
168562
+ result.key = fs.readFileSync(keyPath, 'utf8');
168563
+ }
168564
+ return result;
168565
+ }
168566
+ validateSslPath(filePath, label) {
168567
+ const resolved = path.resolve(filePath);
168568
+ if (resolved !== path.normalize(resolved)) {
168569
+ throw new Error(`SSL ${label} path contains invalid sequences: ${filePath}`);
168570
+ }
168571
+ if (!fs.existsSync(resolved)) {
168572
+ throw new Error(`SSL ${label} not found: ${filePath}`);
168573
+ }
168574
+ return resolved;
168575
+ }
168576
+ async shutdown() {
168577
+ if (this.knex) {
168578
+ await this.knex.destroy();
168579
+ this.knex = null;
168580
+ }
168581
+ }
168582
+ async migrateSchema() {
168583
+ const knex = this.getKnex();
168584
+ const exists = await knex.schema.hasTable('schedules');
168585
+ if (!exists) {
168586
+ await knex.schema.createTable('schedules', table => {
168587
+ table.string('id', 36).primary();
168588
+ table.string('creator_id', 255).notNullable().index();
168589
+ table.string('creator_context', 255);
168590
+ table.string('creator_name', 255);
168591
+ table.string('timezone', 64).notNullable().defaultTo('UTC');
168592
+ table.string('schedule_expr', 255);
168593
+ table.bigInteger('run_at');
168594
+ table.boolean('is_recurring').notNullable();
168595
+ table.text('original_expression');
168596
+ table.string('workflow', 255);
168597
+ table.text('workflow_inputs');
168598
+ table.text('output_context');
168599
+ table.string('status', 20).notNullable().index();
168600
+ table.bigInteger('created_at').notNullable();
168601
+ table.bigInteger('last_run_at');
168602
+ table.bigInteger('next_run_at');
168603
+ table.integer('run_count').notNullable().defaultTo(0);
168604
+ table.integer('failure_count').notNullable().defaultTo(0);
168605
+ table.text('last_error');
168606
+ table.text('previous_response');
168607
+ table.index(['status', 'next_run_at']);
168608
+ });
168609
+ }
168610
+ // Create message_triggers table
168611
+ const triggersExist = await knex.schema.hasTable('message_triggers');
168612
+ if (!triggersExist) {
168613
+ await knex.schema.createTable('message_triggers', table => {
168614
+ table.string('id', 36).primary();
168615
+ table.string('creator_id', 255).notNullable().index();
168616
+ table.string('creator_context', 255);
168617
+ table.string('creator_name', 255);
168618
+ table.text('description');
168619
+ table.text('channels'); // JSON array
168620
+ table.text('from_users'); // JSON array
168621
+ table.boolean('from_bots').notNullable().defaultTo(false);
168622
+ table.text('contains'); // JSON array
168623
+ table.text('match_pattern');
168624
+ table.string('threads', 20).notNullable().defaultTo('any');
168625
+ table.string('workflow', 255).notNullable();
168626
+ table.text('inputs'); // JSON
168627
+ table.text('output_context'); // JSON
168628
+ table.string('status', 20).notNullable().defaultTo('active').index();
168629
+ table.boolean('enabled').notNullable().defaultTo(true);
168630
+ table.bigInteger('created_at').notNullable();
168631
+ });
168632
+ }
168633
+ // Create scheduler_locks table for distributed locking
168634
+ const locksExist = await knex.schema.hasTable('scheduler_locks');
168635
+ if (!locksExist) {
168636
+ await knex.schema.createTable('scheduler_locks', table => {
168637
+ table.string('lock_id', 255).primary();
168638
+ table.string('node_id', 255).notNullable();
168639
+ table.string('lock_token', 36).notNullable();
168640
+ table.bigInteger('acquired_at').notNullable();
168641
+ table.bigInteger('expires_at').notNullable();
168642
+ });
168643
+ }
168644
+ }
168645
+ getKnex() {
168646
+ if (!this.knex) {
168647
+ throw new Error('[KnexStore] Not initialized. Call initialize() first.');
168648
+ }
168649
+ return this.knex;
168650
+ }
168651
+ // --- CRUD ---
168652
+ async create(schedule) {
168653
+ const knex = this.getKnex();
168654
+ const newSchedule = {
168655
+ ...schedule,
168656
+ id: (0, uuid_1.v4)(),
168657
+ createdAt: Date.now(),
168658
+ runCount: 0,
168659
+ failureCount: 0,
168660
+ status: 'active',
168661
+ };
168662
+ await knex('schedules').insert(toInsertRow(newSchedule));
168663
+ logger_1.logger.info(`[KnexStore] Created schedule ${newSchedule.id} for user ${newSchedule.creatorId}`);
168664
+ return newSchedule;
168665
+ }
168666
+ async importSchedule(schedule) {
168667
+ const knex = this.getKnex();
168668
+ const existing = await knex('schedules').where('id', schedule.id).first();
168669
+ if (existing)
168670
+ return; // Already imported (idempotent)
168671
+ await knex('schedules').insert(toInsertRow(schedule));
168672
+ }
168673
+ async get(id) {
168674
+ const knex = this.getKnex();
168675
+ const row = await knex('schedules').where('id', id).first();
168676
+ return row ? fromDbRow(row) : undefined;
168677
+ }
168678
+ async update(id, patch) {
168679
+ const knex = this.getKnex();
168680
+ const existing = await knex('schedules').where('id', id).first();
168681
+ if (!existing)
168682
+ return undefined;
168683
+ const current = fromDbRow(existing);
168684
+ const updated = { ...current, ...patch, id: current.id };
168685
+ const row = toInsertRow(updated);
168686
+ // Remove id from update (PK cannot change)
168687
+ delete row.id;
168688
+ await knex('schedules').where('id', id).update(row);
168689
+ return updated;
168690
+ }
168691
+ async delete(id) {
168692
+ const knex = this.getKnex();
168693
+ const deleted = await knex('schedules').where('id', id).del();
168694
+ if (deleted > 0) {
168695
+ logger_1.logger.info(`[KnexStore] Deleted schedule ${id}`);
168696
+ return true;
168697
+ }
168698
+ return false;
168699
+ }
168700
+ // --- Queries ---
168701
+ async getByCreator(creatorId) {
168702
+ const knex = this.getKnex();
168703
+ const rows = await knex('schedules').where('creator_id', creatorId);
168704
+ return rows.map((r) => fromDbRow(r));
168705
+ }
168706
+ async getActiveSchedules() {
168707
+ const knex = this.getKnex();
168708
+ const rows = await knex('schedules').where('status', 'active');
168709
+ return rows.map((r) => fromDbRow(r));
168710
+ }
168711
+ async getDueSchedules(now) {
168712
+ const ts = now ?? Date.now();
168713
+ const knex = this.getKnex();
168714
+ // MSSQL uses 1/0 for booleans
168715
+ const bFalse = this.driver === 'mssql' ? 0 : false;
168716
+ const bTrue = this.driver === 'mssql' ? 1 : true;
168717
+ const rows = await knex('schedules')
168718
+ .where('status', 'active')
168719
+ .andWhere(function () {
168720
+ this.where(function () {
168721
+ this.where('is_recurring', bFalse)
168722
+ .whereNotNull('run_at')
168723
+ .where('run_at', '<=', ts);
168724
+ }).orWhere(function () {
168725
+ this.where('is_recurring', bTrue)
168726
+ .whereNotNull('next_run_at')
168727
+ .where('next_run_at', '<=', ts);
168728
+ });
168729
+ });
168730
+ return rows.map((r) => fromDbRow(r));
168731
+ }
168732
+ async findByWorkflow(creatorId, workflowName) {
168733
+ const knex = this.getKnex();
168734
+ const escaped = workflowName.toLowerCase().replace(/[%_\\]/g, '\\$&');
168735
+ const pattern = `%${escaped}%`;
168736
+ const rows = await knex('schedules')
168737
+ .where('creator_id', creatorId)
168738
+ .where('status', 'active')
168739
+ .whereRaw("LOWER(workflow) LIKE ? ESCAPE '\\'", [pattern]);
168740
+ return rows.map((r) => fromDbRow(r));
168741
+ }
168742
+ async getAll() {
168743
+ const knex = this.getKnex();
168744
+ const rows = await knex('schedules');
168745
+ return rows.map((r) => fromDbRow(r));
168746
+ }
168747
+ async getStats() {
168748
+ const knex = this.getKnex();
168749
+ // MSSQL uses 1/0 for booleans; PostgreSQL/MySQL accept both true/1
168750
+ const boolTrue = this.driver === 'mssql' ? '1' : 'true';
168751
+ const boolFalse = this.driver === 'mssql' ? '0' : 'false';
168752
+ const result = await knex('schedules')
168753
+ .select(knex.raw('COUNT(*) as total'), knex.raw("SUM(CASE WHEN status = 'active' THEN 1 ELSE 0 END) as active"), knex.raw("SUM(CASE WHEN status = 'paused' THEN 1 ELSE 0 END) as paused"), knex.raw("SUM(CASE WHEN status = 'completed' THEN 1 ELSE 0 END) as completed"), knex.raw("SUM(CASE WHEN status = 'failed' THEN 1 ELSE 0 END) as failed"), knex.raw(`SUM(CASE WHEN is_recurring = ${boolTrue} THEN 1 ELSE 0 END) as recurring`), knex.raw(`SUM(CASE WHEN is_recurring = ${boolFalse} THEN 1 ELSE 0 END) as one_time`))
168754
+ .first();
168755
+ return {
168756
+ total: Number(result.total) || 0,
168757
+ active: Number(result.active) || 0,
168758
+ paused: Number(result.paused) || 0,
168759
+ completed: Number(result.completed) || 0,
168760
+ failed: Number(result.failed) || 0,
168761
+ recurring: Number(result.recurring) || 0,
168762
+ oneTime: Number(result.one_time) || 0,
168763
+ };
168764
+ }
168765
+ async validateLimits(creatorId, isRecurring, limits) {
168766
+ const knex = this.getKnex();
168767
+ if (limits.maxGlobal) {
168768
+ const result = await knex('schedules').count('* as cnt').first();
168769
+ if (Number(result?.cnt) >= limits.maxGlobal) {
168770
+ throw new Error(`Global schedule limit reached (${limits.maxGlobal})`);
168771
+ }
168772
+ }
168773
+ if (limits.maxPerUser) {
168774
+ const result = await knex('schedules')
168775
+ .where('creator_id', creatorId)
168776
+ .count('* as cnt')
168777
+ .first();
168778
+ if (Number(result?.cnt) >= limits.maxPerUser) {
168779
+ throw new Error(`You have reached the maximum number of schedules (${limits.maxPerUser})`);
168780
+ }
168781
+ }
168782
+ if (isRecurring && limits.maxRecurringPerUser) {
168783
+ const bTrue = this.driver === 'mssql' ? 1 : true;
168784
+ const result = await knex('schedules')
168785
+ .where('creator_id', creatorId)
168786
+ .where('is_recurring', bTrue)
168787
+ .count('* as cnt')
168788
+ .first();
168789
+ if (Number(result?.cnt) >= limits.maxRecurringPerUser) {
168790
+ throw new Error(`You have reached the maximum number of recurring schedules (${limits.maxRecurringPerUser})`);
168791
+ }
168792
+ }
168793
+ }
168794
+ // --- HA Distributed Locking (via scheduler_locks table) ---
168795
+ async tryAcquireLock(lockId, nodeId, ttlSeconds) {
168796
+ const knex = this.getKnex();
168797
+ const now = Date.now();
168798
+ const expiresAt = now + ttlSeconds * 1000;
168799
+ const token = (0, uuid_1.v4)();
168800
+ // Step 1: Try to claim an existing expired lock
168801
+ const updated = await knex('scheduler_locks')
168802
+ .where('lock_id', lockId)
168803
+ .where('expires_at', '<', now)
168804
+ .update({
168805
+ node_id: nodeId,
168806
+ lock_token: token,
168807
+ acquired_at: now,
168808
+ expires_at: expiresAt,
168809
+ });
168810
+ if (updated > 0)
168811
+ return token;
168812
+ // Step 2: Try to INSERT a new lock row
168813
+ try {
168814
+ await knex('scheduler_locks').insert({
168815
+ lock_id: lockId,
168816
+ node_id: nodeId,
168817
+ lock_token: token,
168818
+ acquired_at: now,
168819
+ expires_at: expiresAt,
168820
+ });
168821
+ return token;
168822
+ }
168823
+ catch {
168824
+ // Unique constraint violation — another node holds the lock
168825
+ return null;
168826
+ }
168827
+ }
168828
+ async releaseLock(lockId, lockToken) {
168829
+ const knex = this.getKnex();
168830
+ await knex('scheduler_locks').where('lock_id', lockId).where('lock_token', lockToken).del();
168831
+ }
168832
+ async renewLock(lockId, lockToken, ttlSeconds) {
168833
+ const knex = this.getKnex();
168834
+ const now = Date.now();
168835
+ const expiresAt = now + ttlSeconds * 1000;
168836
+ const updated = await knex('scheduler_locks')
168837
+ .where('lock_id', lockId)
168838
+ .where('lock_token', lockToken)
168839
+ .update({ acquired_at: now, expires_at: expiresAt });
168840
+ return updated > 0;
168841
+ }
168842
+ async flush() {
168843
+ // No-op for server-based backends
168844
+ }
168845
+ // --- Message Trigger CRUD ---
168846
+ async createTrigger(trigger) {
168847
+ const knex = this.getKnex();
168848
+ const newTrigger = {
168849
+ ...trigger,
168850
+ id: (0, uuid_1.v4)(),
168851
+ createdAt: Date.now(),
168852
+ };
168853
+ await knex('message_triggers').insert(toTriggerInsertRow(newTrigger));
168854
+ logger_1.logger.info(`[KnexStore] Created trigger ${newTrigger.id} for user ${newTrigger.creatorId}`);
168855
+ return newTrigger;
168856
+ }
168857
+ async getTrigger(id) {
168858
+ const knex = this.getKnex();
168859
+ const row = await knex('message_triggers').where('id', id).first();
168860
+ return row ? fromTriggerRow(row) : undefined;
168861
+ }
168862
+ async updateTrigger(id, patch) {
168863
+ const knex = this.getKnex();
168864
+ const existing = await knex('message_triggers').where('id', id).first();
168865
+ if (!existing)
168866
+ return undefined;
168867
+ const current = fromTriggerRow(existing);
168868
+ const updated = {
168869
+ ...current,
168870
+ ...patch,
168871
+ id: current.id,
168872
+ createdAt: current.createdAt,
168873
+ };
168874
+ const row = toTriggerInsertRow(updated);
168875
+ delete row.id;
168876
+ await knex('message_triggers').where('id', id).update(row);
168877
+ return updated;
168878
+ }
168879
+ async deleteTrigger(id) {
168880
+ const knex = this.getKnex();
168881
+ const deleted = await knex('message_triggers').where('id', id).del();
168882
+ if (deleted > 0) {
168883
+ logger_1.logger.info(`[KnexStore] Deleted trigger ${id}`);
168884
+ return true;
168885
+ }
168886
+ return false;
168887
+ }
168888
+ async getTriggersByCreator(creatorId) {
168889
+ const knex = this.getKnex();
168890
+ const rows = await knex('message_triggers').where('creator_id', creatorId);
168891
+ return rows.map((r) => fromTriggerRow(r));
168892
+ }
168893
+ async getActiveTriggers() {
168894
+ const knex = this.getKnex();
168895
+ const rows = await knex('message_triggers')
168896
+ .where('status', 'active')
168897
+ .where('enabled', this.driver === 'mssql' ? 1 : true);
168898
+ return rows.map((r) => fromTriggerRow(r));
168899
+ }
168900
+ }
168901
+ exports.KnexStoreBackend = KnexStoreBackend;
168902
+
168903
+
167100
168904
  /***/ }),
167101
168905
 
167102
168906
  /***/ 83864:
@@ -178102,6 +179906,35 @@ class OutputFormatters {
178102
179906
  exports.OutputFormatters = OutputFormatters;
178103
179907
 
178104
179908
 
179909
+ /***/ }),
179910
+
179911
+ /***/ 93866:
179912
+ /***/ ((__unused_webpack_module, exports) => {
179913
+
179914
+ "use strict";
179915
+
179916
+ Object.defineProperty(exports, "__esModule", ({ value: true }));
179917
+ exports.DefaultPolicyEngine = void 0;
179918
+ /**
179919
+ * Default (no-op) policy engine — always allows everything.
179920
+ * Used when no enterprise license is present or policy is disabled.
179921
+ */
179922
+ class DefaultPolicyEngine {
179923
+ async initialize(_config) { }
179924
+ async evaluateCheckExecution(_checkId, _checkConfig) {
179925
+ return { allowed: true };
179926
+ }
179927
+ async evaluateToolInvocation(_serverName, _methodName, _transport) {
179928
+ return { allowed: true };
179929
+ }
179930
+ async evaluateCapabilities(_checkId, _capabilities) {
179931
+ return { allowed: true };
179932
+ }
179933
+ async shutdown() { }
179934
+ }
179935
+ exports.DefaultPolicyEngine = DefaultPolicyEngine;
179936
+
179937
+
178105
179938
  /***/ }),
178106
179939
 
178107
179940
  /***/ 96611:
@@ -200040,7 +201873,7 @@ class StateMachineExecutionEngine {
200040
201873
  try {
200041
201874
  logger_1.logger.debug(`[PolicyEngine] Loading enterprise policy engine (engine=${configWithTagFilter.policy.engine})`);
200042
201875
  // @ts-ignore — enterprise/ may not exist in OSS builds (caught at runtime)
200043
- const { loadEnterprisePolicyEngine } = await Promise.resolve().then(() => __importStar(__nccwpck_require__(7065)));
201876
+ const { loadEnterprisePolicyEngine } = await Promise.resolve().then(() => __importStar(__nccwpck_require__(87068)));
200044
201877
  context.policyEngine = await loadEnterprisePolicyEngine(configWithTagFilter.policy);
200045
201878
  logger_1.logger.debug(`[PolicyEngine] Initialized: ${context.policyEngine?.constructor?.name || 'unknown'}`);
200046
201879
  }
@@ -210314,7 +212147,7 @@ async function initTelemetry(opts = {}) {
210314
212147
  const path = __nccwpck_require__(16928);
210315
212148
  const outDir = opts.file?.dir ||
210316
212149
  process.env.VISOR_TRACE_DIR ||
210317
- __nccwpck_require__.ab + "traces";
212150
+ path.join(process.cwd(), 'output', 'traces');
210318
212151
  fs.mkdirSync(outDir, { recursive: true });
210319
212152
  const ts = new Date().toISOString().replace(/[:.]/g, '-');
210320
212153
  process.env.VISOR_FALLBACK_TRACE_FILE = path.join(outDir, `run-${ts}.ndjson`);
@@ -210519,7 +212352,7 @@ async function shutdownTelemetry() {
210519
212352
  if (process.env.VISOR_TRACE_REPORT === 'true') {
210520
212353
  const fs = __nccwpck_require__(79896);
210521
212354
  const path = __nccwpck_require__(16928);
210522
- const outDir = process.env.VISOR_TRACE_DIR || __nccwpck_require__.ab + "traces";
212355
+ const outDir = process.env.VISOR_TRACE_DIR || path.join(process.cwd(), 'output', 'traces');
210523
212356
  if (!fs.existsSync(outDir))
210524
212357
  fs.mkdirSync(outDir, { recursive: true });
210525
212358
  const ts = new Date().toISOString().replace(/[:.]/g, '-');
@@ -211018,7 +212851,7 @@ function __getOrCreateNdjsonPath() {
211018
212851
  fs.mkdirSync(dir, { recursive: true });
211019
212852
  return __ndjsonPath;
211020
212853
  }
211021
- const outDir = process.env.VISOR_TRACE_DIR || __nccwpck_require__.ab + "traces";
212854
+ const outDir = process.env.VISOR_TRACE_DIR || path.join(process.cwd(), 'output', 'traces');
211022
212855
  if (!fs.existsSync(outDir))
211023
212856
  fs.mkdirSync(outDir, { recursive: true });
211024
212857
  if (!__ndjsonPath) {
@@ -224978,22 +226811,6 @@ class WorkflowRegistry {
224978
226811
  exports.WorkflowRegistry = WorkflowRegistry;
224979
226812
 
224980
226813
 
224981
- /***/ }),
224982
-
224983
- /***/ 7065:
224984
- /***/ ((module) => {
224985
-
224986
- module.exports = eval("require")("./enterprise/loader");
224987
-
224988
-
224989
- /***/ }),
224990
-
224991
- /***/ 71370:
224992
- /***/ ((module) => {
224993
-
224994
- module.exports = eval("require")("./enterprise/policy/policy-input-builder");
224995
-
224996
-
224997
226814
  /***/ }),
224998
226815
 
224999
226816
  /***/ 18327:
@@ -260667,22 +262484,16 @@ var init_JsonShapeDeserializer = __esm({
260667
262484
  if (Array.isArray(value) && ns.isListSchema()) {
260668
262485
  const listMember = ns.getValueSchema();
260669
262486
  const out = [];
260670
- const sparse = !!ns.getMergedTraits().sparse;
260671
262487
  for (const item of value) {
260672
- if (sparse || item != null) {
260673
- out.push(this._read(listMember, item));
260674
- }
262488
+ out.push(this._read(listMember, item));
260675
262489
  }
260676
262490
  return out;
260677
262491
  }
260678
262492
  if (ns.isMapSchema()) {
260679
262493
  const mapMember = ns.getValueSchema();
260680
262494
  const out = {};
260681
- const sparse = !!ns.getMergedTraits().sparse;
260682
262495
  for (const [_k, _v] of Object.entries(value)) {
260683
- if (sparse || _v != null) {
260684
- out[_k] = this._read(mapMember, _v);
260685
- }
262496
+ out[_k] = this._read(mapMember, _v);
260686
262497
  }
260687
262498
  return out;
260688
262499
  }
@@ -262331,18 +264142,18 @@ var init_XmlShapeDeserializer = __esm({
262331
264142
  return value;
262332
264143
  }
262333
264144
  if (typeof value === "object") {
262334
- const sparse = !!traits.sparse;
262335
264145
  const flat = !!traits.xmlFlattened;
262336
264146
  if (ns.isListSchema()) {
262337
264147
  const listValue = ns.getValueSchema();
262338
264148
  const buffer2 = [];
262339
264149
  const sourceKey = listValue.getMergedTraits().xmlName ?? "member";
262340
264150
  const source = flat ? value : (value[0] ?? value)[sourceKey];
264151
+ if (source == null) {
264152
+ return buffer2;
264153
+ }
262341
264154
  const sourceArray = Array.isArray(source) ? source : [source];
262342
264155
  for (const v5 of sourceArray) {
262343
- if (v5 != null || sparse) {
262344
- buffer2.push(this.readSchema(listValue, v5));
262345
- }
264156
+ buffer2.push(this.readSchema(listValue, v5));
262346
264157
  }
262347
264158
  return buffer2;
262348
264159
  }
@@ -262361,9 +264172,7 @@ var init_XmlShapeDeserializer = __esm({
262361
264172
  for (const entry of entries) {
262362
264173
  const key = entry[keyProperty];
262363
264174
  const value2 = entry[valueProperty];
262364
- if (value2 != null || sparse) {
262365
- buffer[key] = this.readSchema(memberNs, value2);
262366
- }
264175
+ buffer[key] = this.readSchema(memberNs, value2);
262367
264176
  }
262368
264177
  return buffer;
262369
264178
  }
@@ -266705,7 +268514,7 @@ var require_package2 = __commonJS({
266705
268514
  module2.exports = {
266706
268515
  name: "@aws-sdk/client-bedrock-runtime",
266707
268516
  description: "AWS SDK for JavaScript Bedrock Runtime Client for Node.js, Browser and React Native",
266708
- version: "3.1001.0",
268517
+ version: "3.1002.0",
266709
268518
  scripts: {
266710
268519
  build: "concurrently 'yarn:build:types' 'yarn:build:es' && yarn build:cjs",
266711
268520
  "build:cjs": "node ../../scripts/compilation/inline client-bedrock-runtime",
@@ -266729,21 +268538,21 @@ var require_package2 = __commonJS({
266729
268538
  dependencies: {
266730
268539
  "@aws-crypto/sha256-browser": "5.2.0",
266731
268540
  "@aws-crypto/sha256-js": "5.2.0",
266732
- "@aws-sdk/core": "^3.973.16",
266733
- "@aws-sdk/credential-provider-node": "^3.972.15",
268541
+ "@aws-sdk/core": "^3.973.17",
268542
+ "@aws-sdk/credential-provider-node": "^3.972.16",
266734
268543
  "@aws-sdk/eventstream-handler-node": "^3.972.9",
266735
268544
  "@aws-sdk/middleware-eventstream": "^3.972.6",
266736
268545
  "@aws-sdk/middleware-host-header": "^3.972.6",
266737
268546
  "@aws-sdk/middleware-logger": "^3.972.6",
266738
268547
  "@aws-sdk/middleware-recursion-detection": "^3.972.6",
266739
- "@aws-sdk/middleware-user-agent": "^3.972.16",
268548
+ "@aws-sdk/middleware-user-agent": "^3.972.17",
266740
268549
  "@aws-sdk/middleware-websocket": "^3.972.11",
266741
268550
  "@aws-sdk/region-config-resolver": "^3.972.6",
266742
- "@aws-sdk/token-providers": "3.1001.0",
268551
+ "@aws-sdk/token-providers": "3.1002.0",
266743
268552
  "@aws-sdk/types": "^3.973.4",
266744
268553
  "@aws-sdk/util-endpoints": "^3.996.3",
266745
268554
  "@aws-sdk/util-user-agent-browser": "^3.972.6",
266746
- "@aws-sdk/util-user-agent-node": "^3.973.1",
268555
+ "@aws-sdk/util-user-agent-node": "^3.973.2",
266747
268556
  "@smithy/config-resolver": "^4.4.9",
266748
268557
  "@smithy/core": "^3.23.7",
266749
268558
  "@smithy/eventstream-serde-browser": "^4.2.10",
@@ -267492,7 +269301,7 @@ var init_package = __esm({
267492
269301
  "node_modules/@aws-sdk/nested-clients/package.json"() {
267493
269302
  package_default = {
267494
269303
  name: "@aws-sdk/nested-clients",
267495
- version: "3.996.4",
269304
+ version: "3.996.5",
267496
269305
  description: "Nested clients for AWS SDK packages.",
267497
269306
  main: "./dist-cjs/index.js",
267498
269307
  module: "./dist-es/index.js",
@@ -267521,16 +269330,16 @@ var init_package = __esm({
267521
269330
  dependencies: {
267522
269331
  "@aws-crypto/sha256-browser": "5.2.0",
267523
269332
  "@aws-crypto/sha256-js": "5.2.0",
267524
- "@aws-sdk/core": "^3.973.16",
269333
+ "@aws-sdk/core": "^3.973.17",
267525
269334
  "@aws-sdk/middleware-host-header": "^3.972.6",
267526
269335
  "@aws-sdk/middleware-logger": "^3.972.6",
267527
269336
  "@aws-sdk/middleware-recursion-detection": "^3.972.6",
267528
- "@aws-sdk/middleware-user-agent": "^3.972.16",
269337
+ "@aws-sdk/middleware-user-agent": "^3.972.17",
267529
269338
  "@aws-sdk/region-config-resolver": "^3.972.6",
267530
269339
  "@aws-sdk/types": "^3.973.4",
267531
269340
  "@aws-sdk/util-endpoints": "^3.996.3",
267532
269341
  "@aws-sdk/util-user-agent-browser": "^3.972.6",
267533
- "@aws-sdk/util-user-agent-node": "^3.973.1",
269342
+ "@aws-sdk/util-user-agent-node": "^3.973.2",
267534
269343
  "@smithy/config-resolver": "^4.4.9",
267535
269344
  "@smithy/core": "^3.23.7",
267536
269345
  "@smithy/fetch-http-handler": "^5.3.12",
@@ -283179,20 +284988,15 @@ var init_zod = __esm({
283179
284988
 
283180
284989
  // src/agent/tasks/taskTool.js
283181
284990
  function createTaskCompletionBlockedMessage(taskSummary) {
283182
- return `<task_completion_blocked>
283183
- You cannot complete yet. The following tasks are still unresolved:
284991
+ return `You cannot complete yet. The following tasks are still unresolved:
283184
284992
 
283185
284993
  ${taskSummary}
283186
284994
 
283187
- Required action:
283188
- 1. For each "pending" or "in_progress" task, either:
283189
- - Complete the work and mark it: <task><action>complete</action><id>task-X</id></task>
283190
- - Or cancel if no longer needed: <task><action>update</action><id>task-X</id><status>cancelled</status></task>
283191
-
283192
- 2. After ALL tasks are resolved (completed or cancelled), call attempt_completion again.
284995
+ For each pending/in_progress task, either:
284996
+ - Complete it: call task tool with action="complete", id="task-X"
284997
+ - Cancel it: call task tool with action="update", id="task-X", status="cancelled"
283193
284998
 
283194
- Use <task><action>list</action></task> to review current status.
283195
- </task_completion_blocked>`;
284999
+ After all tasks are resolved, call attempt_completion again.`;
283196
285000
  }
283197
285001
  function createTaskTool(options = {}) {
283198
285002
  const { taskManager, tracer, debug = false } = options;
@@ -283415,145 +285219,46 @@ var init_taskTool = __esm({
283415
285219
  dependencies: external_exports.array(external_exports.string()).optional(),
283416
285220
  after: external_exports.string().optional()
283417
285221
  });
283418
- taskSystemPrompt = `[Task Management System]
283419
-
283420
- You have access to a task tracking tool to organize your work on complex requests.
283421
-
283422
- ## When to Create Tasks
283423
-
283424
- CREATE TASKS when the request has **multiple distinct deliverables or goals**:
283425
- - "Fix bug A AND add feature B" \u2192 Two separate tasks
283426
- - "Investigate auth, payments, AND notifications" \u2192 Three independent areas
283427
- - "Implement X, then add tests, then update docs" \u2192 Sequential phases with different outputs
283428
- - User explicitly asks for a plan or task breakdown
283429
-
283430
- SKIP TASKS for single-goal requests, even if they require multiple searches:
283431
- - "How does ranking work?" \u2192 Just investigate and answer (one goal)
283432
- - "What does function X do?" \u2192 Just look it up (one goal)
283433
- - "Explain the authentication flow" \u2192 Just trace and explain (one goal)
283434
- - "Find where errors are logged" \u2192 Just search and report (one goal)
283435
-
283436
- **Key insight**: Multiple *internal steps* (search, read, analyze) are NOT the same as multiple *goals*.
283437
- A single investigation with many steps is still ONE task, not many.
283438
-
283439
- ## Task Granularity
283440
-
283441
- Tasks represent LOGICAL UNITS OF WORK, not individual files or steps:
283442
- - "Fix 8 similar test files" \u2192 ONE task (same type of fix across files)
283443
- - "Update API + tests + docs" \u2192 THREE tasks (different types of work)
283444
- - "Implement feature in 5 files" \u2192 ONE task (single feature)
283445
-
283446
- **Rule of thumb**: If you're creating more than 3-4 tasks, you're probably too granular.
283447
-
283448
- **Anti-patterns to avoid**:
283449
- - One task per file \u274C
283450
- - One task per function \u274C
283451
- - One task per repository (when same type of work) \u274C
283452
-
283453
- **Good patterns**:
283454
- - One task per distinct deliverable \u2713
283455
- - One task per phase (implement, test, document) \u2713
283456
- - One task per different type of work \u2713
283457
-
283458
- MODIFY TASKS when (during execution):
283459
- - You discover the problem is more complex than expected \u2192 Add new tasks
283460
- - A single task covers too much scope \u2192 Split into smaller tasks
283461
- - You find related work that needs attention \u2192 Add dependent tasks
283462
- - A task becomes irrelevant based on findings \u2192 Cancel it
283463
- - Task priorities change based on discoveries \u2192 Update priority
283464
- - You learn new context \u2192 Update task description
283465
-
283466
- ## Task Workflow
283467
-
283468
- **STEP 1 - Plan (at start):**
283469
- Analyze the request and create tasks for each logical step:
283470
-
283471
- <task>
283472
- <action>create</action>
283473
- <tasks>[
283474
- {"title": "Search for authentication module", "priority": "high"},
283475
- {"title": "Analyze login flow implementation", "dependencies": ["task-1"]},
283476
- {"title": "Find session management code", "dependencies": ["task-1"]},
283477
- {"title": "Summarize authentication architecture", "dependencies": ["task-2", "task-3"]}
283478
- ]</tasks>
283479
- </task>
283480
-
283481
- **STEP 2 - Execute (during work):**
283482
- Update task status as you work:
283483
-
283484
- <task>
283485
- <action>update</action>
283486
- <id>task-1</id>
283487
- <status>in_progress</status>
283488
- </task>
283489
-
283490
- ... do the work (search, extract, etc.) ...
283491
-
283492
- <task>
283493
- <action>complete</action>
283494
- <id>task-1</id>
283495
- </task>
283496
-
283497
- **STEP 2b - Adapt (when you discover new work):**
283498
- As you work, you may discover that:
283499
- - A task is more complex than expected \u2192 Split it into subtasks
283500
- - New areas need investigation \u2192 Add new tasks
283501
- - Some tasks are no longer needed \u2192 Cancel them
283502
- - Task order should change \u2192 Update dependencies
283503
-
283504
- *Adding a new task when you discover more work:*
283505
- <task>
283506
- <action>create</action>
283507
- <title>Investigate caching layer</title>
283508
- <description>Found references to Redis caching in auth module</description>
283509
- </task>
283510
-
283511
- *Inserting a task after a specific task (to maintain logical order):*
283512
- <task>
283513
- <action>create</action>
283514
- <title>Check rate limiting</title>
283515
- <after>task-2</after>
283516
- </task>
283517
-
283518
- *Cancelling and splitting a complex task:*
283519
- <task>
283520
- <action>update</action>
283521
- <id>task-3</id>
283522
- <status>cancelled</status>
283523
- </task>
283524
- <task>
283525
- <action>create</action>
283526
- <tasks>[
283527
- {"title": "Review JWT token generation", "priority": "high"},
283528
- {"title": "Review token refresh logic"}
283529
- ]</tasks>
283530
- </task>
283531
-
283532
- **STEP 3 - Finish (before completion):**
283533
- Before calling attempt_completion, ensure ALL tasks are either:
283534
- - \`completed\` - you finished the work
283535
- - \`cancelled\` - no longer needed
283536
-
283537
- If you created tasks, you MUST resolve them all before completing.
283538
-
283539
- ## Key Rules
283540
-
283541
- 1. **Dependencies are enforced**: A task cannot start until its dependencies are completed
283542
- 2. **Circular dependencies are rejected**: task-1 \u2192 task-2 \u2192 task-1 is invalid
283543
- 3. **Completion is blocked**: attempt_completion will fail if tasks remain unresolved
283544
- 4. **List to review**: Use <task><action>list</action></task> to see current task status
283545
- 5. **Tasks are living documents**: Add, split, or cancel tasks as you learn more about the problem
285222
+ taskSystemPrompt = `[Task Management]
285223
+
285224
+ Use the task tool to track progress on complex requests with multiple distinct goals.
285225
+
285226
+ ## When to Use Tasks
285227
+
285228
+ CREATE tasks when the request has **multiple separate deliverables**:
285229
+ - "Fix bug A AND add feature B" \u2192 two tasks
285230
+ - "Investigate auth, payments, AND notifications" \u2192 three tasks
285231
+ - "Implement X, then add tests, then update docs" \u2192 three sequential tasks
285232
+
285233
+ SKIP tasks for single-goal requests, even complex ones:
285234
+ - "How does ranking work?" \u2014 just investigate and answer
285235
+ - "Explain the authentication flow" \u2014 just trace and explain
285236
+ Multiple internal steps (search, read, analyze) for one goal \u2260 multiple tasks.
285237
+
285238
+ ## Granularity
285239
+
285240
+ Tasks = logical units of work, not files or steps.
285241
+ - "Fix 8 similar test files" \u2192 ONE task (same fix repeated)
285242
+ - "Update API + tests + docs" \u2192 THREE tasks (different work types)
285243
+ - Max 3\u20134 tasks. More means you're too granular.
285244
+
285245
+ ## Workflow
285246
+
285247
+ 1. **Plan**: Call task tool with action="create" and a tasks array up front
285248
+ 2. **Execute**: Update status to "in_progress" / "completed" as you work. Add, split, or cancel tasks as you learn more.
285249
+ 3. **Finish**: All tasks must be "completed" or "cancelled" before calling attempt_completion.
285250
+
285251
+ ## Rules
285252
+
285253
+ - Dependencies are enforced: a task cannot start until its dependencies are completed
285254
+ - Circular dependencies are rejected
285255
+ - attempt_completion is blocked while tasks remain unresolved
283546
285256
  `;
283547
- taskGuidancePrompt = `<task_guidance>
283548
- Does this request have MULTIPLE DISTINCT GOALS?
285257
+ taskGuidancePrompt = `Does this request have MULTIPLE DISTINCT GOALS?
283549
285258
  - "Do A AND B AND C" (multiple goals) \u2192 Create tasks for each goal
283550
285259
  - "Investigate/explain/find X" (single goal) \u2192 Skip tasks, just answer directly
283551
-
283552
- Multiple internal steps (search, read, analyze) for ONE goal = NO tasks needed.
283553
- Only create tasks when there are separate deliverables the user is asking for.
283554
-
283555
- If creating tasks, use the task tool with action="create" first.
283556
- </task_guidance>`;
285260
+ Multiple internal steps for ONE goal = NO tasks needed.
285261
+ If creating tasks, use the task tool with action="create" first.`;
283557
285262
  }
283558
285263
  });
283559
285264
 
@@ -330052,9 +331757,10 @@ If the solution is clear, you can jump to implementation right away. If not, ask
330052
331757
  - After every significant change, verify the project still builds and passes linting. Do not wait until the end to discover breakage.
330053
331758
 
330054
331759
  # After Implementation
330055
- - Always run the project's tests before considering the task complete. If tests fail, fix them.
330056
- - Run lint and typecheck commands if known for the project.
330057
- - If a build, lint, or test fails, fix the issue before finishing.
331760
+ - Verify the project builds successfully. If it doesn't, fix the build before moving on.
331761
+ - Run lint and typecheck commands if known for the project. Fix any new warnings or errors you introduced.
331762
+ - Add tests for any new or changed functionality. Tests must cover the main path and important edge cases.
331763
+ - Run the project's full test suite. If any tests fail (including pre-existing ones you may have broken), fix them before finishing.
330058
331764
  - When the task is done, respond to the user with a concise summary of what was implemented, what files were changed, and any relevant details. Include links (e.g. pull request URL) so the user has everything they need.
330059
331765
 
330060
331766
  # GitHub Integration
@@ -339267,6 +340973,19 @@ function isContextLimitError(error2) {
339267
340973
  }
339268
340974
  return false;
339269
340975
  }
340976
+ function messageContainsCompletion(msg) {
340977
+ if (Array.isArray(msg.toolInvocations)) {
340978
+ if (msg.toolInvocations.some((t5) => t5.toolName === "attempt_completion")) return true;
340979
+ }
340980
+ if (Array.isArray(msg.tool_calls)) {
340981
+ if (msg.tool_calls.some((t5) => t5.function?.name === "attempt_completion")) return true;
340982
+ }
340983
+ if (Array.isArray(msg.content)) {
340984
+ if (msg.content.some((p5) => p5.type === "tool-call" && p5.toolName === "attempt_completion")) return true;
340985
+ }
340986
+ const text = typeof msg.content === "string" ? msg.content : "";
340987
+ return text.includes("attempt_completion");
340988
+ }
339270
340989
  function identifyMessageSegments(messages) {
339271
340990
  const segments = [];
339272
340991
  let currentSegment = null;
@@ -339275,27 +340994,23 @@ function identifyMessageSegments(messages) {
339275
340994
  if (msg.role === "system") {
339276
340995
  continue;
339277
340996
  }
340997
+ if (msg.role === "tool" && currentSegment) {
340998
+ currentSegment.monologueIndices.push(i5);
340999
+ continue;
341000
+ }
339278
341001
  if (msg.role === "user") {
339279
- const content = typeof msg.content === "string" ? msg.content : "";
339280
- const isToolResult = content.includes("<tool_result>");
339281
- if (isToolResult && currentSegment) {
339282
- currentSegment.finalIndex = i5;
341002
+ if (currentSegment) {
339283
341003
  segments.push(currentSegment);
339284
- currentSegment = null;
339285
- } else {
339286
- if (currentSegment) {
339287
- segments.push(currentSegment);
339288
- }
339289
- currentSegment = {
339290
- userIndex: i5,
339291
- monologueIndices: [],
339292
- finalIndex: null
339293
- };
339294
341004
  }
341005
+ currentSegment = {
341006
+ userIndex: i5,
341007
+ monologueIndices: [],
341008
+ finalIndex: null
341009
+ };
339295
341010
  }
339296
341011
  if (msg.role === "assistant" && currentSegment) {
339297
- const content = typeof msg.content === "string" ? msg.content : "";
339298
- if (content.includes("<attempt_completion>") || content.includes("attempt_completion")) {
341012
+ const hasCompletion = messageContainsCompletion(msg);
341013
+ if (hasCompletion) {
339299
341014
  currentSegment.monologueIndices.push(i5);
339300
341015
  currentSegment.finalIndex = i5;
339301
341016
  segments.push(currentSegment);
@@ -357402,8 +359117,7 @@ Instructions:
357402
359117
  - Format as a structured list if multiple items found
357403
359118
  - If nothing relevant is found in this chunk, respond with "No relevant items found in this chunk."
357404
359119
  - Do NOT summarize the code - extract the specific information requested
357405
- - IMPORTANT: When completing, always use the FULL format: <attempt_completion><result>YOUR ANSWER HERE</result></attempt_completion>
357406
- - Do NOT use the shorthand <attempt_complete></attempt_complete> format`;
359120
+ - When done, use the attempt_completion tool with your answer as the result.`;
357407
359121
  try {
357408
359122
  const result = await delegate({
357409
359123
  task,
@@ -357468,7 +359182,7 @@ async function aggregateResults(chunkResults2, aggregation, extractionPrompt, op
357468
359182
  ${stripResultTags(r5.result)}`).join("\n\n");
357469
359183
  const completionNote = `
357470
359184
 
357471
- IMPORTANT: When completing, always use the FULL format: <attempt_completion><result>YOUR ANSWER HERE</result></attempt_completion>`;
359185
+ When done, use the attempt_completion tool with your answer as the result.`;
357472
359186
  const aggregationPrompts = {
357473
359187
  summarize: `Synthesize these analyses into a comprehensive summary. Combine related findings, remove redundancy, and present a coherent overview.
357474
359188
 
@@ -357626,7 +359340,7 @@ Your answer should:
357626
359340
 
357627
359341
  Format your response as a well-structured document that fully answers: "${question}"
357628
359342
 
357629
- IMPORTANT: When completing, use the FULL format: <attempt_completion><result>YOUR ANSWER HERE</result></attempt_completion>`;
359343
+ When done, use the attempt_completion tool with your answer as the result.`;
357630
359344
  try {
357631
359345
  const result = await delegate({
357632
359346
  task: synthesisTask,
@@ -357957,7 +359671,7 @@ function buildSearchDelegateTask({ searchQuery, searchPath, exact, language, all
357957
359671
  "",
357958
359672
  "Strategy for complex queries:",
357959
359673
  "1. Analyze the query - identify key concepts, entities, and relationships",
357960
- '2. Run focused searches for each concept (e.g., "error handling" + "authentication" separately)',
359674
+ '2. Run focused searches for each independent concept (e.g., for "how do payments work and how are emails sent", search "payments" and "emails" separately since they are unrelated)',
357961
359675
  "3. Use extract to verify relevance of promising results",
357962
359676
  "4. Combine all relevant targets in your final response",
357963
359677
  "",
@@ -358696,9 +360410,7 @@ Example: <edit><file_path>${file_path}</file_path><symbol>${allMatches[0].qualif
358696
360410
  if (fileTracker) {
358697
360411
  const check = fileTracker.checkSymbolContent(resolvedPath2, symbol15, symbolInfo.code);
358698
360412
  if (!check.ok && check.reason === "stale") {
358699
- return `Error editing ${file_path}: Symbol "${symbol15}" has changed since you last read it. Use extract to re-read the current content, then retry.
358700
-
358701
- Example: <extract><targets>${file_path}#${symbol15}</targets></extract>`;
360413
+ return `Error editing ${file_path}: Symbol "${symbol15}" has changed since you last read it. Use the extract tool with targets="${file_path}#${symbol15}" to re-read the current content, then retry.`;
358702
360414
  }
358703
360415
  }
358704
360416
  const content = await import_fs12.promises.readFile(resolvedPath2, "utf-8");
@@ -358936,9 +360648,7 @@ Parameters:
358936
360648
  }
358937
360649
  if (options.fileTracker && !options.fileTracker.isFileSeen(resolvedPath2)) {
358938
360650
  const displayPath = toRelativePath(resolvedPath2, workspaceRoot);
358939
- return `Error editing ${displayPath}: This file has not been read yet in this session. Use 'extract' to read the file first, then retry your edit. This ensures you are working with the current file content.
358940
-
358941
- Example: <extract><targets>${displayPath}</targets></extract>`;
360651
+ return `Error editing ${displayPath}: This file has not been read yet in this session. Use the extract tool with targets="${displayPath}" to read the file first, then retry your edit.`;
358942
360652
  }
358943
360653
  if (symbol15 !== void 0 && symbol15 !== null) {
358944
360654
  return await handleSymbolEdit({ resolvedPath: resolvedPath2, file_path, symbol: symbol15, new_string, position, debug, cwd, fileTracker: options.fileTracker });
@@ -358958,7 +360668,7 @@ Example: <extract><targets>${displayPath}</targets></extract>`;
358958
360668
  const displayPath = toRelativePath(resolvedPath2, workspaceRoot);
358959
360669
  return `Error editing ${displayPath}: ${staleCheck.message}
358960
360670
 
358961
- Example: <extract><targets>${displayPath}</targets></extract>`;
360671
+ Use the extract tool with targets="${displayPath}" to re-read the file, then retry.`;
358962
360672
  }
358963
360673
  }
358964
360674
  const content = await import_fs12.promises.readFile(resolvedPath2, "utf-8");
@@ -395622,7 +397332,7 @@ module.exports = /*#__PURE__*/JSON.parse('{"100":"Continue","101":"Switching Pro
395622
397332
  /***/ ((module) => {
395623
397333
 
395624
397334
  "use strict";
395625
- module.exports = /*#__PURE__*/JSON.parse('{"name":"@probelabs/visor","version":"0.1.158","main":"dist/index.js","bin":{"visor":"./dist/index.js"},"exports":{".":{"require":"./dist/index.js","import":"./dist/index.js"},"./sdk":{"types":"./dist/sdk/sdk.d.ts","import":"./dist/sdk/sdk.mjs","require":"./dist/sdk/sdk.js"},"./cli":{"require":"./dist/index.js"}},"files":["dist/","defaults/","action.yml","README.md","LICENSE"],"publishConfig":{"access":"public","registry":"https://registry.npmjs.org/"},"scripts":{"build:cli":"ncc build src/index.ts -o dist && cp -r defaults dist/ && cp -r output dist/ && cp -r docs dist/ && cp -r examples dist/ && cp -r src/debug-visualizer/ui dist/debug-visualizer/ && node scripts/inject-version.js && echo \'#!/usr/bin/env node\' | cat - dist/index.js > temp && mv temp dist/index.js && chmod +x dist/index.js","build:sdk":"tsup src/sdk.ts --dts --sourcemap --format esm,cjs --out-dir dist/sdk","build":"./scripts/build-oss.sh","build:ee":"npm run build:cli && npm run build:sdk","test":"jest && npm run test:yaml","test:unit":"jest","prepublishOnly":"npm run build","test:watch":"jest --watch","test:coverage":"jest --coverage","test:ee":"jest --testPathPatterns=\'tests/ee\' --testPathIgnorePatterns=\'/node_modules/\' --no-coverage","test:manual:bash":"RUN_MANUAL_TESTS=true jest tests/manual/bash-config-manual.test.ts","lint":"eslint src tests --ext .ts","lint:fix":"eslint src tests --ext .ts --fix","format":"prettier --write src tests","format:check":"prettier --check src tests","clean":"","clean:traces":"node scripts/clean-traces.js","prebuild":"npm run clean && node scripts/generate-config-schema.js","pretest":"npm run clean:traces && node scripts/generate-config-schema.js && npm run build:cli","pretest:unit":"npm run clean:traces && node scripts/generate-config-schema.js && npm run build:cli","test:with-build":"npm run build:cli && jest","test:yaml":"node dist/index.js test --progress compact","test:yaml:parallel":"node dist/index.js test 
--progress compact --max-parallel 4","prepare":"husky","pre-commit":"lint-staged","deploy:site":"cd site && npx wrangler pages deploy . --project-name=visor-site --commit-dirty=true","deploy:worker":"npx wrangler deploy","deploy":"npm run deploy:site && npm run deploy:worker","publish:ee":"./scripts/publish-ee.sh","release":"./scripts/release.sh","release:patch":"./scripts/release.sh patch","release:minor":"./scripts/release.sh minor","release:major":"./scripts/release.sh major","release:prerelease":"./scripts/release.sh prerelease","docs:validate":"node scripts/validate-readme-links.js","workshop:setup":"npm install -D reveal-md@6.1.2","workshop:serve":"cd workshop && reveal-md slides.md -w","workshop:export":"reveal-md workshop/slides.md --static workshop/build","workshop:pdf":"reveal-md workshop/slides.md --print workshop/Visor-Workshop.pdf --print-size letter","workshop:pdf:ci":"reveal-md workshop/slides.md --print workshop/Visor-Workshop.pdf --print-size letter --puppeteer-launch-args=\\"--no-sandbox --disable-dev-shm-usage\\"","workshop:pdf:a4":"reveal-md workshop/slides.md --print workshop/Visor-Workshop-A4.pdf --print-size A4","workshop:build":"npm run workshop:export && npm run workshop:pdf","simulate:issue":"TS_NODE_TRANSPILE_ONLY=1 ts-node scripts/simulate-gh-run.ts --event issues --action opened --debug","simulate:comment":"TS_NODE_TRANSPILE_ONLY=1 ts-node scripts/simulate-gh-run.ts --event issue_comment --action created --debug"},"keywords":["code-review","ai","github-action","cli","pr-review","visor"],"author":"Probe Labs","license":"MIT","description":"AI workflow engine for code review, assistants, and automation — orchestrate checks, MCP tools, and AI providers with YAML-driven 
pipelines","repository":{"type":"git","url":"git+https://github.com/probelabs/visor.git"},"bugs":{"url":"https://github.com/probelabs/visor/issues"},"homepage":"https://github.com/probelabs/visor#readme","dependencies":{"@actions/core":"^1.11.1","@apidevtools/swagger-parser":"^12.1.0","@modelcontextprotocol/sdk":"^1.25.3","@nyariv/sandboxjs":"github:probelabs/SandboxJS#f1c13b8eee98734a8ea024061eada4aa9a9ff2e9","@octokit/action":"^8.0.2","@octokit/auth-app":"^8.1.0","@octokit/core":"^7.0.3","@octokit/rest":"^22.0.0","@opentelemetry/api":"^1.9.0","@opentelemetry/core":"^1.30.1","@opentelemetry/exporter-trace-otlp-grpc":"^0.203.0","@opentelemetry/exporter-trace-otlp-http":"^0.203.0","@opentelemetry/instrumentation":"^0.203.0","@opentelemetry/resources":"^1.30.1","@opentelemetry/sdk-metrics":"^1.30.1","@opentelemetry/sdk-node":"^0.203.0","@opentelemetry/sdk-trace-base":"^1.30.1","@opentelemetry/semantic-conventions":"^1.30.1","@probelabs/probe":"^0.6.0-rc271","@types/commander":"^2.12.0","@types/uuid":"^10.0.0","acorn":"^8.16.0","acorn-walk":"^8.3.5","ajv":"^8.17.1","ajv-formats":"^3.0.1","better-sqlite3":"^11.0.0","blessed":"^0.1.81","cli-table3":"^0.6.5","commander":"^14.0.0","deepmerge":"^4.3.1","dotenv":"^17.2.3","ignore":"^7.0.5","js-yaml":"^4.1.0","jsonpath-plus":"^10.4.0","liquidjs":"^10.21.1","minimatch":"^10.2.2","node-cron":"^3.0.3","open":"^9.1.0","simple-git":"^3.28.0","uuid":"^11.1.0","ws":"^8.18.3"},"optionalDependencies":{"@anthropic/claude-code-sdk":"npm:null@*","@open-policy-agent/opa-wasm":"^1.10.0","knex":"^3.1.0","mysql2":"^3.11.0","pg":"^8.13.0","tedious":"^19.0.0"},"devDependencies":{"@eslint/js":"^9.34.0","@kie/act-js":"^2.6.2","@kie/mock-github":"^2.0.1","@swc/core":"^1.13.2","@swc/jest":"^0.2.37","@types/better-sqlite3":"^7.6.0","@types/blessed":"^0.1.27","@types/jest":"^30.0.0","@types/js-yaml":"^4.0.9","@types/node":"^24.3.0","@types/node-cron":"^3.0.11","@types/ws":"^8.18.1","@typescript-eslint/eslint-plugin":"^8.42.0","@typescript-eslint/par
ser":"^8.42.0","@vercel/ncc":"^0.38.4","eslint":"^9.34.0","eslint-config-prettier":"^10.1.8","eslint-plugin-prettier":"^5.5.4","husky":"^9.1.7","jest":"^30.1.3","lint-staged":"^16.1.6","prettier":"^3.6.2","reveal-md":"^6.1.2","ts-json-schema-generator":"^1.5.1","ts-node":"^10.9.2","tsup":"^8.5.0","typescript":"^5.9.2","wrangler":"^3.0.0"},"peerDependenciesMeta":{"@anthropic/claude-code-sdk":{"optional":true}},"directories":{"test":"tests"},"lint-staged":{"src/**/*.{ts,js}":["eslint --fix","prettier --write"],"tests/**/*.{ts,js}":["eslint --fix","prettier --write"],"*.{json,md,yml,yaml}":["prettier --write"]}}');
397335
+ module.exports = /*#__PURE__*/JSON.parse('{"name":"@probelabs/visor","version":"0.1.42","main":"dist/index.js","bin":{"visor":"./dist/index.js"},"exports":{".":{"require":"./dist/index.js","import":"./dist/index.js"},"./sdk":{"types":"./dist/sdk/sdk.d.ts","import":"./dist/sdk/sdk.mjs","require":"./dist/sdk/sdk.js"},"./cli":{"require":"./dist/index.js"}},"files":["dist/","defaults/","action.yml","README.md","LICENSE"],"publishConfig":{"access":"public","registry":"https://registry.npmjs.org/"},"scripts":{"build:cli":"ncc build src/index.ts -o dist && cp -r defaults dist/ && cp -r output dist/ && cp -r docs dist/ && cp -r examples dist/ && cp -r src/debug-visualizer/ui dist/debug-visualizer/ && node scripts/inject-version.js && echo \'#!/usr/bin/env node\' | cat - dist/index.js > temp && mv temp dist/index.js && chmod +x dist/index.js","build:sdk":"tsup src/sdk.ts --dts --sourcemap --format esm,cjs --out-dir dist/sdk","build":"./scripts/build-oss.sh","build:ee":"npm run build:cli && npm run build:sdk","test":"jest && npm run test:yaml","test:unit":"jest","prepublishOnly":"npm run build","test:watch":"jest --watch","test:coverage":"jest --coverage","test:ee":"jest --testPathPatterns=\'tests/ee\' --testPathIgnorePatterns=\'/node_modules/\' --no-coverage","test:manual:bash":"RUN_MANUAL_TESTS=true jest tests/manual/bash-config-manual.test.ts","lint":"eslint src tests --ext .ts","lint:fix":"eslint src tests --ext .ts --fix","format":"prettier --write src tests","format:check":"prettier --check src tests","clean":"","clean:traces":"node scripts/clean-traces.js","prebuild":"npm run clean && node scripts/generate-config-schema.js","pretest":"npm run clean:traces && node scripts/generate-config-schema.js && npm run build:cli","pretest:unit":"npm run clean:traces && node scripts/generate-config-schema.js && npm run build:cli","test:with-build":"npm run build:cli && jest","test:yaml":"node dist/index.js test --progress compact","test:yaml:parallel":"node dist/index.js test 
--progress compact --max-parallel 4","prepare":"husky","pre-commit":"lint-staged","deploy:site":"cd site && npx wrangler pages deploy . --project-name=visor-site --commit-dirty=true","deploy:worker":"npx wrangler deploy","deploy":"npm run deploy:site && npm run deploy:worker","publish:ee":"./scripts/publish-ee.sh","release":"./scripts/release.sh","release:patch":"./scripts/release.sh patch","release:minor":"./scripts/release.sh minor","release:major":"./scripts/release.sh major","release:prerelease":"./scripts/release.sh prerelease","docs:validate":"node scripts/validate-readme-links.js","workshop:setup":"npm install -D reveal-md@6.1.2","workshop:serve":"cd workshop && reveal-md slides.md -w","workshop:export":"reveal-md workshop/slides.md --static workshop/build","workshop:pdf":"reveal-md workshop/slides.md --print workshop/Visor-Workshop.pdf --print-size letter","workshop:pdf:ci":"reveal-md workshop/slides.md --print workshop/Visor-Workshop.pdf --print-size letter --puppeteer-launch-args=\\"--no-sandbox --disable-dev-shm-usage\\"","workshop:pdf:a4":"reveal-md workshop/slides.md --print workshop/Visor-Workshop-A4.pdf --print-size A4","workshop:build":"npm run workshop:export && npm run workshop:pdf","simulate:issue":"TS_NODE_TRANSPILE_ONLY=1 ts-node scripts/simulate-gh-run.ts --event issues --action opened --debug","simulate:comment":"TS_NODE_TRANSPILE_ONLY=1 ts-node scripts/simulate-gh-run.ts --event issue_comment --action created --debug"},"keywords":["code-review","ai","github-action","cli","pr-review","visor"],"author":"Probe Labs","license":"MIT","description":"AI workflow engine for code review, assistants, and automation — orchestrate checks, MCP tools, and AI providers with YAML-driven 
pipelines","repository":{"type":"git","url":"git+https://github.com/probelabs/visor.git"},"bugs":{"url":"https://github.com/probelabs/visor/issues"},"homepage":"https://github.com/probelabs/visor#readme","dependencies":{"@actions/core":"^1.11.1","@apidevtools/swagger-parser":"^12.1.0","@modelcontextprotocol/sdk":"^1.25.3","@nyariv/sandboxjs":"github:probelabs/SandboxJS#f1c13b8eee98734a8ea024061eada4aa9a9ff2e9","@octokit/action":"^8.0.2","@octokit/auth-app":"^8.1.0","@octokit/core":"^7.0.3","@octokit/rest":"^22.0.0","@opentelemetry/api":"^1.9.0","@opentelemetry/core":"^1.30.1","@opentelemetry/exporter-trace-otlp-grpc":"^0.203.0","@opentelemetry/exporter-trace-otlp-http":"^0.203.0","@opentelemetry/instrumentation":"^0.203.0","@opentelemetry/resources":"^1.30.1","@opentelemetry/sdk-metrics":"^1.30.1","@opentelemetry/sdk-node":"^0.203.0","@opentelemetry/sdk-trace-base":"^1.30.1","@opentelemetry/semantic-conventions":"^1.30.1","@probelabs/probe":"^0.6.0-rc274","@types/commander":"^2.12.0","@types/uuid":"^10.0.0","acorn":"^8.16.0","acorn-walk":"^8.3.5","ajv":"^8.17.1","ajv-formats":"^3.0.1","better-sqlite3":"^11.0.0","blessed":"^0.1.81","cli-table3":"^0.6.5","commander":"^14.0.0","deepmerge":"^4.3.1","dotenv":"^17.2.3","ignore":"^7.0.5","js-yaml":"^4.1.0","jsonpath-plus":"^10.4.0","liquidjs":"^10.21.1","minimatch":"^10.2.2","node-cron":"^3.0.3","open":"^9.1.0","simple-git":"^3.28.0","uuid":"^11.1.0","ws":"^8.18.3"},"optionalDependencies":{"@anthropic/claude-code-sdk":"npm:null@*","@open-policy-agent/opa-wasm":"^1.10.0","knex":"^3.1.0","mysql2":"^3.11.0","pg":"^8.13.0","tedious":"^19.0.0"},"devDependencies":{"@eslint/js":"^9.34.0","@kie/act-js":"^2.6.2","@kie/mock-github":"^2.0.1","@swc/core":"^1.13.2","@swc/jest":"^0.2.37","@types/better-sqlite3":"^7.6.0","@types/blessed":"^0.1.27","@types/jest":"^30.0.0","@types/js-yaml":"^4.0.9","@types/node":"^24.3.0","@types/node-cron":"^3.0.11","@types/ws":"^8.18.1","@typescript-eslint/eslint-plugin":"^8.42.0","@typescript-eslint/par
ser":"^8.42.0","@vercel/ncc":"^0.38.4","eslint":"^9.34.0","eslint-config-prettier":"^10.1.8","eslint-plugin-prettier":"^5.5.4","husky":"^9.1.7","jest":"^30.1.3","lint-staged":"^16.1.6","prettier":"^3.6.2","reveal-md":"^6.1.2","ts-json-schema-generator":"^1.5.1","ts-node":"^10.9.2","tsup":"^8.5.0","typescript":"^5.9.2","wrangler":"^3.0.0"},"peerDependenciesMeta":{"@anthropic/claude-code-sdk":{"optional":true}},"directories":{"test":"tests"},"lint-staged":{"src/**/*.{ts,js}":["eslint --fix","prettier --write"],"tests/**/*.{ts,js}":["eslint --fix","prettier --write"],"*.{json,md,yml,yaml}":["prettier --write"]}}');
395626
397336
 
395627
397337
  /***/ })
395628
397338