@probelabs/visor 0.1.178 → 0.1.179-ee

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (101) hide show
  1. package/defaults/assistant.yaml +38 -16
  2. package/defaults/skills/code-explorer.yaml +8 -8
  3. package/dist/agent-protocol/tasks-cli-handler.d.ts.map +1 -1
  4. package/dist/agent-protocol/track-execution.d.ts.map +1 -1
  5. package/dist/defaults/assistant.yaml +38 -16
  6. package/dist/defaults/skills/code-explorer.yaml +8 -8
  7. package/dist/frontends/slack-frontend.d.ts +6 -0
  8. package/dist/frontends/slack-frontend.d.ts.map +1 -1
  9. package/dist/index.js +2143 -108
  10. package/dist/providers/ai-check-provider.d.ts.map +1 -1
  11. package/dist/sdk/{a2a-frontend-WYBMBBYG.mjs → a2a-frontend-KJFLIZJT.mjs} +2 -2
  12. package/dist/sdk/{check-provider-registry-3DZOXYIA.mjs → check-provider-registry-J27YX4IT.mjs} +5 -5
  13. package/dist/sdk/{check-provider-registry-T5J3H2N7.mjs → check-provider-registry-SYAHJMWJ.mjs} +5 -5
  14. package/dist/sdk/{chunk-6YGCACBF.mjs → chunk-CHARL3TY.mjs} +2 -2
  15. package/dist/sdk/{chunk-6YGCACBF.mjs.map → chunk-CHARL3TY.mjs.map} +1 -1
  16. package/dist/sdk/{chunk-B7XHSG3L.mjs → chunk-FTPLYUQ3.mjs} +163 -124
  17. package/dist/sdk/chunk-FTPLYUQ3.mjs.map +1 -0
  18. package/dist/sdk/{chunk-AK64Y6Y2.mjs → chunk-KWHLB5E3.mjs} +164 -125
  19. package/dist/sdk/chunk-KWHLB5E3.mjs.map +1 -0
  20. package/dist/sdk/{chunk-4ECMTCOM.mjs → chunk-OYHDBTKY.mjs} +2 -2
  21. package/dist/sdk/{chunk-ENSZDV3O.mjs → chunk-ZJYQMNPA.mjs} +3 -3
  22. package/dist/sdk/{failure-condition-evaluator-P3MS5DRL.mjs → failure-condition-evaluator-V2YGFRKO.mjs} +3 -3
  23. package/dist/sdk/{github-frontend-7RLEBJWG.mjs → github-frontend-4LM4NAZK.mjs} +3 -3
  24. package/dist/sdk/{host-I2TBBKD5.mjs → host-GBXJKNHL.mjs} +4 -4
  25. package/dist/sdk/{host-SE3MQHWG.mjs → host-XXPPPC76.mjs} +4 -4
  26. package/dist/sdk/knex-store-QCEW4I4R.mjs +527 -0
  27. package/dist/sdk/knex-store-QCEW4I4R.mjs.map +1 -0
  28. package/dist/sdk/loader-Q7K76ZIY.mjs +89 -0
  29. package/dist/sdk/loader-Q7K76ZIY.mjs.map +1 -0
  30. package/dist/sdk/opa-policy-engine-QCSSIMUF.mjs +655 -0
  31. package/dist/sdk/opa-policy-engine-QCSSIMUF.mjs.map +1 -0
  32. package/dist/sdk/{routing-2X6QF5IW.mjs → routing-YAYBIVPL.mjs} +4 -4
  33. package/dist/sdk/{schedule-tool-R6JJIDZ6.mjs → schedule-tool-OIVJDIDK.mjs} +5 -5
  34. package/dist/sdk/{schedule-tool-W4SQ334O.mjs → schedule-tool-WACIV77L.mjs} +5 -5
  35. package/dist/sdk/{schedule-tool-handler-AOMZV3Q3.mjs → schedule-tool-handler-ODKY57FO.mjs} +5 -5
  36. package/dist/sdk/{schedule-tool-handler-MPJFLH4J.mjs → schedule-tool-handler-SJF4ZKSB.mjs} +5 -5
  37. package/dist/sdk/sdk.js +1778 -328
  38. package/dist/sdk/sdk.js.map +1 -1
  39. package/dist/sdk/sdk.mjs +4 -4
  40. package/dist/sdk/{slack-frontend-XKSIOUXB.mjs → slack-frontend-OWD7BSWF.mjs} +22 -3
  41. package/dist/sdk/slack-frontend-OWD7BSWF.mjs.map +1 -0
  42. package/dist/sdk/{trace-helpers-4ADQ4GB3.mjs → trace-helpers-QL2B75AK.mjs} +2 -2
  43. package/dist/sdk/{track-execution-XTCZBUWX.mjs → track-execution-2Q66SXBZ.mjs} +20 -2
  44. package/dist/sdk/{track-execution-XTCZBUWX.mjs.map → track-execution-2Q66SXBZ.mjs.map} +1 -1
  45. package/dist/sdk/validator-XTZJZZJH.mjs +134 -0
  46. package/dist/sdk/validator-XTZJZZJH.mjs.map +1 -0
  47. package/dist/sdk/{workflow-check-provider-WHZP7BDF.mjs → workflow-check-provider-IXW6BMQA.mjs} +5 -5
  48. package/dist/sdk/{workflow-check-provider-WZN3B2S2.mjs → workflow-check-provider-UZQZYPOE.mjs} +5 -5
  49. package/dist/utils/workspace-manager.d.ts +2 -0
  50. package/dist/utils/workspace-manager.d.ts.map +1 -1
  51. package/package.json +2 -2
  52. package/dist/output/traces/run-2026-03-11T06-33-05-398Z.ndjson +0 -138
  53. package/dist/output/traces/run-2026-03-11T06-33-47-884Z.ndjson +0 -2296
  54. package/dist/sdk/a2a-frontend-U3PTNCLR.mjs +0 -1658
  55. package/dist/sdk/a2a-frontend-WYBMBBYG.mjs.map +0 -1
  56. package/dist/sdk/check-provider-registry-ZX76MY2L.mjs +0 -30
  57. package/dist/sdk/chunk-AK64Y6Y2.mjs.map +0 -1
  58. package/dist/sdk/chunk-ANEKFNAS.mjs +0 -45424
  59. package/dist/sdk/chunk-ANEKFNAS.mjs.map +0 -1
  60. package/dist/sdk/chunk-B7XHSG3L.mjs.map +0 -1
  61. package/dist/sdk/chunk-CDRKH5HH.mjs +0 -739
  62. package/dist/sdk/chunk-CDRKH5HH.mjs.map +0 -1
  63. package/dist/sdk/chunk-KG6PM4OL.mjs +0 -516
  64. package/dist/sdk/chunk-KG6PM4OL.mjs.map +0 -1
  65. package/dist/sdk/chunk-WZS4ARZB.mjs +0 -1502
  66. package/dist/sdk/chunk-WZS4ARZB.mjs.map +0 -1
  67. package/dist/sdk/failure-condition-evaluator-MMPKQGUA.mjs +0 -18
  68. package/dist/sdk/github-frontend-QTKOYB56.mjs +0 -1394
  69. package/dist/sdk/github-frontend-QTKOYB56.mjs.map +0 -1
  70. package/dist/sdk/routing-QHXBQS6X.mjs +0 -26
  71. package/dist/sdk/schedule-tool-MKT5FZ6J.mjs +0 -36
  72. package/dist/sdk/schedule-tool-handler-MPJFLH4J.mjs.map +0 -1
  73. package/dist/sdk/schedule-tool-handler-WY7WCFE5.mjs +0 -40
  74. package/dist/sdk/schedule-tool-handler-WY7WCFE5.mjs.map +0 -1
  75. package/dist/sdk/slack-frontend-XKSIOUXB.mjs.map +0 -1
  76. package/dist/sdk/trace-helpers-4ADQ4GB3.mjs.map +0 -1
  77. package/dist/sdk/trace-helpers-K47ZVJSU.mjs +0 -29
  78. package/dist/sdk/trace-helpers-K47ZVJSU.mjs.map +0 -1
  79. package/dist/sdk/workflow-check-provider-A3YH2UZJ.mjs +0 -30
  80. package/dist/sdk/workflow-check-provider-A3YH2UZJ.mjs.map +0 -1
  81. package/dist/sdk/workflow-check-provider-WHZP7BDF.mjs.map +0 -1
  82. package/dist/sdk/workflow-check-provider-WZN3B2S2.mjs.map +0 -1
  83. package/dist/traces/run-2026-03-11T06-33-05-398Z.ndjson +0 -138
  84. package/dist/traces/run-2026-03-11T06-33-47-884Z.ndjson +0 -2296
  85. /package/dist/sdk/{a2a-frontend-U3PTNCLR.mjs.map → a2a-frontend-KJFLIZJT.mjs.map} +0 -0
  86. /package/dist/sdk/{check-provider-registry-3DZOXYIA.mjs.map → check-provider-registry-J27YX4IT.mjs.map} +0 -0
  87. /package/dist/sdk/{check-provider-registry-T5J3H2N7.mjs.map → check-provider-registry-SYAHJMWJ.mjs.map} +0 -0
  88. /package/dist/sdk/{chunk-4ECMTCOM.mjs.map → chunk-OYHDBTKY.mjs.map} +0 -0
  89. /package/dist/sdk/{chunk-ENSZDV3O.mjs.map → chunk-ZJYQMNPA.mjs.map} +0 -0
  90. /package/dist/sdk/{check-provider-registry-ZX76MY2L.mjs.map → failure-condition-evaluator-V2YGFRKO.mjs.map} +0 -0
  91. /package/dist/sdk/{github-frontend-7RLEBJWG.mjs.map → github-frontend-4LM4NAZK.mjs.map} +0 -0
  92. /package/dist/sdk/{host-I2TBBKD5.mjs.map → host-GBXJKNHL.mjs.map} +0 -0
  93. /package/dist/sdk/{host-SE3MQHWG.mjs.map → host-XXPPPC76.mjs.map} +0 -0
  94. /package/dist/sdk/{failure-condition-evaluator-MMPKQGUA.mjs.map → routing-YAYBIVPL.mjs.map} +0 -0
  95. /package/dist/sdk/{failure-condition-evaluator-P3MS5DRL.mjs.map → schedule-tool-OIVJDIDK.mjs.map} +0 -0
  96. /package/dist/sdk/{routing-2X6QF5IW.mjs.map → schedule-tool-WACIV77L.mjs.map} +0 -0
  97. /package/dist/sdk/{routing-QHXBQS6X.mjs.map → schedule-tool-handler-ODKY57FO.mjs.map} +0 -0
  98. /package/dist/sdk/{schedule-tool-MKT5FZ6J.mjs.map → schedule-tool-handler-SJF4ZKSB.mjs.map} +0 -0
  99. /package/dist/sdk/{schedule-tool-R6JJIDZ6.mjs.map → trace-helpers-QL2B75AK.mjs.map} +0 -0
  100. /package/dist/sdk/{schedule-tool-W4SQ334O.mjs.map → workflow-check-provider-IXW6BMQA.mjs.map} +0 -0
  101. /package/dist/sdk/{schedule-tool-handler-AOMZV3Q3.mjs.map → workflow-check-provider-UZQZYPOE.mjs.map} +0 -0
package/dist/index.js CHANGED
@@ -1,8 +1,8 @@
1
1
  #!/usr/bin/env node
2
- process.env.VISOR_VERSION = '0.1.178';
3
- process.env.PROBE_VERSION = '0.6.0-rc293';
4
- process.env.VISOR_COMMIT_SHA = '79fcc6344e1ab954501ff6a7f9614a0372019b2b';
5
- process.env.VISOR_COMMIT_SHORT = '79fcc6344';
2
+ process.env.VISOR_VERSION = '0.1.179';
3
+ process.env.PROBE_VERSION = '0.6.0-rc294';
4
+ process.env.VISOR_COMMIT_SHA = 'c2ec610fc6c00b8e34666b8dad089ca3cf06bbf5';
5
+ process.env.VISOR_COMMIT_SHORT = 'c2ec610';
6
6
  /******/ (() => { // webpackBootstrap
7
7
  /******/ var __webpack_modules__ = ({
8
8
 
@@ -299882,6 +299882,19 @@ async function handleShow(positional, flags) {
299882
299882
  if (match.run_id)
299883
299883
  detailTable.push({ 'Run ID': match.run_id });
299884
299884
  detailTable.push({ Input: match.request_message });
299885
+ // Show AI response from status_message (recorded on completion/failure)
299886
+ const fullTask = store.getTask(match.id);
299887
+ if (fullTask?.status?.message) {
299888
+ const parts = fullTask.status.message.parts ?? [];
299889
+ const textPart = parts.find((p) => typeof p.text === 'string');
299890
+ if (textPart) {
299891
+ const responseText = textPart.text;
299892
+ // Truncate long responses for display
299893
+ const maxLen = 500;
299894
+ const display = responseText.length > maxLen ? responseText.slice(0, maxLen) + '...' : responseText;
299895
+ detailTable.push({ Response: display });
299896
+ }
299897
+ }
299885
299898
  // Show metadata
299886
299899
  const meta = match.metadata;
299887
299900
  const metaKeys = Object.keys(meta).filter(k => k !== 'source');
@@ -300057,10 +300070,35 @@ async function trackExecution(opts, executor) {
300057
300070
  logger_1.logger.info(`[TaskTracking] Task ${task.id} started (source=${source}, workflow=${workflowId || '-'}, instance=${instanceId})`);
300058
300071
  try {
300059
300072
  const result = await executor();
300073
+ // Extract AI response text from the result using the same pattern as Slack frontend:
300074
+ // result.reviewSummary.history has check outputs, look for output.text in any check
300075
+ let responseText = 'Execution completed';
300076
+ try {
300077
+ const history = result?.reviewSummary?.history;
300078
+ if (history) {
300079
+ // Look through check outputs for a text response (e.g., generate-response)
300080
+ for (const outputs of Object.values(history)) {
300081
+ if (!Array.isArray(outputs))
300082
+ continue;
300083
+ for (const out of outputs) {
300084
+ const text = out?.text;
300085
+ if (typeof text === 'string' && text.trim().length > 0) {
300086
+ responseText = text.trim();
300087
+ break;
300088
+ }
300089
+ }
300090
+ if (responseText !== 'Execution completed')
300091
+ break;
300092
+ }
300093
+ }
300094
+ }
300095
+ catch {
300096
+ // ignore extraction errors
300097
+ }
300060
300098
  const completedMsg = {
300061
300099
  message_id: crypto_1.default.randomUUID(),
300062
300100
  role: 'agent',
300063
- parts: [{ text: 'Execution completed' }],
300101
+ parts: [{ text: responseText }],
300064
300102
  };
300065
300103
  taskStore.updateTaskState(task.id, 'completed', completedMsg);
300066
300104
  logger_1.logger.info(`[TaskTracking] Task ${task.id} completed`);
@@ -302997,7 +303035,7 @@ async function handleDumpPolicyInput(checkId, argv) {
302997
303035
  let PolicyInputBuilder;
302998
303036
  try {
302999
303037
  // @ts-ignore — enterprise/ may not exist in OSS builds (caught at runtime)
303000
- const mod = await Promise.resolve().then(() => __importStar(__nccwpck_require__(71370)));
303038
+ const mod = await Promise.resolve().then(() => __importStar(__nccwpck_require__(17117)));
303001
303039
  PolicyInputBuilder = mod.PolicyInputBuilder;
303002
303040
  }
303003
303041
  catch {
@@ -311069,6 +311107,1810 @@ class EmailPollingRunner {
311069
311107
  exports.EmailPollingRunner = EmailPollingRunner;
311070
311108
 
311071
311109
 
311110
+ /***/ }),
311111
+
311112
+ /***/ 50069:
311113
+ /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
311114
+
311115
+ "use strict";
311116
+
311117
+ /**
311118
+ * Copyright (c) ProbeLabs. All rights reserved.
311119
+ * Licensed under the Elastic License 2.0; you may not use this file except
311120
+ * in compliance with the Elastic License 2.0.
311121
+ */
311122
+ var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
311123
+ if (k2 === undefined) k2 = k;
311124
+ var desc = Object.getOwnPropertyDescriptor(m, k);
311125
+ if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
311126
+ desc = { enumerable: true, get: function() { return m[k]; } };
311127
+ }
311128
+ Object.defineProperty(o, k2, desc);
311129
+ }) : (function(o, m, k, k2) {
311130
+ if (k2 === undefined) k2 = k;
311131
+ o[k2] = m[k];
311132
+ }));
311133
+ var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
311134
+ Object.defineProperty(o, "default", { enumerable: true, value: v });
311135
+ }) : function(o, v) {
311136
+ o["default"] = v;
311137
+ });
311138
+ var __importStar = (this && this.__importStar) || (function () {
311139
+ var ownKeys = function(o) {
311140
+ ownKeys = Object.getOwnPropertyNames || function (o) {
311141
+ var ar = [];
311142
+ for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
311143
+ return ar;
311144
+ };
311145
+ return ownKeys(o);
311146
+ };
311147
+ return function (mod) {
311148
+ if (mod && mod.__esModule) return mod;
311149
+ var result = {};
311150
+ if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
311151
+ __setModuleDefault(result, mod);
311152
+ return result;
311153
+ };
311154
+ })();
311155
+ Object.defineProperty(exports, "__esModule", ({ value: true }));
311156
+ exports.LicenseValidator = void 0;
311157
+ const crypto = __importStar(__nccwpck_require__(76982));
311158
+ const fs = __importStar(__nccwpck_require__(79896));
311159
+ const path = __importStar(__nccwpck_require__(16928));
311160
+ class LicenseValidator {
311161
+ /** Ed25519 public key for license verification (PEM format). */
311162
+ static PUBLIC_KEY = '-----BEGIN PUBLIC KEY-----\n' +
311163
+ 'MCowBQYDK2VwAyEAI/Zd08EFmgIdrDm/HXd0l3/5GBt7R1PrdvhdmEXhJlU=\n' +
311164
+ '-----END PUBLIC KEY-----\n';
311165
+ cache = null;
311166
+ static CACHE_TTL = 5 * 60 * 1000; // 5 minutes
311167
+ static GRACE_PERIOD = 72 * 3600 * 1000; // 72 hours after expiry
311168
+ /**
311169
+ * Load and validate license from environment or file.
311170
+ *
311171
+ * Resolution order:
311172
+ * 1. VISOR_LICENSE env var (JWT string)
311173
+ * 2. VISOR_LICENSE_FILE env var (path to file)
311174
+ * 3. .visor-license in project root (cwd)
311175
+ * 4. .visor-license in ~/.config/visor/
311176
+ */
311177
+ async loadAndValidate() {
311178
+ // Return cached result if still fresh
311179
+ if (this.cache && Date.now() - this.cache.validatedAt < LicenseValidator.CACHE_TTL) {
311180
+ return this.cache.payload;
311181
+ }
311182
+ const token = this.resolveToken();
311183
+ if (!token)
311184
+ return null;
311185
+ const payload = this.verifyAndDecode(token);
311186
+ if (!payload)
311187
+ return null;
311188
+ this.cache = { payload, validatedAt: Date.now() };
311189
+ return payload;
311190
+ }
311191
+ /** Check if a specific feature is licensed */
311192
+ hasFeature(feature) {
311193
+ if (!this.cache)
311194
+ return false;
311195
+ return this.cache.payload.features.includes(feature);
311196
+ }
311197
+ /** Check if license is valid (with grace period) */
311198
+ isValid() {
311199
+ if (!this.cache)
311200
+ return false;
311201
+ const now = Date.now();
311202
+ const expiryMs = this.cache.payload.exp * 1000;
311203
+ return now < expiryMs + LicenseValidator.GRACE_PERIOD;
311204
+ }
311205
+ /** Check if the license is within its grace period (expired but still valid) */
311206
+ isInGracePeriod() {
311207
+ if (!this.cache)
311208
+ return false;
311209
+ const now = Date.now();
311210
+ const expiryMs = this.cache.payload.exp * 1000;
311211
+ return now >= expiryMs && now < expiryMs + LicenseValidator.GRACE_PERIOD;
311212
+ }
311213
+ resolveToken() {
311214
+ // 1. Direct env var
311215
+ if (process.env.VISOR_LICENSE) {
311216
+ return process.env.VISOR_LICENSE.trim();
311217
+ }
311218
+ // 2. File path from env (validate against path traversal)
311219
+ if (process.env.VISOR_LICENSE_FILE) {
311220
+ // path.resolve() produces an absolute path with all '..' segments resolved,
311221
+ // so a separate resolved.includes('..') check is unnecessary.
311222
+ const resolved = path.resolve(process.env.VISOR_LICENSE_FILE);
311223
+ const home = process.env.HOME || process.env.USERPROFILE || '';
311224
+ const allowedPrefixes = [path.normalize(process.cwd())];
311225
+ if (home)
311226
+ allowedPrefixes.push(path.normalize(path.join(home, '.config', 'visor')));
311227
+ // Resolve symlinks so an attacker cannot create a symlink inside an
311228
+ // allowed prefix that points to an arbitrary file outside it.
311229
+ let realPath;
311230
+ try {
311231
+ realPath = fs.realpathSync(resolved);
311232
+ }
311233
+ catch {
311234
+ return null; // File doesn't exist or isn't accessible
311235
+ }
311236
+ const isSafe = allowedPrefixes.some(prefix => realPath === prefix || realPath.startsWith(prefix + path.sep));
311237
+ if (!isSafe)
311238
+ return null;
311239
+ return this.readFile(realPath);
311240
+ }
311241
+ // 3. .visor-license in cwd
311242
+ const cwdPath = path.join(process.cwd(), '.visor-license');
311243
+ const cwdToken = this.readFile(cwdPath);
311244
+ if (cwdToken)
311245
+ return cwdToken;
311246
+ // 4. ~/.config/visor/.visor-license
311247
+ const home = process.env.HOME || process.env.USERPROFILE || '';
311248
+ if (home) {
311249
+ const configPath = path.join(home, '.config', 'visor', '.visor-license');
311250
+ const configToken = this.readFile(configPath);
311251
+ if (configToken)
311252
+ return configToken;
311253
+ }
311254
+ return null;
311255
+ }
311256
+ readFile(filePath) {
311257
+ try {
311258
+ return fs.readFileSync(filePath, 'utf-8').trim();
311259
+ }
311260
+ catch {
311261
+ return null;
311262
+ }
311263
+ }
311264
+ verifyAndDecode(token) {
311265
+ try {
311266
+ const parts = token.split('.');
311267
+ if (parts.length !== 3)
311268
+ return null;
311269
+ const [headerB64, payloadB64, signatureB64] = parts;
311270
+ // Decode header to verify algorithm
311271
+ const header = JSON.parse(Buffer.from(headerB64, 'base64url').toString());
311272
+ if (header.alg !== 'EdDSA')
311273
+ return null;
311274
+ // Verify signature
311275
+ const data = `${headerB64}.${payloadB64}`;
311276
+ const signature = Buffer.from(signatureB64, 'base64url');
311277
+ const publicKey = crypto.createPublicKey(LicenseValidator.PUBLIC_KEY);
311278
+ // Validate that the loaded public key is actually Ed25519 (OID 1.3.101.112).
311279
+ // This prevents algorithm-confusion attacks if the embedded key were ever
311280
+ // swapped to a different type.
311281
+ if (publicKey.asymmetricKeyType !== 'ed25519') {
311282
+ return null;
311283
+ }
311284
+ // Ed25519 verification: algorithm must be null because EdDSA performs its
311285
+ // own internal hashing (SHA-512) — passing a digest algorithm here would
311286
+ // cause Node.js to throw. The key type is validated above.
311287
+ const isValid = crypto.verify(null, Buffer.from(data), publicKey, signature);
311288
+ if (!isValid)
311289
+ return null;
311290
+ // Decode payload
311291
+ const payload = JSON.parse(Buffer.from(payloadB64, 'base64url').toString());
311292
+ // Validate required fields
311293
+ if (!payload.org ||
311294
+ !Array.isArray(payload.features) ||
311295
+ typeof payload.exp !== 'number' ||
311296
+ typeof payload.iat !== 'number' ||
311297
+ !payload.sub) {
311298
+ return null;
311299
+ }
311300
+ // Check expiry (with grace period)
311301
+ const now = Date.now();
311302
+ const expiryMs = payload.exp * 1000;
311303
+ if (now >= expiryMs + LicenseValidator.GRACE_PERIOD) {
311304
+ return null;
311305
+ }
311306
+ return payload;
311307
+ }
311308
+ catch {
311309
+ return null;
311310
+ }
311311
+ }
311312
+ }
311313
+ exports.LicenseValidator = LicenseValidator;
311314
+
311315
+
311316
+ /***/ }),
311317
+
311318
+ /***/ 87068:
311319
+ /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
311320
+
311321
+ "use strict";
311322
+
311323
+ /**
311324
+ * Copyright (c) ProbeLabs. All rights reserved.
311325
+ * Licensed under the Elastic License 2.0; you may not use this file except
311326
+ * in compliance with the Elastic License 2.0.
311327
+ */
311328
+ var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
311329
+ if (k2 === undefined) k2 = k;
311330
+ var desc = Object.getOwnPropertyDescriptor(m, k);
311331
+ if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
311332
+ desc = { enumerable: true, get: function() { return m[k]; } };
311333
+ }
311334
+ Object.defineProperty(o, k2, desc);
311335
+ }) : (function(o, m, k, k2) {
311336
+ if (k2 === undefined) k2 = k;
311337
+ o[k2] = m[k];
311338
+ }));
311339
+ var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
311340
+ Object.defineProperty(o, "default", { enumerable: true, value: v });
311341
+ }) : function(o, v) {
311342
+ o["default"] = v;
311343
+ });
311344
+ var __importStar = (this && this.__importStar) || (function () {
311345
+ var ownKeys = function(o) {
311346
+ ownKeys = Object.getOwnPropertyNames || function (o) {
311347
+ var ar = [];
311348
+ for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
311349
+ return ar;
311350
+ };
311351
+ return ownKeys(o);
311352
+ };
311353
+ return function (mod) {
311354
+ if (mod && mod.__esModule) return mod;
311355
+ var result = {};
311356
+ if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
311357
+ __setModuleDefault(result, mod);
311358
+ return result;
311359
+ };
311360
+ })();
311361
+ Object.defineProperty(exports, "__esModule", ({ value: true }));
311362
+ exports.loadEnterprisePolicyEngine = loadEnterprisePolicyEngine;
311363
+ exports.loadEnterpriseStoreBackend = loadEnterpriseStoreBackend;
311364
+ const default_engine_1 = __nccwpck_require__(93866);
311365
+ /**
311366
+ * Load the enterprise policy engine if licensed, otherwise return the default no-op engine.
311367
+ *
311368
+ * This is the sole import boundary between OSS and enterprise code. Core code
311369
+ * must only import from this module (via dynamic `await import()`), never from
311370
+ * individual enterprise submodules.
311371
+ */
311372
+ async function loadEnterprisePolicyEngine(config) {
311373
+ try {
311374
+ const { LicenseValidator } = await Promise.resolve().then(() => __importStar(__nccwpck_require__(50069)));
311375
+ const validator = new LicenseValidator();
311376
+ const license = await validator.loadAndValidate();
311377
+ if (!license || !validator.hasFeature('policy')) {
311378
+ return new default_engine_1.DefaultPolicyEngine();
311379
+ }
311380
+ if (validator.isInGracePeriod()) {
311381
+ // eslint-disable-next-line no-console
311382
+ console.warn('[visor:enterprise] License has expired but is within the 72-hour grace period. ' +
311383
+ 'Please renew your license.');
311384
+ }
311385
+ const { OpaPolicyEngine } = await Promise.resolve().then(() => __importStar(__nccwpck_require__(39530)));
311386
+ const engine = new OpaPolicyEngine(config);
311387
+ await engine.initialize(config);
311388
+ return engine;
311389
+ }
311390
+ catch (err) {
311391
+ // Enterprise code not available or initialization failed
311392
+ const msg = err instanceof Error ? err.message : String(err);
311393
+ try {
311394
+ const { logger } = __nccwpck_require__(86999);
311395
+ logger.warn(`[PolicyEngine] Enterprise policy init failed, falling back to default: ${msg}`);
311396
+ }
311397
+ catch {
311398
+ // silent
311399
+ }
311400
+ return new default_engine_1.DefaultPolicyEngine();
311401
+ }
311402
+ }
311403
+ /**
311404
+ * Load the enterprise schedule store backend if licensed.
311405
+ *
311406
+ * @param driver Database driver ('postgresql', 'mysql', or 'mssql')
311407
+ * @param storageConfig Storage configuration with connection details
311408
+ * @param haConfig Optional HA configuration
311409
+ * @throws Error if enterprise license is not available or missing 'scheduler-sql' feature
311410
+ */
311411
+ async function loadEnterpriseStoreBackend(driver, storageConfig, haConfig) {
311412
+ const { LicenseValidator } = await Promise.resolve().then(() => __importStar(__nccwpck_require__(50069)));
311413
+ const validator = new LicenseValidator();
311414
+ const license = await validator.loadAndValidate();
311415
+ if (!license || !validator.hasFeature('scheduler-sql')) {
311416
+ throw new Error(`The ${driver} schedule storage driver requires a Visor Enterprise license ` +
311417
+ `with the 'scheduler-sql' feature. Please upgrade or use driver: 'sqlite' (default).`);
311418
+ }
311419
+ if (validator.isInGracePeriod()) {
311420
+ // eslint-disable-next-line no-console
311421
+ console.warn('[visor:enterprise] License has expired but is within the 72-hour grace period. ' +
311422
+ 'Please renew your license.');
311423
+ }
311424
+ const { KnexStoreBackend } = await Promise.resolve().then(() => __importStar(__nccwpck_require__(63737)));
311425
+ return new KnexStoreBackend(driver, storageConfig, haConfig);
311426
+ }
311427
+
311428
+
311429
+ /***/ }),
311430
+
311431
+ /***/ 628:
311432
+ /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
311433
+
311434
+ "use strict";
311435
+
311436
+ /**
311437
+ * Copyright (c) ProbeLabs. All rights reserved.
311438
+ * Licensed under the Elastic License 2.0; you may not use this file except
311439
+ * in compliance with the Elastic License 2.0.
311440
+ */
311441
+ var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
311442
+ if (k2 === undefined) k2 = k;
311443
+ var desc = Object.getOwnPropertyDescriptor(m, k);
311444
+ if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
311445
+ desc = { enumerable: true, get: function() { return m[k]; } };
311446
+ }
311447
+ Object.defineProperty(o, k2, desc);
311448
+ }) : (function(o, m, k, k2) {
311449
+ if (k2 === undefined) k2 = k;
311450
+ o[k2] = m[k];
311451
+ }));
311452
+ var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
311453
+ Object.defineProperty(o, "default", { enumerable: true, value: v });
311454
+ }) : function(o, v) {
311455
+ o["default"] = v;
311456
+ });
311457
+ var __importStar = (this && this.__importStar) || (function () {
311458
+ var ownKeys = function(o) {
311459
+ ownKeys = Object.getOwnPropertyNames || function (o) {
311460
+ var ar = [];
311461
+ for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
311462
+ return ar;
311463
+ };
311464
+ return ownKeys(o);
311465
+ };
311466
+ return function (mod) {
311467
+ if (mod && mod.__esModule) return mod;
311468
+ var result = {};
311469
+ if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
311470
+ __setModuleDefault(result, mod);
311471
+ return result;
311472
+ };
311473
+ })();
311474
+ Object.defineProperty(exports, "__esModule", ({ value: true }));
311475
+ exports.OpaCompiler = void 0;
311476
+ const fs = __importStar(__nccwpck_require__(79896));
311477
+ const path = __importStar(__nccwpck_require__(16928));
311478
+ const os = __importStar(__nccwpck_require__(70857));
311479
+ const crypto = __importStar(__nccwpck_require__(76982));
311480
+ const child_process_1 = __nccwpck_require__(35317);
311481
+ /**
311482
+ * OPA Rego Compiler - compiles .rego policy files to WASM bundles using the `opa` CLI.
311483
+ *
311484
+ * Handles:
311485
+ * - Resolving input paths to WASM bytes (direct .wasm, directory with policy.wasm, or .rego files)
311486
+ * - Compiling .rego files to WASM via `opa build`
311487
+ * - Caching compiled bundles based on content hashes
311488
+ * - Extracting policy.wasm from OPA tar.gz bundles
311489
+ *
311490
+ * Requires:
311491
+ * - `opa` CLI on PATH (only when auto-compiling .rego files)
311492
+ */
311493
+ class OpaCompiler {
311494
+ static CACHE_DIR = path.join(os.tmpdir(), 'visor-opa-cache');
311495
+ /**
311496
+ * Resolve the input paths to WASM bytes.
311497
+ *
311498
+ * Strategy:
311499
+ * 1. If any path is a .wasm file, read it directly
311500
+ * 2. If a directory contains policy.wasm, read it
311501
+ * 3. Otherwise, collect all .rego files and auto-compile via `opa build`
311502
+ */
311503
+ async resolveWasmBytes(paths) {
311504
+ // Collect .rego files and check for existing .wasm
311505
+ const regoFiles = [];
311506
+ for (const p of paths) {
311507
+ const resolved = path.resolve(p);
311508
+ // Reject paths containing '..' after resolution (path traversal)
311509
+ if (path.normalize(resolved).includes('..')) {
311510
+ throw new Error(`Policy path contains traversal sequences: ${p}`);
311511
+ }
311512
+ // Direct .wasm file
311513
+ if (resolved.endsWith('.wasm') && fs.existsSync(resolved)) {
311514
+ return fs.readFileSync(resolved);
311515
+ }
311516
+ if (!fs.existsSync(resolved))
311517
+ continue;
311518
+ const stat = fs.statSync(resolved);
311519
+ if (stat.isDirectory()) {
311520
+ // Check for pre-compiled policy.wasm in directory
311521
+ const wasmCandidate = path.join(resolved, 'policy.wasm');
311522
+ if (fs.existsSync(wasmCandidate)) {
311523
+ return fs.readFileSync(wasmCandidate);
311524
+ }
311525
+ // Collect all .rego files from directory
311526
+ const files = fs.readdirSync(resolved);
311527
+ for (const f of files) {
311528
+ if (f.endsWith('.rego')) {
311529
+ regoFiles.push(path.join(resolved, f));
311530
+ }
311531
+ }
311532
+ }
311533
+ else if (resolved.endsWith('.rego')) {
311534
+ regoFiles.push(resolved);
311535
+ }
311536
+ }
311537
+ if (regoFiles.length === 0) {
311538
+ throw new Error(`OPA WASM evaluator: no .wasm bundle or .rego files found in: ${paths.join(', ')}`);
311539
+ }
311540
+ // Auto-compile .rego -> .wasm
311541
+ return this.compileRego(regoFiles);
311542
+ }
311543
+ /**
311544
+ * Auto-compile .rego files to a WASM bundle using the `opa` CLI.
311545
+ *
311546
+ * Caches the compiled bundle based on a content hash of all input .rego files
311547
+ * so subsequent runs skip compilation if policies haven't changed.
311548
+ */
311549
+ compileRego(regoFiles) {
311550
+ // Check that `opa` CLI is available
311551
+ try {
311552
+ (0, child_process_1.execFileSync)('opa', ['version'], { stdio: 'pipe' });
311553
+ }
311554
+ catch {
311555
+ throw new Error('OPA CLI (`opa`) not found on PATH. Install it from https://www.openpolicyagent.org/docs/latest/#running-opa\n' +
311556
+ 'Or pre-compile your .rego files: opa build -t wasm -e visor -o bundle.tar.gz ' +
311557
+ regoFiles.join(' '));
311558
+ }
311559
+ // Compute content hash for cache key
311560
+ const hash = crypto.createHash('sha256');
311561
+ for (const f of regoFiles.sort()) {
311562
+ hash.update(fs.readFileSync(f));
311563
+ hash.update(f); // include filename for disambiguation
311564
+ }
311565
+ const cacheKey = hash.digest('hex').slice(0, 16);
311566
+ const cacheDir = OpaCompiler.CACHE_DIR;
311567
+ const cachedWasm = path.join(cacheDir, `${cacheKey}.wasm`);
311568
+ // Return cached bundle if still valid
311569
+ if (fs.existsSync(cachedWasm)) {
311570
+ return fs.readFileSync(cachedWasm);
311571
+ }
311572
+ // Compile to WASM via opa build
311573
+ fs.mkdirSync(cacheDir, { recursive: true });
311574
+ const bundleTar = path.join(cacheDir, `${cacheKey}-bundle.tar.gz`);
311575
+ try {
311576
+ const args = [
311577
+ 'build',
311578
+ '-t',
311579
+ 'wasm',
311580
+ '-e',
311581
+ 'visor', // entrypoint: the visor package tree
311582
+ '-o',
311583
+ bundleTar,
311584
+ ...regoFiles,
311585
+ ];
311586
+ (0, child_process_1.execFileSync)('opa', args, {
311587
+ stdio: 'pipe',
311588
+ timeout: 30000,
311589
+ });
311590
+ }
311591
+ catch (err) {
311592
+ const stderr = err?.stderr?.toString() || '';
311593
+ throw new Error(`Failed to compile .rego files to WASM:\n${stderr}\n` +
311594
+ 'Ensure your .rego files are valid and the `opa` CLI is installed.');
311595
+ }
311596
+ // Extract policy.wasm from the tar.gz bundle
311597
+ // OPA bundles are tar.gz with /policy.wasm inside
311598
+ try {
311599
+ (0, child_process_1.execFileSync)('tar', ['-xzf', bundleTar, '-C', cacheDir, '/policy.wasm'], {
311600
+ stdio: 'pipe',
311601
+ });
311602
+ const extractedWasm = path.join(cacheDir, 'policy.wasm');
311603
+ if (fs.existsSync(extractedWasm)) {
311604
+ // Move to cache-key named file
311605
+ fs.renameSync(extractedWasm, cachedWasm);
311606
+ }
311607
+ }
311608
+ catch {
311609
+ // Some tar implementations don't like leading /
311610
+ try {
311611
+ (0, child_process_1.execFileSync)('tar', ['-xzf', bundleTar, '-C', cacheDir, 'policy.wasm'], {
311612
+ stdio: 'pipe',
311613
+ });
311614
+ const extractedWasm = path.join(cacheDir, 'policy.wasm');
311615
+ if (fs.existsSync(extractedWasm)) {
311616
+ fs.renameSync(extractedWasm, cachedWasm);
311617
+ }
311618
+ }
311619
+ catch (err2) {
311620
+ throw new Error(`Failed to extract policy.wasm from OPA bundle: ${err2?.message || err2}`);
311621
+ }
311622
+ }
311623
+ // Clean up tar
311624
+ try {
311625
+ fs.unlinkSync(bundleTar);
311626
+ }
311627
+ catch { }
311628
+ if (!fs.existsSync(cachedWasm)) {
311629
+ throw new Error('OPA build succeeded but policy.wasm was not found in the bundle');
311630
+ }
311631
+ return fs.readFileSync(cachedWasm);
311632
+ }
311633
+ }
311634
+ exports.OpaCompiler = OpaCompiler;
311635
+
311636
+
311637
+ /***/ }),
311638
+
311639
+ /***/ 44693:
311640
+ /***/ ((__unused_webpack_module, exports) => {
311641
+
311642
+ "use strict";
311643
+
311644
+ /**
311645
+ * Copyright (c) ProbeLabs. All rights reserved.
311646
+ * Licensed under the Elastic License 2.0; you may not use this file except
311647
+ * in compliance with the Elastic License 2.0.
311648
+ */
311649
+ Object.defineProperty(exports, "__esModule", ({ value: true }));
311650
+ exports.OpaHttpEvaluator = void 0;
311651
+ /**
311652
+ * OPA HTTP Evaluator - evaluates policies via an external OPA server's REST API.
311653
+ *
311654
+ * Uses the built-in `fetch` API (Node 18+), so no extra dependencies are needed.
311655
+ */
311656
class OpaHttpEvaluator {
    baseUrl;
    timeout;
    /**
     * @param baseUrl - Base URL of the OPA server; must be http:// or https://.
     * @param timeout - Per-request timeout in milliseconds (default 5000).
     * @throws if the URL is malformed, uses a non-HTTP(S) protocol, or points
     *         at a loopback/link-local/private/metadata address (SSRF guard).
     */
    constructor(baseUrl, timeout = 5000) {
        // Validate URL format and protocol
        let parsed;
        try {
            parsed = new URL(baseUrl);
        }
        catch {
            throw new Error(`OPA HTTP evaluator: invalid URL: ${baseUrl}`);
        }
        if (!['http:', 'https:'].includes(parsed.protocol)) {
            throw new Error(`OPA HTTP evaluator: url must use http:// or https:// protocol, got: ${baseUrl}`);
        }
        // Block cloud metadata, loopback, link-local, and private network addresses
        const hostname = parsed.hostname;
        if (this.isBlockedHostname(hostname)) {
            throw new Error(`OPA HTTP evaluator: url must not point to internal, loopback, or private network addresses`);
        }
        // Normalize: strip trailing slash
        this.baseUrl = baseUrl.replace(/\/+$/, '');
        this.timeout = timeout;
    }
    /**
     * Check if a hostname is blocked due to SSRF concerns.
     *
     * Blocks:
     * - Loopback addresses (127.x.x.x, localhost, 0.0.0.0, ::1)
     * - Link-local addresses (169.254.x.x, fe80::/10)
     * - Private networks (10.x.x.x, 172.16-31.x.x, 192.168.x.x)
     * - IPv6 unique local addresses (fc00::/7)
     * - Cloud metadata services (*.internal)
     */
    isBlockedHostname(hostname) {
        if (!hostname)
            return true; // block empty hostnames
        // Normalize hostname: lowercase and remove brackets for IPv6 literals
        // (URL.hostname keeps the brackets, e.g. "[fd00::1]").
        const normalized = hostname.toLowerCase().replace(/^\[|\]$/g, '');
        // Block .internal domains (cloud metadata services)
        if (normalized === 'metadata.google.internal' || normalized.endsWith('.internal')) {
            return true;
        }
        // Block localhost variants
        if (normalized === 'localhost' || normalized === 'localhost.localdomain') {
            return true;
        }
        // Block IPv6 loopback
        if (normalized === '::1' || normalized === '0:0:0:0:0:0:0:1') {
            return true;
        }
        // Check IPv4 patterns
        const ipv4Pattern = /^(\d{1,3})\.(\d{1,3})\.(\d{1,3})\.(\d{1,3})$/;
        const ipv4Match = normalized.match(ipv4Pattern);
        if (ipv4Match) {
            const octets = ipv4Match.slice(1, 5).map(Number);
            // Out-of-range octets mean this is not a real IPv4 address; treat it
            // as an ordinary (unresolvable) hostname rather than blocking it.
            if (octets.some(octet => octet > 255)) {
                return false;
            }
            const [a, b] = octets;
            // Block loopback: 127.0.0.0/8
            if (a === 127) {
                return true;
            }
            // Block 0.0.0.0/8 (this host)
            if (a === 0) {
                return true;
            }
            // Block link-local: 169.254.0.0/16
            if (a === 169 && b === 254) {
                return true;
            }
            // Block private networks
            // 10.0.0.0/8
            if (a === 10) {
                return true;
            }
            // 172.16.0.0/12 (172.16.x.x through 172.31.x.x)
            if (a === 172 && b >= 16 && b <= 31) {
                return true;
            }
            // 192.168.0.0/16
            if (a === 192 && b === 168) {
                return true;
            }
        }
        // Check IPv6 patterns. Only IPv6 literals contain ':'; without this
        // guard, ordinary domain names beginning with "fd"/"fc" (e.g.
        // "fdic.gov", "fcbank.com") would be incorrectly blocked by the
        // prefix tests below.
        if (normalized.includes(':')) {
            // Block unique local addresses: fc00::/7 (fc.. and fd..)
            if (normalized.startsWith('fd') || normalized.startsWith('fc')) {
                return true;
            }
            // Block link-local: fe80::/10
            if (normalized.startsWith('fe80:')) {
                return true;
            }
        }
        return false;
    }
    /**
     * Evaluate a policy rule against an input document via OPA REST API.
     *
     * @param input - The input document to evaluate
     * @param rulePath - OPA rule path (e.g., 'visor/check/execute')
     * @returns The `result` field of OPA's response, or undefined when absent
     * @throws on non-2xx responses, unparsable JSON, or timeout (AbortError)
     */
    async evaluate(input, rulePath) {
        // OPA Data API: POST /v1/data/<path>
        const encodedPath = rulePath
            .split('/')
            .map(s => encodeURIComponent(s))
            .join('/');
        const url = `${this.baseUrl}/v1/data/${encodedPath}`;
        const controller = new AbortController();
        const timer = setTimeout(() => controller.abort(), this.timeout);
        try {
            const response = await fetch(url, {
                method: 'POST',
                headers: { 'Content-Type': 'application/json' },
                body: JSON.stringify({ input }),
                signal: controller.signal,
            });
            if (!response.ok) {
                throw new Error(`OPA HTTP ${response.status}: ${response.statusText}`);
            }
            let body;
            try {
                body = await response.json();
            }
            catch (jsonErr) {
                throw new Error(`OPA HTTP evaluator: failed to parse JSON response: ${jsonErr instanceof Error ? jsonErr.message : String(jsonErr)}`);
            }
            // OPA returns { result: { ... } }
            return body?.result;
        }
        finally {
            clearTimeout(timer);
        }
    }
    async shutdown() {
        // No persistent connections to close
    }
}
311798
+ exports.OpaHttpEvaluator = OpaHttpEvaluator;
311799
+
311800
+
311801
+ /***/ }),
311802
+
311803
+ /***/ 39530:
311804
+ /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
311805
+
311806
+ "use strict";
311807
+
311808
+ /**
311809
+ * Copyright (c) ProbeLabs. All rights reserved.
311810
+ * Licensed under the Elastic License 2.0; you may not use this file except
311811
+ * in compliance with the Elastic License 2.0.
311812
+ */
311813
+ Object.defineProperty(exports, "__esModule", ({ value: true }));
311814
+ exports.OpaPolicyEngine = void 0;
311815
+ const opa_wasm_evaluator_1 = __nccwpck_require__(8613);
311816
+ const opa_http_evaluator_1 = __nccwpck_require__(44693);
311817
+ const policy_input_builder_1 = __nccwpck_require__(17117);
311818
+ /**
311819
+ * Enterprise OPA Policy Engine.
311820
+ *
311821
+ * Wraps both WASM (local) and HTTP (remote) OPA evaluators behind the
311822
+ * OSS PolicyEngine interface. All OPA input building and role resolution
311823
+ * is handled internally — the OSS call sites pass only plain types.
311824
+ */
311825
class OpaPolicyEngine {
    evaluator = null;
    fallback;
    timeout;
    config;
    inputBuilder = null;
    logger = null;
    /**
     * @param config - Policy section of the Visor config. `fallback` defaults
     *                 to 'deny' and `timeout` to 5000 ms when unset.
     */
    constructor(config) {
        this.config = config;
        this.fallback = config.fallback || 'deny';
        this.timeout = config.timeout || 5000;
    }
    /**
     * Initialize the engine: resolve the logger, build actor/repo/PR context
     * from the environment, and construct the configured evaluator
     * ('local' = WASM, 'remote' = HTTP, anything else = disabled).
     */
    async initialize(config) {
        // Resolve logger once at initialization
        try {
            this.logger = (__nccwpck_require__(86999).logger);
        }
        catch {
            // logger not available in this context
        }
        // Build actor/repo context from environment (available at engine init time)
        const env = process.env;
        const actor = {
            authorAssociation: env.VISOR_AUTHOR_ASSOCIATION,
            login: env.VISOR_AUTHOR_LOGIN || env.GITHUB_ACTOR,
            isLocalMode: !env.GITHUB_ACTIONS,
        };
        const repo = {
            owner: env.GITHUB_REPOSITORY_OWNER,
            name: env.GITHUB_REPOSITORY?.split('/')[1],
            branch: env.GITHUB_HEAD_REF,
            baseBranch: env.GITHUB_BASE_REF,
            event: env.GITHUB_EVENT_NAME,
        };
        const rawPrNumber = env.GITHUB_PR_NUMBER
            ? parseInt(env.GITHUB_PR_NUMBER, 10)
            : undefined;
        const pullRequest = {
            // Drop NaN from unparsable env values
            number: rawPrNumber !== undefined && Number.isFinite(rawPrNumber) ? rawPrNumber : undefined,
        };
        this.inputBuilder = new policy_input_builder_1.PolicyInputBuilder(config, actor, repo, pullRequest);
        switch (config.engine) {
            case 'local': {
                if (!config.rules) {
                    throw new Error('OPA local mode requires `policy.rules` path to .wasm or .rego files');
                }
                const localEvaluator = new opa_wasm_evaluator_1.OpaWasmEvaluator();
                await localEvaluator.initialize(config.rules);
                if (config.data) {
                    localEvaluator.loadData(config.data);
                }
                this.evaluator = localEvaluator;
                break;
            }
            case 'remote': {
                if (!config.url) {
                    throw new Error('OPA remote mode requires `policy.url` pointing to OPA server');
                }
                this.evaluator = new opa_http_evaluator_1.OpaHttpEvaluator(config.url, this.timeout);
                break;
            }
            default:
                this.evaluator = null;
        }
    }
    /**
     * Update actor/repo/PR context (e.g., after PR info becomes available).
     * Called by the enterprise loader when engine context is enriched.
     */
    setActorContext(actor, repo, pullRequest) {
        this.inputBuilder = new policy_input_builder_1.PolicyInputBuilder(this.config, actor, repo, pullRequest);
    }
    /** Evaluate whether a check may execute. Allows everything when disabled. */
    async evaluateCheckExecution(checkId, checkConfig) {
        if (!this.evaluator || !this.inputBuilder) {
            return { allowed: true };
        }
        const cfg = checkConfig && typeof checkConfig === 'object' ? checkConfig : {};
        const policyOverride = cfg.policy;
        const input = this.inputBuilder.forCheckExecution({
            id: checkId,
            type: cfg.type || 'ai',
            group: cfg.group,
            tags: cfg.tags,
            criticality: cfg.criticality,
            sandbox: cfg.sandbox,
            policy: policyOverride,
        });
        const rulePath = this.resolveRulePath('check.execute', policyOverride?.rule);
        return this.doEvaluate(input, rulePath);
    }
    /** Evaluate whether an MCP tool invocation is permitted. */
    async evaluateToolInvocation(serverName, methodName, transport) {
        if (!this.evaluator || !this.inputBuilder) {
            return { allowed: true };
        }
        const input = this.inputBuilder.forToolInvocation(serverName, methodName, transport);
        return this.doEvaluate(input, 'visor/tool/invoke');
    }
    /** Evaluate capability resolution for a check. */
    async evaluateCapabilities(checkId, capabilities) {
        if (!this.evaluator || !this.inputBuilder) {
            return { allowed: true };
        }
        const input = this.inputBuilder.forCapabilityResolve(checkId, capabilities);
        return this.doEvaluate(input, 'visor/capability/resolve');
    }
    /** Release the evaluator and drop all per-run context. */
    async shutdown() {
        if (this.evaluator && 'shutdown' in this.evaluator) {
            await this.evaluator.shutdown();
        }
        this.evaluator = null;
        this.inputBuilder = null;
    }
    /**
     * Map a default scope (dotted) or a per-check override to a 'visor/...'
     * rule path. Overrides missing the 'visor/' prefix are given one.
     */
    resolveRulePath(defaultScope, override) {
        if (!override) {
            return `visor/${defaultScope.replace(/\./g, '/')}`;
        }
        return override.startsWith('visor/') ? override : `visor/${override}`;
    }
    /**
     * Run the evaluator with a timeout, parse the result into a decision, and
     * apply warn/fallback semantics. Never throws: evaluation failures map to
     * the configured fallback decision.
     */
    async doEvaluate(input, rulePath) {
        try {
            this.logger?.debug(`[PolicyEngine] Evaluating ${rulePath}`, JSON.stringify(input));
            let timeoutHandle;
            const timeoutPromise = new Promise((_resolve, reject) => {
                timeoutHandle = setTimeout(() => reject(new Error('policy evaluation timeout')), this.timeout);
            });
            try {
                const raw = await Promise.race([this.rawEvaluate(input, rulePath), timeoutPromise]);
                const decision = this.parseDecision(raw);
                // In warn mode, override denied decisions to allowed but flag as warn
                if (this.fallback === 'warn' && !decision.allowed) {
                    decision.allowed = true;
                    decision.warn = true;
                    decision.reason = `audit: ${decision.reason || 'policy denied'}`;
                }
                this.logger?.debug(`[PolicyEngine] Decision for ${rulePath}: allowed=${decision.allowed}, warn=${decision.warn || false}, reason=${decision.reason || 'none'}`);
                return decision;
            }
            finally {
                if (timeoutHandle)
                    clearTimeout(timeoutHandle);
            }
        }
        catch (err) {
            const msg = err instanceof Error ? err.message : String(err);
            this.logger?.warn(`[PolicyEngine] Evaluation failed for ${rulePath}: ${msg}`);
            return {
                allowed: this.fallback === 'allow' || this.fallback === 'warn',
                warn: this.fallback === 'warn' ? true : undefined,
                reason: `policy evaluation failed, fallback=${this.fallback}`,
            };
        }
    }
    /** Dispatch to the active evaluator (WASM results need tree navigation). */
    async rawEvaluate(input, rulePath) {
        if (this.evaluator instanceof opa_wasm_evaluator_1.OpaWasmEvaluator) {
            // WASM compiled with `-e visor` entrypoint returns the full visor package tree.
            // Navigate to the specific rule subtree using rulePath segments.
            // e.g., 'visor/check/execute' → result.check.execute
            const tree = await this.evaluator.evaluate(input);
            return this.navigateWasmResult(tree, rulePath);
        }
        return this.evaluator.evaluate(input, rulePath);
    }
    /**
     * Navigate nested OPA WASM result tree to reach the specific rule's output.
     * The WASM entrypoint `-e visor` means the result root IS the visor package,
     * so we strip the `visor/` prefix and walk the remaining segments.
     */
    navigateWasmResult(result, rulePath) {
        if (!result || typeof result !== 'object') {
            return result;
        }
        let node = result;
        // Strip the 'visor/' prefix (matches our compilation entrypoint)
        for (const segment of rulePath.replace(/^visor\//, '').split('/')) {
            if (!node || typeof node !== 'object' || !(segment in node)) {
                return undefined; // path not found in result tree
            }
            node = node[segment];
        }
        return node;
    }
    /** Convert a raw OPA result into a decision, applying fallback on absence. */
    parseDecision(result) {
        if (result === undefined || result === null) {
            return {
                allowed: this.fallback === 'allow' || this.fallback === 'warn',
                warn: this.fallback === 'warn' ? true : undefined,
                reason: this.fallback === 'warn' ? 'audit: no policy result' : 'no policy result',
            };
        }
        // Absent `allowed` counts as allowed; only an explicit `false` denies.
        const decision = {
            allowed: result.allowed !== false,
            reason: result.reason,
        };
        if (result.capabilities) {
            decision.capabilities = result.capabilities;
        }
        return decision;
    }
}
312020
+ exports.OpaPolicyEngine = OpaPolicyEngine;
312021
+
312022
+
312023
+ /***/ }),
312024
+
312025
+ /***/ 8613:
312026
+ /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
312027
+
312028
+ "use strict";
312029
+
312030
+ /**
312031
+ * Copyright (c) ProbeLabs. All rights reserved.
312032
+ * Licensed under the Elastic License 2.0; you may not use this file except
312033
+ * in compliance with the Elastic License 2.0.
312034
+ */
312035
+ var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
312036
+ if (k2 === undefined) k2 = k;
312037
+ var desc = Object.getOwnPropertyDescriptor(m, k);
312038
+ if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
312039
+ desc = { enumerable: true, get: function() { return m[k]; } };
312040
+ }
312041
+ Object.defineProperty(o, k2, desc);
312042
+ }) : (function(o, m, k, k2) {
312043
+ if (k2 === undefined) k2 = k;
312044
+ o[k2] = m[k];
312045
+ }));
312046
+ var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
312047
+ Object.defineProperty(o, "default", { enumerable: true, value: v });
312048
+ }) : function(o, v) {
312049
+ o["default"] = v;
312050
+ });
312051
+ var __importStar = (this && this.__importStar) || (function () {
312052
+ var ownKeys = function(o) {
312053
+ ownKeys = Object.getOwnPropertyNames || function (o) {
312054
+ var ar = [];
312055
+ for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
312056
+ return ar;
312057
+ };
312058
+ return ownKeys(o);
312059
+ };
312060
+ return function (mod) {
312061
+ if (mod && mod.__esModule) return mod;
312062
+ var result = {};
312063
+ if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
312064
+ __setModuleDefault(result, mod);
312065
+ return result;
312066
+ };
312067
+ })();
312068
+ Object.defineProperty(exports, "__esModule", ({ value: true }));
312069
+ exports.OpaWasmEvaluator = void 0;
312070
+ const fs = __importStar(__nccwpck_require__(79896));
312071
+ const path = __importStar(__nccwpck_require__(16928));
312072
+ const opa_compiler_1 = __nccwpck_require__(628);
312073
+ /**
312074
+ * OPA WASM Evaluator - loads and evaluates OPA policies locally.
312075
+ *
312076
+ * Supports three input formats:
312077
+ * 1. Pre-compiled `.wasm` bundle — loaded directly (fastest startup)
312078
+ * 2. `.rego` files or directory — auto-compiled to WASM via `opa build` CLI
312079
+ * 3. Directory with `policy.wasm` inside — loaded directly
312080
+ *
312081
+ * Compilation and caching of .rego files is delegated to {@link OpaCompiler}.
312082
+ *
312083
+ * Requires:
312084
+ * - `@open-policy-agent/opa-wasm` npm package (optional dep)
312085
+ * - `opa` CLI on PATH (only when auto-compiling .rego files)
312086
+ */
312087
class OpaWasmEvaluator {
    policy = null;
    dataDocument = {};
    compiler = new opa_compiler_1.OpaCompiler();
    /**
     * Resolve the WASM policy bytes (compiling .rego if needed) and load them
     * via the optional `@open-policy-agent/opa-wasm` dependency.
     *
     * @param rulesPath - Path or array of paths to .wasm/.rego inputs.
     * @throws if the optional dependency is missing or the bundle cannot load.
     */
    async initialize(rulesPath) {
        const paths = Array.isArray(rulesPath) ? rulesPath : [rulesPath];
        const wasmBytes = await this.compiler.resolveWasmBytes(paths);
        try {
            // Use createRequire to load the optional dep at runtime without ncc bundling it.
            // `new Function('id', 'return require(id)')` fails in ncc bundles because
            // `require` is not in the `new Function` scope. `createRequire` works correctly
            // because it creates a real Node.js require rooted at the given path.
            // eslint-disable-next-line @typescript-eslint/no-var-requires
            const { createRequire } = __nccwpck_require__(73339);
            const runtimeRequire = createRequire(__filename);
            const opaWasm = runtimeRequire('@open-policy-agent/opa-wasm');
            const loadPolicy = opaWasm.loadPolicy || opaWasm.default?.loadPolicy;
            if (!loadPolicy) {
                throw new Error('loadPolicy not found in @open-policy-agent/opa-wasm');
            }
            this.policy = await loadPolicy(wasmBytes);
        }
        catch (err) {
            if (err?.code === 'MODULE_NOT_FOUND' || err?.code === 'ERR_MODULE_NOT_FOUND') {
                throw new Error('OPA WASM evaluator requires @open-policy-agent/opa-wasm. ' +
                    'Install it with: npm install @open-policy-agent/opa-wasm');
            }
            throw err;
        }
    }
    /**
     * Load external data from a JSON file to use as the OPA data document.
     * The loaded data will be passed to `policy.setData()` during evaluation,
     * making it available in Rego via `data.<key>`.
     *
     * @param dataPath - Path to a JSON file containing an object (<= 10MB).
     * @throws on traversal sequences, missing file, oversize file, or non-object JSON.
     */
    loadData(dataPath) {
        // Reject explicit traversal segments in the *input* path. The previous
        // implementation checked the resolved path, which never contains '..'
        // segments (path.resolve removes them), so real traversal input slipped
        // through while legitimate names like "data..json" were falsely rejected.
        if (String(dataPath).split(/[\\/]/).includes('..')) {
            throw new Error(`Data path contains traversal sequences: ${dataPath}`);
        }
        const resolved = path.resolve(dataPath);
        if (!fs.existsSync(resolved)) {
            throw new Error(`OPA data file not found: ${resolved}`);
        }
        // Guard against accidentally loading huge files into memory
        const stat = fs.statSync(resolved);
        if (stat.size > 10 * 1024 * 1024) {
            throw new Error(`OPA data file exceeds 10MB limit: ${resolved} (${stat.size} bytes)`);
        }
        const raw = fs.readFileSync(resolved, 'utf-8');
        try {
            const parsed = JSON.parse(raw);
            if (typeof parsed !== 'object' || parsed === null || Array.isArray(parsed)) {
                throw new Error('OPA data file must contain a JSON object (not an array or primitive)');
            }
            this.dataDocument = parsed;
        }
        catch (err) {
            // Re-throw our own shape error untouched; wrap raw JSON.parse failures.
            if (err.message.startsWith('OPA data file must')) {
                throw err;
            }
            throw new Error(`Failed to parse OPA data file ${resolved}: ${err.message}`);
        }
    }
    /**
     * Evaluate the loaded policy against an input document.
     *
     * @returns the first result in OPA's result set, or undefined when empty.
     * @throws if initialize() has not completed successfully.
     */
    async evaluate(input) {
        if (!this.policy) {
            throw new Error('OPA WASM evaluator not initialized');
        }
        this.policy.setData(this.dataDocument);
        const resultSet = this.policy.evaluate(input);
        if (Array.isArray(resultSet) && resultSet.length > 0) {
            return resultSet[0].result;
        }
        return undefined;
    }
    /** Release the WASM policy instance (best-effort close/free). */
    async shutdown() {
        if (this.policy) {
            // opa-wasm policy objects may have a close/free method for WASM cleanup
            if (typeof this.policy.close === 'function') {
                try {
                    this.policy.close();
                }
                catch { }
            }
            else if (typeof this.policy.free === 'function') {
                try {
                    this.policy.free();
                }
                catch { }
            }
        }
        this.policy = null;
    }
}
312179
+ exports.OpaWasmEvaluator = OpaWasmEvaluator;
312180
+
312181
+
312182
+ /***/ }),
312183
+
312184
+ /***/ 17117:
312185
+ /***/ ((__unused_webpack_module, exports) => {
312186
+
312187
+ "use strict";
312188
+
312189
+ /**
312190
+ * Copyright (c) ProbeLabs. All rights reserved.
312191
+ * Licensed under the Elastic License 2.0; you may not use this file except
312192
+ * in compliance with the Elastic License 2.0.
312193
+ */
312194
+ Object.defineProperty(exports, "__esModule", ({ value: true }));
312195
+ exports.PolicyInputBuilder = void 0;
312196
+ /**
312197
+ * Builds OPA-compatible input documents from engine context.
312198
+ *
312199
+ * Resolves actor roles from the `policy.roles` config section by matching
312200
+ * the actor's authorAssociation and login against role definitions.
312201
+ */
312202
class PolicyInputBuilder {
    roles;
    actor;
    repository;
    pullRequest;
    constructor(policyConfig, actor, repository, pullRequest) {
        this.roles = policyConfig.roles || {};
        this.actor = actor;
        this.repository = repository;
        this.pullRequest = pullRequest;
    }
    /**
     * True when the actor's identity satisfies one of the role's identity
     * criteria: author association, GitHub login, Slack user ID, or email
     * (email comparison is case-insensitive).
     */
    matchesIdentity(roleConfig) {
        const { authorAssociation, login, slack } = this.actor;
        if (roleConfig.author_association &&
            authorAssociation &&
            roleConfig.author_association.includes(authorAssociation)) {
            return true;
        }
        if (roleConfig.users && login && roleConfig.users.includes(login)) {
            return true;
        }
        // Slack user ID match
        if (roleConfig.slack_users &&
            slack?.userId &&
            roleConfig.slack_users.includes(slack.userId)) {
            return true;
        }
        // Email match (case-insensitive)
        if (roleConfig.emails && slack?.email) {
            const actorEmail = slack.email.toLowerCase();
            if (roleConfig.emails.some(e => e.toLowerCase() === actorEmail)) {
                return true;
            }
        }
        // Note: teams-based role resolution requires GitHub API access (read:org scope)
        // and is not yet implemented. If configured, the role will not match via teams.
        return false;
    }
    /**
     * slack_channels gate: when a role restricts channels, it only applies if
     * the actor triggered from one of the listed Slack channels.
     */
    passesChannelGate(roleConfig) {
        if (!roleConfig.slack_channels || roleConfig.slack_channels.length === 0) {
            return true;
        }
        const channelId = this.actor.slack?.channelId;
        return Boolean(channelId) && roleConfig.slack_channels.includes(channelId);
    }
    /** Resolve which roles apply to the current actor. */
    resolveRoles() {
        const matched = [];
        for (const [roleName, roleConfig] of Object.entries(this.roles)) {
            if (this.matchesIdentity(roleConfig) && this.passesChannelGate(roleConfig)) {
                matched.push(roleName);
            }
        }
        return matched;
    }
    /** Build the `actor` section shared by all input documents. */
    buildActor() {
        const actorDoc = {
            authorAssociation: this.actor.authorAssociation,
            login: this.actor.login,
            roles: this.resolveRoles(),
            isLocalMode: this.actor.isLocalMode,
        };
        if (this.actor.slack) {
            actorDoc.slack = this.actor.slack;
        }
        return actorDoc;
    }
    /** Input document for the check.execute policy scope. */
    forCheckExecution(check) {
        const { id, type, group, tags, criticality, sandbox, policy } = check;
        return {
            scope: 'check.execute',
            check: { id, type, group, tags, criticality, sandbox, policy },
            actor: this.buildActor(),
            repository: this.repository,
            pullRequest: this.pullRequest,
        };
    }
    /** Input document for the tool.invoke policy scope. */
    forToolInvocation(serverName, methodName, transport) {
        return {
            scope: 'tool.invoke',
            tool: { serverName, methodName, transport },
            actor: this.buildActor(),
            repository: this.repository,
            pullRequest: this.pullRequest,
        };
    }
    /** Input document for the capability.resolve policy scope. */
    forCapabilityResolve(checkId, capabilities) {
        return {
            scope: 'capability.resolve',
            check: { id: checkId, type: 'ai' },
            capability: capabilities,
            actor: this.buildActor(),
            repository: this.repository,
            pullRequest: this.pullRequest,
        };
    }
}
312304
+ exports.PolicyInputBuilder = PolicyInputBuilder;
312305
+
312306
+
312307
+ /***/ }),
312308
+
312309
+ /***/ 63737:
312310
+ /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
312311
+
312312
+ "use strict";
312313
+
312314
+ /**
312315
+ * Copyright (c) ProbeLabs. All rights reserved.
312316
+ * Licensed under the Elastic License 2.0; you may not use this file except
312317
+ * in compliance with the Elastic License 2.0.
312318
+ */
312319
+ var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
312320
+ if (k2 === undefined) k2 = k;
312321
+ var desc = Object.getOwnPropertyDescriptor(m, k);
312322
+ if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
312323
+ desc = { enumerable: true, get: function() { return m[k]; } };
312324
+ }
312325
+ Object.defineProperty(o, k2, desc);
312326
+ }) : (function(o, m, k, k2) {
312327
+ if (k2 === undefined) k2 = k;
312328
+ o[k2] = m[k];
312329
+ }));
312330
+ var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
312331
+ Object.defineProperty(o, "default", { enumerable: true, value: v });
312332
+ }) : function(o, v) {
312333
+ o["default"] = v;
312334
+ });
312335
+ var __importStar = (this && this.__importStar) || (function () {
312336
+ var ownKeys = function(o) {
312337
+ ownKeys = Object.getOwnPropertyNames || function (o) {
312338
+ var ar = [];
312339
+ for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
312340
+ return ar;
312341
+ };
312342
+ return ownKeys(o);
312343
+ };
312344
+ return function (mod) {
312345
+ if (mod && mod.__esModule) return mod;
312346
+ var result = {};
312347
+ if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
312348
+ __setModuleDefault(result, mod);
312349
+ return result;
312350
+ };
312351
+ })();
312352
+ Object.defineProperty(exports, "__esModule", ({ value: true }));
312353
+ exports.KnexStoreBackend = void 0;
312354
+ /**
312355
+ * Knex-backed schedule store for PostgreSQL, MySQL, and MSSQL (Enterprise)
312356
+ *
312357
+ * Uses Knex query builder for database-agnostic SQL. Same schema as SQLite backend
312358
+ * but with real distributed locking via row-level claims (claimed_by/claimed_at/lock_token).
312359
+ */
312360
+ const fs = __importStar(__nccwpck_require__(79896));
312361
+ const path = __importStar(__nccwpck_require__(16928));
312362
+ const uuid_1 = __nccwpck_require__(31914);
312363
+ const logger_1 = __nccwpck_require__(86999);
312364
/**
 * Coerce a DB numeric column to a number. Drivers may return numerics as
 * strings (e.g. Postgres bigint); null/undefined become undefined.
 */
function toNum(val) {
    if (val == null) {
        return undefined;
    }
    if (typeof val === 'string') {
        return parseInt(val, 10);
    }
    return val;
}
312369
/**
 * Parse a JSON column value, returning undefined for empty/null values or
 * malformed JSON instead of throwing.
 */
function safeJsonParse(value) {
    let parsed;
    if (value) {
        try {
            parsed = JSON.parse(value);
        }
        catch {
            // malformed JSON in the column → treat as absent
        }
    }
    return parsed;
}
312379
/**
 * Map a `triggers` DB row (snake_case columns) to the in-memory trigger
 * shape (camelCase). JSON columns are parsed leniently; null columns become
 * undefined so optional fields stay absent.
 */
function fromTriggerRow(row) {
    // Boolean columns arrive as true/false (Postgres) or 0/1 (MySQL/SQLite).
    const asBool = (v) => v === true || v === 1;
    return {
        id: row.id,
        creatorId: row.creator_id,
        creatorContext: row.creator_context ?? undefined,
        creatorName: row.creator_name ?? undefined,
        description: row.description ?? undefined,
        channels: safeJsonParse(row.channels),
        fromUsers: safeJsonParse(row.from_users),
        fromBots: asBool(row.from_bots),
        contains: safeJsonParse(row.contains),
        matchPattern: row.match_pattern ?? undefined,
        threads: row.threads,
        workflow: row.workflow,
        inputs: safeJsonParse(row.inputs),
        outputContext: safeJsonParse(row.output_context),
        status: row.status,
        enabled: asBool(row.enabled),
        createdAt: toNum(row.created_at),
    };
}
312400
/**
 * Map an in-memory trigger (camelCase) to a `triggers` insert row
 * (snake_case). Optional collections are JSON-serialized; absent optional
 * fields are stored as SQL NULL.
 */
function toTriggerInsertRow(trigger) {
    // Serialize an optional collection, storing NULL when absent.
    const asJson = (v) => (v ? JSON.stringify(v) : null);
    return {
        id: trigger.id,
        creator_id: trigger.creatorId,
        creator_context: trigger.creatorContext ?? null,
        creator_name: trigger.creatorName ?? null,
        description: trigger.description ?? null,
        channels: asJson(trigger.channels),
        from_users: asJson(trigger.fromUsers),
        from_bots: trigger.fromBots,
        contains: asJson(trigger.contains),
        match_pattern: trigger.matchPattern ?? null,
        threads: trigger.threads,
        workflow: trigger.workflow,
        inputs: asJson(trigger.inputs),
        output_context: asJson(trigger.outputContext),
        status: trigger.status,
        enabled: trigger.enabled,
        created_at: trigger.createdAt,
    };
}
312421
+ function fromDbRow(row) {
312422
+ return {
312423
+ id: row.id,
312424
+ creatorId: row.creator_id,
312425
+ creatorContext: row.creator_context ?? undefined,
312426
+ creatorName: row.creator_name ?? undefined,
312427
+ timezone: row.timezone,
312428
+ schedule: row.schedule_expr,
312429
+ runAt: toNum(row.run_at),
312430
+ isRecurring: row.is_recurring === true || row.is_recurring === 1,
312431
+ originalExpression: row.original_expression,
312432
+ workflow: row.workflow ?? undefined,
312433
+ workflowInputs: safeJsonParse(row.workflow_inputs),
312434
+ outputContext: safeJsonParse(row.output_context),
312435
+ status: row.status,
312436
+ createdAt: toNum(row.created_at),
312437
+ lastRunAt: toNum(row.last_run_at),
312438
+ nextRunAt: toNum(row.next_run_at),
312439
+ runCount: row.run_count,
312440
+ failureCount: row.failure_count,
312441
+ lastError: row.last_error ?? undefined,
312442
+ previousResponse: row.previous_response ?? undefined,
312443
+ };
312444
+ }
312445
+ function toInsertRow(schedule) {
312446
+ return {
312447
+ id: schedule.id,
312448
+ creator_id: schedule.creatorId,
312449
+ creator_context: schedule.creatorContext ?? null,
312450
+ creator_name: schedule.creatorName ?? null,
312451
+ timezone: schedule.timezone,
312452
+ schedule_expr: schedule.schedule,
312453
+ run_at: schedule.runAt ?? null,
312454
+ is_recurring: schedule.isRecurring,
312455
+ original_expression: schedule.originalExpression,
312456
+ workflow: schedule.workflow ?? null,
312457
+ workflow_inputs: schedule.workflowInputs ? JSON.stringify(schedule.workflowInputs) : null,
312458
+ output_context: schedule.outputContext ? JSON.stringify(schedule.outputContext) : null,
312459
+ status: schedule.status,
312460
+ created_at: schedule.createdAt,
312461
+ last_run_at: schedule.lastRunAt ?? null,
312462
+ next_run_at: schedule.nextRunAt ?? null,
312463
+ run_count: schedule.runCount,
312464
+ failure_count: schedule.failureCount,
312465
+ last_error: schedule.lastError ?? null,
312466
+ previous_response: schedule.previousResponse ?? null,
312467
+ };
312468
+ }
312469
+ /**
312470
+ * Enterprise Knex-backed store for PostgreSQL, MySQL, and MSSQL
312471
+ */
312472
+ class KnexStoreBackend {
312473
+ knex = null;
312474
+ driver;
312475
+ connection;
312476
+ constructor(driver, storageConfig, _haConfig) {
312477
+ this.driver = driver;
312478
+ this.connection = (storageConfig.connection || {});
312479
+ }
312480
+ async initialize() {
312481
+ // Load knex dynamically
312482
+ const { createRequire } = __nccwpck_require__(73339);
312483
+ const runtimeRequire = createRequire(__filename);
312484
+ let knexFactory;
312485
+ try {
312486
+ knexFactory = runtimeRequire('knex');
312487
+ }
312488
+ catch (err) {
312489
+ const code = err?.code;
312490
+ if (code === 'MODULE_NOT_FOUND' || code === 'ERR_MODULE_NOT_FOUND') {
312491
+ throw new Error('knex is required for PostgreSQL/MySQL/MSSQL schedule storage. ' +
312492
+ 'Install it with: npm install knex');
312493
+ }
312494
+ throw err;
312495
+ }
312496
+ const clientMap = {
312497
+ postgresql: 'pg',
312498
+ mysql: 'mysql2',
312499
+ mssql: 'tedious',
312500
+ };
312501
+ const client = clientMap[this.driver];
312502
+ // Build connection config
312503
+ let connection;
312504
+ if (this.connection.connection_string) {
312505
+ connection = this.connection.connection_string;
312506
+ }
312507
+ else if (this.driver === 'mssql') {
312508
+ connection = this.buildMssqlConnection();
312509
+ }
312510
+ else {
312511
+ connection = this.buildStandardConnection();
312512
+ }
312513
+ this.knex = knexFactory({
312514
+ client,
312515
+ connection,
312516
+ pool: {
312517
+ min: this.connection.pool?.min ?? 0,
312518
+ max: this.connection.pool?.max ?? 10,
312519
+ },
312520
+ });
312521
+ // Run schema migration
312522
+ await this.migrateSchema();
312523
+ logger_1.logger.info(`[KnexStore] Initialized (${this.driver})`);
312524
+ }
312525
+ buildStandardConnection() {
312526
+ return {
312527
+ host: this.connection.host || 'localhost',
312528
+ port: this.connection.port,
312529
+ database: this.connection.database || 'visor',
312530
+ user: this.connection.user,
312531
+ password: this.connection.password,
312532
+ ssl: this.resolveSslConfig(),
312533
+ };
312534
+ }
312535
+ buildMssqlConnection() {
312536
+ const ssl = this.connection.ssl;
312537
+ const sslEnabled = ssl === true || (typeof ssl === 'object' && ssl.enabled !== false);
312538
+ return {
312539
+ server: this.connection.host || 'localhost',
312540
+ port: this.connection.port,
312541
+ database: this.connection.database || 'visor',
312542
+ user: this.connection.user,
312543
+ password: this.connection.password,
312544
+ options: {
312545
+ encrypt: sslEnabled,
312546
+ trustServerCertificate: typeof ssl === 'object' ? ssl.reject_unauthorized === false : !sslEnabled,
312547
+ },
312548
+ };
312549
+ }
312550
+ resolveSslConfig() {
312551
+ const ssl = this.connection.ssl;
312552
+ if (ssl === false || ssl === undefined)
312553
+ return false;
312554
+ if (ssl === true)
312555
+ return { rejectUnauthorized: true };
312556
+ // Object config
312557
+ if (ssl.enabled === false)
312558
+ return false;
312559
+ const result = {
312560
+ rejectUnauthorized: ssl.reject_unauthorized !== false,
312561
+ };
312562
+ if (ssl.ca) {
312563
+ const caPath = this.validateSslPath(ssl.ca, 'CA certificate');
312564
+ result.ca = fs.readFileSync(caPath, 'utf8');
312565
+ }
312566
+ if (ssl.cert) {
312567
+ const certPath = this.validateSslPath(ssl.cert, 'client certificate');
312568
+ result.cert = fs.readFileSync(certPath, 'utf8');
312569
+ }
312570
+ if (ssl.key) {
312571
+ const keyPath = this.validateSslPath(ssl.key, 'client key');
312572
+ result.key = fs.readFileSync(keyPath, 'utf8');
312573
+ }
312574
+ return result;
312575
+ }
312576
+ validateSslPath(filePath, label) {
312577
+ const resolved = path.resolve(filePath);
312578
+ if (resolved !== path.normalize(resolved)) {
312579
+ throw new Error(`SSL ${label} path contains invalid sequences: ${filePath}`);
312580
+ }
312581
+ if (!fs.existsSync(resolved)) {
312582
+ throw new Error(`SSL ${label} not found: ${filePath}`);
312583
+ }
312584
+ return resolved;
312585
+ }
312586
+ async shutdown() {
312587
+ if (this.knex) {
312588
+ await this.knex.destroy();
312589
+ this.knex = null;
312590
+ }
312591
+ }
312592
+ async migrateSchema() {
312593
+ const knex = this.getKnex();
312594
+ const exists = await knex.schema.hasTable('schedules');
312595
+ if (!exists) {
312596
+ await knex.schema.createTable('schedules', table => {
312597
+ table.string('id', 36).primary();
312598
+ table.string('creator_id', 255).notNullable().index();
312599
+ table.string('creator_context', 255);
312600
+ table.string('creator_name', 255);
312601
+ table.string('timezone', 64).notNullable().defaultTo('UTC');
312602
+ table.string('schedule_expr', 255);
312603
+ table.bigInteger('run_at');
312604
+ table.boolean('is_recurring').notNullable();
312605
+ table.text('original_expression');
312606
+ table.string('workflow', 255);
312607
+ table.text('workflow_inputs');
312608
+ table.text('output_context');
312609
+ table.string('status', 20).notNullable().index();
312610
+ table.bigInteger('created_at').notNullable();
312611
+ table.bigInteger('last_run_at');
312612
+ table.bigInteger('next_run_at');
312613
+ table.integer('run_count').notNullable().defaultTo(0);
312614
+ table.integer('failure_count').notNullable().defaultTo(0);
312615
+ table.text('last_error');
312616
+ table.text('previous_response');
312617
+ table.index(['status', 'next_run_at']);
312618
+ });
312619
+ }
312620
+ // Create message_triggers table
312621
+ const triggersExist = await knex.schema.hasTable('message_triggers');
312622
+ if (!triggersExist) {
312623
+ await knex.schema.createTable('message_triggers', table => {
312624
+ table.string('id', 36).primary();
312625
+ table.string('creator_id', 255).notNullable().index();
312626
+ table.string('creator_context', 255);
312627
+ table.string('creator_name', 255);
312628
+ table.text('description');
312629
+ table.text('channels'); // JSON array
312630
+ table.text('from_users'); // JSON array
312631
+ table.boolean('from_bots').notNullable().defaultTo(false);
312632
+ table.text('contains'); // JSON array
312633
+ table.text('match_pattern');
312634
+ table.string('threads', 20).notNullable().defaultTo('any');
312635
+ table.string('workflow', 255).notNullable();
312636
+ table.text('inputs'); // JSON
312637
+ table.text('output_context'); // JSON
312638
+ table.string('status', 20).notNullable().defaultTo('active').index();
312639
+ table.boolean('enabled').notNullable().defaultTo(true);
312640
+ table.bigInteger('created_at').notNullable();
312641
+ });
312642
+ }
312643
+ // Create scheduler_locks table for distributed locking
312644
+ const locksExist = await knex.schema.hasTable('scheduler_locks');
312645
+ if (!locksExist) {
312646
+ await knex.schema.createTable('scheduler_locks', table => {
312647
+ table.string('lock_id', 255).primary();
312648
+ table.string('node_id', 255).notNullable();
312649
+ table.string('lock_token', 36).notNullable();
312650
+ table.bigInteger('acquired_at').notNullable();
312651
+ table.bigInteger('expires_at').notNullable();
312652
+ });
312653
+ }
312654
+ }
312655
+ getKnex() {
312656
+ if (!this.knex) {
312657
+ throw new Error('[KnexStore] Not initialized. Call initialize() first.');
312658
+ }
312659
+ return this.knex;
312660
+ }
312661
+ // --- CRUD ---
312662
+ async create(schedule) {
312663
+ const knex = this.getKnex();
312664
+ const newSchedule = {
312665
+ ...schedule,
312666
+ id: (0, uuid_1.v4)(),
312667
+ createdAt: Date.now(),
312668
+ runCount: 0,
312669
+ failureCount: 0,
312670
+ status: 'active',
312671
+ };
312672
+ await knex('schedules').insert(toInsertRow(newSchedule));
312673
+ logger_1.logger.info(`[KnexStore] Created schedule ${newSchedule.id} for user ${newSchedule.creatorId}`);
312674
+ return newSchedule;
312675
+ }
312676
+ async importSchedule(schedule) {
312677
+ const knex = this.getKnex();
312678
+ const existing = await knex('schedules').where('id', schedule.id).first();
312679
+ if (existing)
312680
+ return; // Already imported (idempotent)
312681
+ await knex('schedules').insert(toInsertRow(schedule));
312682
+ }
312683
+ async get(id) {
312684
+ const knex = this.getKnex();
312685
+ const row = await knex('schedules').where('id', id).first();
312686
+ return row ? fromDbRow(row) : undefined;
312687
+ }
312688
+ async update(id, patch) {
312689
+ const knex = this.getKnex();
312690
+ const existing = await knex('schedules').where('id', id).first();
312691
+ if (!existing)
312692
+ return undefined;
312693
+ const current = fromDbRow(existing);
312694
+ const updated = { ...current, ...patch, id: current.id };
312695
+ const row = toInsertRow(updated);
312696
+ // Remove id from update (PK cannot change)
312697
+ delete row.id;
312698
+ await knex('schedules').where('id', id).update(row);
312699
+ return updated;
312700
+ }
312701
+ async delete(id) {
312702
+ const knex = this.getKnex();
312703
+ const deleted = await knex('schedules').where('id', id).del();
312704
+ if (deleted > 0) {
312705
+ logger_1.logger.info(`[KnexStore] Deleted schedule ${id}`);
312706
+ return true;
312707
+ }
312708
+ return false;
312709
+ }
312710
+ // --- Queries ---
312711
+ async getByCreator(creatorId) {
312712
+ const knex = this.getKnex();
312713
+ const rows = await knex('schedules').where('creator_id', creatorId);
312714
+ return rows.map((r) => fromDbRow(r));
312715
+ }
312716
+ async getActiveSchedules() {
312717
+ const knex = this.getKnex();
312718
+ const rows = await knex('schedules').where('status', 'active');
312719
+ return rows.map((r) => fromDbRow(r));
312720
+ }
312721
+ async getDueSchedules(now) {
312722
+ const ts = now ?? Date.now();
312723
+ const knex = this.getKnex();
312724
+ // MSSQL uses 1/0 for booleans
312725
+ const bFalse = this.driver === 'mssql' ? 0 : false;
312726
+ const bTrue = this.driver === 'mssql' ? 1 : true;
312727
+ const rows = await knex('schedules')
312728
+ .where('status', 'active')
312729
+ .andWhere(function () {
312730
+ this.where(function () {
312731
+ this.where('is_recurring', bFalse)
312732
+ .whereNotNull('run_at')
312733
+ .where('run_at', '<=', ts);
312734
+ }).orWhere(function () {
312735
+ this.where('is_recurring', bTrue)
312736
+ .whereNotNull('next_run_at')
312737
+ .where('next_run_at', '<=', ts);
312738
+ });
312739
+ });
312740
+ return rows.map((r) => fromDbRow(r));
312741
+ }
312742
+ async findByWorkflow(creatorId, workflowName) {
312743
+ const knex = this.getKnex();
312744
+ const escaped = workflowName.toLowerCase().replace(/[%_\\]/g, '\\$&');
312745
+ const pattern = `%${escaped}%`;
312746
+ const rows = await knex('schedules')
312747
+ .where('creator_id', creatorId)
312748
+ .where('status', 'active')
312749
+ .whereRaw("LOWER(workflow) LIKE ? ESCAPE '\\'", [pattern]);
312750
+ return rows.map((r) => fromDbRow(r));
312751
+ }
312752
+ async getAll() {
312753
+ const knex = this.getKnex();
312754
+ const rows = await knex('schedules');
312755
+ return rows.map((r) => fromDbRow(r));
312756
+ }
312757
+ async getStats() {
312758
+ const knex = this.getKnex();
312759
+ // MSSQL uses 1/0 for booleans; PostgreSQL/MySQL accept both true/1
312760
+ const boolTrue = this.driver === 'mssql' ? '1' : 'true';
312761
+ const boolFalse = this.driver === 'mssql' ? '0' : 'false';
312762
+ const result = await knex('schedules')
312763
+ .select(knex.raw('COUNT(*) as total'), knex.raw("SUM(CASE WHEN status = 'active' THEN 1 ELSE 0 END) as active"), knex.raw("SUM(CASE WHEN status = 'paused' THEN 1 ELSE 0 END) as paused"), knex.raw("SUM(CASE WHEN status = 'completed' THEN 1 ELSE 0 END) as completed"), knex.raw("SUM(CASE WHEN status = 'failed' THEN 1 ELSE 0 END) as failed"), knex.raw(`SUM(CASE WHEN is_recurring = ${boolTrue} THEN 1 ELSE 0 END) as recurring`), knex.raw(`SUM(CASE WHEN is_recurring = ${boolFalse} THEN 1 ELSE 0 END) as one_time`))
312764
+ .first();
312765
+ return {
312766
+ total: Number(result.total) || 0,
312767
+ active: Number(result.active) || 0,
312768
+ paused: Number(result.paused) || 0,
312769
+ completed: Number(result.completed) || 0,
312770
+ failed: Number(result.failed) || 0,
312771
+ recurring: Number(result.recurring) || 0,
312772
+ oneTime: Number(result.one_time) || 0,
312773
+ };
312774
+ }
312775
+ async validateLimits(creatorId, isRecurring, limits) {
312776
+ const knex = this.getKnex();
312777
+ if (limits.maxGlobal) {
312778
+ const result = await knex('schedules').count('* as cnt').first();
312779
+ if (Number(result?.cnt) >= limits.maxGlobal) {
312780
+ throw new Error(`Global schedule limit reached (${limits.maxGlobal})`);
312781
+ }
312782
+ }
312783
+ if (limits.maxPerUser) {
312784
+ const result = await knex('schedules')
312785
+ .where('creator_id', creatorId)
312786
+ .count('* as cnt')
312787
+ .first();
312788
+ if (Number(result?.cnt) >= limits.maxPerUser) {
312789
+ throw new Error(`You have reached the maximum number of schedules (${limits.maxPerUser})`);
312790
+ }
312791
+ }
312792
+ if (isRecurring && limits.maxRecurringPerUser) {
312793
+ const bTrue = this.driver === 'mssql' ? 1 : true;
312794
+ const result = await knex('schedules')
312795
+ .where('creator_id', creatorId)
312796
+ .where('is_recurring', bTrue)
312797
+ .count('* as cnt')
312798
+ .first();
312799
+ if (Number(result?.cnt) >= limits.maxRecurringPerUser) {
312800
+ throw new Error(`You have reached the maximum number of recurring schedules (${limits.maxRecurringPerUser})`);
312801
+ }
312802
+ }
312803
+ }
312804
+ // --- HA Distributed Locking (via scheduler_locks table) ---
312805
+ async tryAcquireLock(lockId, nodeId, ttlSeconds) {
312806
+ const knex = this.getKnex();
312807
+ const now = Date.now();
312808
+ const expiresAt = now + ttlSeconds * 1000;
312809
+ const token = (0, uuid_1.v4)();
312810
+ // Step 1: Try to claim an existing expired lock
312811
+ const updated = await knex('scheduler_locks')
312812
+ .where('lock_id', lockId)
312813
+ .where('expires_at', '<', now)
312814
+ .update({
312815
+ node_id: nodeId,
312816
+ lock_token: token,
312817
+ acquired_at: now,
312818
+ expires_at: expiresAt,
312819
+ });
312820
+ if (updated > 0)
312821
+ return token;
312822
+ // Step 2: Try to INSERT a new lock row
312823
+ try {
312824
+ await knex('scheduler_locks').insert({
312825
+ lock_id: lockId,
312826
+ node_id: nodeId,
312827
+ lock_token: token,
312828
+ acquired_at: now,
312829
+ expires_at: expiresAt,
312830
+ });
312831
+ return token;
312832
+ }
312833
+ catch {
312834
+ // Unique constraint violation — another node holds the lock
312835
+ return null;
312836
+ }
312837
+ }
312838
+ async releaseLock(lockId, lockToken) {
312839
+ const knex = this.getKnex();
312840
+ await knex('scheduler_locks').where('lock_id', lockId).where('lock_token', lockToken).del();
312841
+ }
312842
+ async renewLock(lockId, lockToken, ttlSeconds) {
312843
+ const knex = this.getKnex();
312844
+ const now = Date.now();
312845
+ const expiresAt = now + ttlSeconds * 1000;
312846
+ const updated = await knex('scheduler_locks')
312847
+ .where('lock_id', lockId)
312848
+ .where('lock_token', lockToken)
312849
+ .update({ acquired_at: now, expires_at: expiresAt });
312850
+ return updated > 0;
312851
+ }
312852
+ async flush() {
312853
+ // No-op for server-based backends
312854
+ }
312855
+ // --- Message Trigger CRUD ---
312856
+ async createTrigger(trigger) {
312857
+ const knex = this.getKnex();
312858
+ const newTrigger = {
312859
+ ...trigger,
312860
+ id: (0, uuid_1.v4)(),
312861
+ createdAt: Date.now(),
312862
+ };
312863
+ await knex('message_triggers').insert(toTriggerInsertRow(newTrigger));
312864
+ logger_1.logger.info(`[KnexStore] Created trigger ${newTrigger.id} for user ${newTrigger.creatorId}`);
312865
+ return newTrigger;
312866
+ }
312867
+ async getTrigger(id) {
312868
+ const knex = this.getKnex();
312869
+ const row = await knex('message_triggers').where('id', id).first();
312870
+ return row ? fromTriggerRow(row) : undefined;
312871
+ }
312872
+ async updateTrigger(id, patch) {
312873
+ const knex = this.getKnex();
312874
+ const existing = await knex('message_triggers').where('id', id).first();
312875
+ if (!existing)
312876
+ return undefined;
312877
+ const current = fromTriggerRow(existing);
312878
+ const updated = {
312879
+ ...current,
312880
+ ...patch,
312881
+ id: current.id,
312882
+ createdAt: current.createdAt,
312883
+ };
312884
+ const row = toTriggerInsertRow(updated);
312885
+ delete row.id;
312886
+ await knex('message_triggers').where('id', id).update(row);
312887
+ return updated;
312888
+ }
312889
+ async deleteTrigger(id) {
312890
+ const knex = this.getKnex();
312891
+ const deleted = await knex('message_triggers').where('id', id).del();
312892
+ if (deleted > 0) {
312893
+ logger_1.logger.info(`[KnexStore] Deleted trigger ${id}`);
312894
+ return true;
312895
+ }
312896
+ return false;
312897
+ }
312898
+ async getTriggersByCreator(creatorId) {
312899
+ const knex = this.getKnex();
312900
+ const rows = await knex('message_triggers').where('creator_id', creatorId);
312901
+ return rows.map((r) => fromTriggerRow(r));
312902
+ }
312903
+ async getActiveTriggers() {
312904
+ const knex = this.getKnex();
312905
+ const rows = await knex('message_triggers')
312906
+ .where('status', 'active')
312907
+ .where('enabled', this.driver === 'mssql' ? 1 : true);
312908
+ return rows.map((r) => fromTriggerRow(r));
312909
+ }
312910
+ }
312911
+ exports.KnexStoreBackend = KnexStoreBackend;
312912
+
312913
+
311072
312914
  /***/ }),
311073
312915
 
311074
312916
  /***/ 83864:
@@ -312972,17 +314814,28 @@ class SlackFrontend {
312972
314814
  const message = ev?.error?.message || 'Execution error';
312973
314815
  await this.maybePostError(ctx, 'Check failed', message, ev?.checkId).catch(() => { });
312974
314816
  }));
312975
- // On terminal state, replace 👀 with 👍 if we acked an inbound Slack message
314817
+ // On terminal state, replace 👀 with 👍 if we acked an inbound Slack message.
314818
+ // For error states, always post an error notice (bypass errorNotified flag)
314819
+ // so the user is never left without feedback when a run fails.
312976
314820
  this.subs.push(bus.on('StateTransition', async (env) => {
312977
314821
  const ev = (env && env.payload) || env;
312978
- if (ev && (ev.to === 'Completed' || ev.to === 'Error')) {
314822
+ if (ev && ev.to === 'Completed') {
314823
+ await this.finalizeReactions(ctx).catch(() => { });
314824
+ }
314825
+ else if (ev && ev.to === 'Error') {
314826
+ if (!this.errorNotified) {
314827
+ await this.maybePostError(ctx, 'Run failed', 'Workflow finished with errors').catch(() => { });
314828
+ }
312979
314829
  await this.finalizeReactions(ctx).catch(() => { });
312980
314830
  }
312981
314831
  }));
312982
314832
  this.subs.push(bus.on('Shutdown', async (env) => {
312983
314833
  const ev = (env && env.payload) || env;
312984
314834
  const message = ev?.error?.message || 'Fatal error';
312985
- await this.maybePostError(ctx, 'Run failed', message).catch(() => { });
314835
+ // Always post shutdown errors bypass errorNotified since a fatal
314836
+ // shutdown is critical and must reach the user even if a previous
314837
+ // (possibly unrelated) check already posted a non-fatal error.
314838
+ await this.forcePostError(ctx, 'Run failed', message).catch(() => { });
312986
314839
  }));
312987
314840
  // Add 👀 acknowledgement as soon as first check is scheduled for Slack-driven runs
312988
314841
  this.subs.push(bus.on('CheckScheduled', async () => {
@@ -313111,6 +314964,16 @@ class SlackFrontend {
313111
314964
  async maybePostError(ctx, title, message, checkId) {
313112
314965
  if (this.errorNotified)
313113
314966
  return;
314967
+ return this.postErrorToSlack(ctx, title, message, checkId);
314968
+ }
314969
+ /**
314970
+ * Post error to Slack regardless of errorNotified flag.
314971
+ * Used for fatal/shutdown errors that must always reach the user.
314972
+ */
314973
+ async forcePostError(ctx, title, message, checkId) {
314974
+ return this.postErrorToSlack(ctx, title, message, checkId);
314975
+ }
314976
+ async postErrorToSlack(ctx, title, message, checkId) {
313114
314977
  const slack = this.getSlack(ctx);
313115
314978
  if (!slack)
313116
314979
  return;
@@ -323677,6 +325540,35 @@ class OutputFormatters {
323677
325540
  exports.OutputFormatters = OutputFormatters;
323678
325541
 
323679
325542
 
325543
+ /***/ }),
325544
+
325545
+ /***/ 93866:
325546
+ /***/ ((__unused_webpack_module, exports) => {
325547
+
325548
+ "use strict";
325549
+
325550
+ Object.defineProperty(exports, "__esModule", ({ value: true }));
325551
+ exports.DefaultPolicyEngine = void 0;
325552
+ /**
325553
+ * Default (no-op) policy engine — always allows everything.
325554
+ * Used when no enterprise license is present or policy is disabled.
325555
+ */
325556
+ class DefaultPolicyEngine {
325557
+ async initialize(_config) { }
325558
+ async evaluateCheckExecution(_checkId, _checkConfig) {
325559
+ return { allowed: true };
325560
+ }
325561
+ async evaluateToolInvocation(_serverName, _methodName, _transport) {
325562
+ return { allowed: true };
325563
+ }
325564
+ async evaluateCapabilities(_checkId, _capabilities) {
325565
+ return { allowed: true };
325566
+ }
325567
+ async shutdown() { }
325568
+ }
325569
+ exports.DefaultPolicyEngine = DefaultPolicyEngine;
325570
+
325571
+
323680
325572
  /***/ }),
323681
325573
 
323682
325574
  /***/ 96611:
@@ -325541,46 +327433,51 @@ class AICheckProvider extends check_provider_interface_1.CheckProvider {
325541
327433
  // Check-level AI configuration (ai object)
325542
327434
  if (config.ai) {
325543
327435
  const aiAny = config.ai;
327436
+ // Helper to resolve Liquid templates in ai config values (e.g., "{{ inputs.max_iterations }}")
327437
+ const resolveLiquid = async (val) => {
327438
+ if (typeof val !== 'string' || !val.includes('{{'))
327439
+ return undefined;
327440
+ try {
327441
+ return (await this.liquidEngine.parseAndRender(val, {
327442
+ inputs: config.workflowInputs || {},
327443
+ env: process.env,
327444
+ })).trim();
327445
+ }
327446
+ catch {
327447
+ return undefined;
327448
+ }
327449
+ };
327450
+ // Helper to resolve a boolean that may be a Liquid template string
327451
+ const resolveBool = async (val) => {
327452
+ const resolved = (await resolveLiquid(val)) ?? val;
327453
+ if (typeof resolved === 'boolean')
327454
+ return resolved;
327455
+ if (typeof resolved === 'string')
327456
+ return resolved === 'true';
327457
+ return !!resolved;
327458
+ };
325544
327459
  const skipTransport = aiAny.skip_transport_context === true;
325545
327460
  // Only set properties that are actually defined to avoid overriding env vars
325546
327461
  if (aiAny.apiKey !== undefined) {
325547
327462
  aiConfig.apiKey = aiAny.apiKey;
325548
327463
  }
325549
327464
  if (aiAny.model !== undefined) {
325550
- let modelVal = String(aiAny.model);
325551
- if (modelVal.includes('{{')) {
325552
- try {
325553
- const rendered = await this.liquidEngine.parseAndRender(modelVal, {
325554
- inputs: config.workflowInputs || {},
325555
- env: process.env,
325556
- });
325557
- modelVal = rendered.trim();
325558
- }
325559
- catch { }
325560
- }
327465
+ const modelVal = (await resolveLiquid(aiAny.model)) ?? String(aiAny.model);
325561
327466
  if (modelVal) {
325562
327467
  aiConfig.model = modelVal;
325563
327468
  }
325564
327469
  }
325565
327470
  if (aiAny.timeout !== undefined) {
325566
- aiConfig.timeout = aiAny.timeout;
327471
+ const resolvedTimeout = (await resolveLiquid(aiAny.timeout)) ?? aiAny.timeout;
327472
+ aiConfig.timeout = Number(resolvedTimeout);
325567
327473
  }
325568
327474
  if (aiAny.max_iterations !== undefined || aiAny.maxIterations !== undefined) {
325569
327475
  const raw = aiAny.max_iterations ?? aiAny.maxIterations;
325570
- aiConfig.maxIterations = Number(raw);
327476
+ const resolved = (await resolveLiquid(raw)) ?? raw;
327477
+ aiConfig.maxIterations = Number(resolved);
325571
327478
  }
325572
327479
  if (aiAny.provider !== undefined) {
325573
- let providerVal = String(aiAny.provider);
325574
- if (providerVal.includes('{{')) {
325575
- try {
325576
- const rendered = await this.liquidEngine.parseAndRender(providerVal, {
325577
- inputs: config.workflowInputs || {},
325578
- env: process.env,
325579
- });
325580
- providerVal = rendered.trim();
325581
- }
325582
- catch { }
325583
- }
327480
+ const providerVal = (await resolveLiquid(aiAny.provider)) ?? String(aiAny.provider);
325584
327481
  if (providerVal) {
325585
327482
  aiConfig.provider = providerVal;
325586
327483
  }
@@ -325589,36 +327486,40 @@ class AICheckProvider extends check_provider_interface_1.CheckProvider {
325589
327486
  aiConfig.debug = aiAny.debug;
325590
327487
  }
325591
327488
  if (aiAny.enableDelegate !== undefined) {
325592
- aiConfig.enableDelegate = aiAny.enableDelegate;
327489
+ aiConfig.enableDelegate = await resolveBool(aiAny.enableDelegate);
325593
327490
  }
325594
327491
  if (aiAny.enableTasks !== undefined) {
325595
- aiConfig.enableTasks = aiAny.enableTasks;
327492
+ aiConfig.enableTasks = await resolveBool(aiAny.enableTasks);
325596
327493
  }
325597
327494
  if (aiAny.enableExecutePlan !== undefined) {
325598
- aiConfig.enableExecutePlan = aiAny.enableExecutePlan;
327495
+ aiConfig.enableExecutePlan = await resolveBool(aiAny.enableExecutePlan);
325599
327496
  }
325600
327497
  if (aiAny.allowEdit !== undefined) {
325601
- aiConfig.allowEdit = aiAny.allowEdit;
327498
+ aiConfig.allowEdit = await resolveBool(aiAny.allowEdit);
325602
327499
  }
325603
327500
  if (aiAny.allowedTools !== undefined) {
325604
327501
  aiConfig.allowedTools = aiAny.allowedTools;
325605
327502
  this.logDebug(`[AI Provider] Read allowedTools from YAML: ${JSON.stringify(aiAny.allowedTools)}`);
325606
327503
  }
325607
327504
  if (aiAny.disableTools !== undefined) {
325608
- aiConfig.disableTools = aiAny.disableTools;
327505
+ aiConfig.disableTools = await resolveBool(aiAny.disableTools);
325609
327506
  this.logDebug(`[AI Provider] Read disableTools from YAML: ${aiAny.disableTools}`);
325610
327507
  }
325611
327508
  if (aiAny.allowBash !== undefined) {
325612
- aiConfig.allowBash = aiAny.allowBash;
327509
+ aiConfig.allowBash = await resolveBool(aiAny.allowBash);
325613
327510
  }
325614
327511
  if (aiAny.bashConfig !== undefined) {
325615
327512
  aiConfig.bashConfig = aiAny.bashConfig;
325616
327513
  }
325617
327514
  if (aiAny.search_delegate_provider !== undefined) {
325618
- aiConfig.search_delegate_provider = aiAny.search_delegate_provider;
327515
+ aiConfig.search_delegate_provider =
327516
+ (await resolveLiquid(aiAny.search_delegate_provider)) ??
327517
+ aiAny.search_delegate_provider;
325619
327518
  }
325620
327519
  if (aiAny.search_delegate_model !== undefined) {
325621
- aiConfig.search_delegate_model = aiAny.search_delegate_model;
327520
+ aiConfig.search_delegate_model =
327521
+ (await resolveLiquid(aiAny.search_delegate_model)) ??
327522
+ aiAny.search_delegate_model;
325622
327523
  }
325623
327524
  if (aiAny.completion_prompt !== undefined) {
325624
327525
  aiConfig.completionPrompt = aiAny.completion_prompt;
@@ -325758,6 +327659,11 @@ class AICheckProvider extends check_provider_interface_1.CheckProvider {
325758
327659
  if (config.ai_max_iterations !== undefined && aiConfig.maxIterations === undefined) {
325759
327660
  aiConfig.maxIterations = config.ai_max_iterations;
325760
327661
  }
327662
+ // Default to 100 iterations if not configured (ProbeAgent's own default is 30)
327663
+ // Guard against NaN from template rendering (e.g., Number("{{ ... }}") = NaN)
327664
+ if (aiConfig.maxIterations === undefined || Number.isNaN(aiConfig.maxIterations)) {
327665
+ aiConfig.maxIterations = 100;
327666
+ }
325761
327667
  // Pass shared concurrency limiter for global AI call gating
325762
327668
  const sharedLimiter = sessionInfo?._parentContext?.sharedConcurrencyLimiter;
325763
327669
  if (sharedLimiter) {
@@ -346846,7 +348752,7 @@ class StateMachineExecutionEngine {
346846
348752
  try {
346847
348753
  logger_1.logger.debug(`[PolicyEngine] Loading enterprise policy engine (engine=${configWithTagFilter.policy.engine})`);
346848
348754
  // @ts-ignore — enterprise/ may not exist in OSS builds (caught at runtime)
346849
- const { loadEnterprisePolicyEngine } = await Promise.resolve().then(() => __importStar(__nccwpck_require__(7065)));
348755
+ const { loadEnterprisePolicyEngine } = await Promise.resolve().then(() => __importStar(__nccwpck_require__(87068)));
346850
348756
  context.policyEngine = await loadEnterprisePolicyEngine(configWithTagFilter.policy);
346851
348757
  logger_1.logger.debug(`[PolicyEngine] Initialized: ${context.policyEngine?.constructor?.name || 'unknown'}`);
346852
348758
  }
@@ -358601,7 +360507,7 @@ async function initTelemetry(opts = {}) {
358601
360507
  const path = __nccwpck_require__(16928);
358602
360508
  const outDir = opts.file?.dir ||
358603
360509
  process.env.VISOR_TRACE_DIR ||
358604
- __nccwpck_require__.ab + "traces";
360510
+ path.join(process.cwd(), 'output', 'traces');
358605
360511
  fs.mkdirSync(outDir, { recursive: true });
358606
360512
  const ts = new Date().toISOString().replace(/[:.]/g, '-');
358607
360513
  process.env.VISOR_FALLBACK_TRACE_FILE = path.join(outDir, `run-${ts}.ndjson`);
@@ -358851,7 +360757,7 @@ async function shutdownTelemetry() {
358851
360757
  if (process.env.VISOR_TRACE_REPORT === 'true') {
358852
360758
  const fs = __nccwpck_require__(79896);
358853
360759
  const path = __nccwpck_require__(16928);
358854
- const outDir = process.env.VISOR_TRACE_DIR || __nccwpck_require__.ab + "traces";
360760
+ const outDir = process.env.VISOR_TRACE_DIR || path.join(process.cwd(), 'output', 'traces');
358855
360761
  if (!fs.existsSync(outDir))
358856
360762
  fs.mkdirSync(outDir, { recursive: true });
358857
360763
  const ts = new Date().toISOString().replace(/[:.]/g, '-');
@@ -359402,7 +361308,7 @@ function __getOrCreateNdjsonPath() {
359402
361308
  fs.mkdirSync(dir, { recursive: true });
359403
361309
  return __ndjsonPath;
359404
361310
  }
359405
- const outDir = process.env.VISOR_TRACE_DIR || __nccwpck_require__.ab + "traces";
361311
+ const outDir = process.env.VISOR_TRACE_DIR || path.join(process.cwd(), 'output', 'traces');
359406
361312
  if (!fs.existsSync(outDir))
359407
361313
  fs.mkdirSync(outDir, { recursive: true });
359408
361314
  if (!__ndjsonPath) {
@@ -372640,9 +374546,39 @@ class WorkspaceManager {
372640
374546
  const mainProjectName = sanitizePathComponent(configuredMainProjectName || this.extractProjectName(this.originalPath));
372641
374547
  this.usedNames.add(mainProjectName);
372642
374548
  // Create worktree for main project
372643
- const mainProjectPath = path.join(this.workspacePath, mainProjectName);
374549
+ let mainProjectPath = path.join(this.workspacePath, mainProjectName);
372644
374550
  // Check if original path is a git repository
372645
374551
  const isGitRepo = await this.isGitRepository(this.originalPath);
374552
+ // Prune stale worktree references (from previous runs that were cleaned up).
374553
+ // Without this, the worktree list grows unboundedly and can slow git operations.
374554
+ if (isGitRepo) {
374555
+ try {
374556
+ await command_executor_1.commandExecutor.execute(`git -C ${shellEscape(this.originalPath)} worktree prune`, {
374557
+ timeout: 15000,
374558
+ });
374559
+ }
374560
+ catch {
374561
+ // Best-effort — don't fail workspace init if prune fails
374562
+ }
374563
+ }
374564
+ // Detect if originalPath is a subdirectory of a git repo.
374565
+ // `git worktree add` always checks out the full repo, so if the user runs
374566
+ // visor from a subdirectory (e.g. /repo/subdir), the worktree will contain
374567
+ // the entire repo and we need to adjust mainProjectPath to point to the
374568
+ // corresponding subdirectory inside the worktree.
374569
+ let subdirOffset = '';
374570
+ if (isGitRepo) {
374571
+ const gitRootResult = await command_executor_1.commandExecutor.execute(`git -C ${shellEscape(this.originalPath)} rev-parse --show-toplevel`, { timeout: 5000 });
374572
+ if (gitRootResult.exitCode === 0) {
374573
+ const gitRoot = gitRootResult.stdout.trim();
374574
+ const normalizedOriginal = path.resolve(this.originalPath);
374575
+ const normalizedRoot = path.resolve(gitRoot);
374576
+ if (normalizedOriginal !== normalizedRoot) {
374577
+ subdirOffset = path.relative(normalizedRoot, normalizedOriginal);
374578
+ logger_1.logger.info(`[Workspace] Original path is a subdirectory of git repo: ${subdirOffset}`);
374579
+ }
374580
+ }
374581
+ }
372646
374582
  if (isGitRepo) {
372647
374583
  // Check if main project worktree already exists (reused workspace, e.g. Slack thread)
372648
374584
  const exists = await this.pathExists(mainProjectPath);
@@ -372680,6 +374616,23 @@ class WorkspaceManager {
372680
374616
  }
372681
374617
  }
372682
374618
  }
374619
+ // Remember the worktree root before any subdirectory adjustment.
374620
+ // Cleanup needs the actual worktree path (not the subdirectory inside it).
374621
+ const worktreeRootPath = mainProjectPath;
374622
+ // If the original path was a subdirectory, adjust mainProjectPath to
374623
+ // point to the corresponding subdirectory inside the worktree.
374624
+ // e.g. worktree at /tmp/ws/Oel contains full repo; if originalPath was
374625
+ // /repo/Oel, then mainProjectPath becomes /tmp/ws/Oel/Oel
374626
+ if (subdirOffset) {
374627
+ mainProjectPath = path.join(mainProjectPath, subdirOffset);
374628
+ logger_1.logger.info(`[Workspace] Adjusted main project path to subdirectory: ${mainProjectPath}`);
374629
+ // Ensure the subdirectory exists in the worktree
374630
+ const subdirExists = await this.pathExists(mainProjectPath);
374631
+ if (!subdirExists) {
374632
+ logger_1.logger.warn(`[Workspace] Subdirectory '${subdirOffset}' not found in worktree — falling back to worktree root`);
374633
+ mainProjectPath = path.join(this.workspacePath, mainProjectName);
374634
+ }
374635
+ }
372683
374636
  // Scan existing entries in the workspace directory to populate usedNames.
372684
374637
  // This handles reused workspaces (e.g. Slack threads) where a previous run
372685
374638
  // left symlinks on disk but the in-memory state was cleared by cleanup().
@@ -372702,6 +374655,7 @@ class WorkspaceManager {
372702
374655
  mainProjectPath,
372703
374656
  mainProjectName,
372704
374657
  originalPath: this.originalPath,
374658
+ worktreeRootPath,
372705
374659
  };
372706
374660
  this.initialized = true;
372707
374661
  logger_1.logger.info(`Workspace initialized: ${this.workspacePath}`);
@@ -372810,14 +374764,16 @@ class WorkspaceManager {
372810
374764
  ]);
372811
374765
  }
372812
374766
  try {
372813
- // Remove main project worktree if it exists
374767
+ // Remove main project worktree if it exists.
374768
+ // Use worktreeRootPath (the actual git worktree) rather than mainProjectPath
374769
+ // which may include a subdirectory offset (e.g. Oel/Oel instead of Oel).
372814
374770
  if (this.mainProjectInfo) {
372815
- const mainProjectPath = this.mainProjectInfo.mainProjectPath;
374771
+ const worktreePath = this.mainProjectInfo.worktreeRootPath || this.mainProjectInfo.mainProjectPath;
372816
374772
  // Check if path exists and if it's a worktree (not a symlink)
372817
374773
  try {
372818
- const stats = await fsp.lstat(mainProjectPath);
374774
+ const stats = await fsp.lstat(worktreePath);
372819
374775
  if (!stats.isSymbolicLink()) {
372820
- await this.removeMainProjectWorktree(mainProjectPath);
374776
+ await this.removeMainProjectWorktree(worktreePath);
372821
374777
  }
372822
374778
  }
372823
374779
  catch {
@@ -375710,22 +377666,6 @@ class WorkflowRegistry {
375710
377666
  exports.WorkflowRegistry = WorkflowRegistry;
375711
377667
 
375712
377668
 
375713
- /***/ }),
375714
-
375715
- /***/ 7065:
375716
- /***/ ((module) => {
375717
-
375718
- module.exports = eval("require")("./enterprise/loader");
375719
-
375720
-
375721
- /***/ }),
375722
-
375723
- /***/ 71370:
375724
- /***/ ((module) => {
375725
-
375726
- module.exports = eval("require")("./enterprise/policy/policy-input-builder");
375727
-
375728
-
375729
377669
  /***/ }),
375730
377670
 
375731
377671
  /***/ 18327:
@@ -545893,9 +547833,13 @@ var init_ProbeAgent = __esm({
545893
547833
  }
545894
547834
  return await this.fallbackManager.executeWithFallback(
545895
547835
  async (provider, model, config2) => {
547836
+ let fallbackModel = provider(model);
547837
+ if (this.concurrencyLimiter) {
547838
+ fallbackModel = _ProbeAgent._wrapModelWithLimiter(fallbackModel, this.concurrencyLimiter, this.debug);
547839
+ }
545896
547840
  const fallbackOptions = {
545897
547841
  ...options,
545898
- model: provider(model),
547842
+ model: fallbackModel,
545899
547843
  abortSignal: controller.signal
545900
547844
  };
545901
547845
  if (config2.provider !== "google" && fallbackOptions.tools) {
@@ -545923,6 +547867,132 @@ var init_ProbeAgent = __esm({
545923
547867
  }
545924
547868
  );
545925
547869
  }
547870
+ /**
547871
+ * Wrap a LanguageModelV1 model so each doStream/doGenerate call acquires and
547872
+ * releases a concurrency limiter slot. This gates individual LLM API calls
547873
+ * (seconds each) instead of entire multi-step agent sessions (minutes).
547874
+ *
547875
+ * @param {Object} model - LanguageModelV1 model instance
547876
+ * @param {Object} limiter - Concurrency limiter with acquire/release/getStats
547877
+ * @param {boolean} debug - Enable debug logging
547878
+ * @returns {Object} Wrapped model with per-call concurrency gating
547879
+ * @private
547880
+ */
547881
+ static _wrapModelWithLimiter(model, limiter, debug) {
547882
+ return new Proxy(model, {
547883
+ get(target, prop) {
547884
+ if (prop === "doStream") {
547885
+ return async function(...args) {
547886
+ await limiter.acquire(null);
547887
+ if (debug) {
547888
+ const stats = limiter.getStats();
547889
+ console.log(`[DEBUG] Acquired AI slot for LLM call (${stats.globalActive}/${stats.maxConcurrent}, queue: ${stats.queueSize})`);
547890
+ }
547891
+ try {
547892
+ const result = await target.doStream(...args);
547893
+ const originalStream = result.stream;
547894
+ const originalReader = originalStream.getReader();
547895
+ let released = false;
547896
+ const releaseOnce = () => {
547897
+ if (released) return;
547898
+ released = true;
547899
+ limiter.release(null);
547900
+ };
547901
+ const wrappedStream = new ReadableStream({
547902
+ async pull(controller) {
547903
+ try {
547904
+ const { done, value: value2 } = await originalReader.read();
547905
+ if (done) {
547906
+ controller.close();
547907
+ releaseOnce();
547908
+ if (debug) {
547909
+ const stats = limiter.getStats();
547910
+ console.log(`[DEBUG] Released AI slot after LLM stream complete (${stats.globalActive}/${stats.maxConcurrent})`);
547911
+ }
547912
+ } else {
547913
+ controller.enqueue(value2);
547914
+ }
547915
+ } catch (err) {
547916
+ releaseOnce();
547917
+ if (debug) {
547918
+ console.log(`[DEBUG] Released AI slot on LLM stream error`);
547919
+ }
547920
+ controller.error(err);
547921
+ }
547922
+ },
547923
+ cancel() {
547924
+ releaseOnce();
547925
+ if (debug) {
547926
+ console.log(`[DEBUG] Released AI slot on LLM stream cancel`);
547927
+ }
547928
+ originalReader.cancel();
547929
+ }
547930
+ });
547931
+ return { ...result, stream: wrappedStream };
547932
+ } catch (err) {
547933
+ limiter.release(null);
547934
+ if (debug) {
547935
+ console.log(`[DEBUG] Released AI slot on doStream error`);
547936
+ }
547937
+ throw err;
547938
+ }
547939
+ };
547940
+ }
547941
+ if (prop === "doGenerate") {
547942
+ return async function(...args) {
547943
+ await limiter.acquire(null);
547944
+ if (debug) {
547945
+ const stats = limiter.getStats();
547946
+ console.log(`[DEBUG] Acquired AI slot for LLM generate (${stats.globalActive}/${stats.maxConcurrent})`);
547947
+ }
547948
+ try {
547949
+ const result = await target.doGenerate(...args);
547950
+ return result;
547951
+ } finally {
547952
+ limiter.release(null);
547953
+ if (debug) {
547954
+ const stats = limiter.getStats();
547955
+ console.log(`[DEBUG] Released AI slot after LLM generate (${stats.globalActive}/${stats.maxConcurrent})`);
547956
+ }
547957
+ }
547958
+ };
547959
+ }
547960
+ const value = target[prop];
547961
+ return typeof value === "function" ? value.bind(target) : value;
547962
+ }
547963
+ });
547964
+ }
547965
+ /**
547966
+ * Wrap an engine stream result so its textStream async generator acquires
547967
+ * and releases a concurrency limiter slot. Acquire happens when iteration
547968
+ * begins; release happens in finally (completion, error, or break).
547969
+ *
547970
+ * @param {Object} result - Engine result with { textStream, usage, ... }
547971
+ * @param {Object} limiter - Concurrency limiter with acquire/release/getStats
547972
+ * @param {boolean} debug - Enable debug logging
547973
+ * @returns {Object} Result with wrapped textStream
547974
+ * @private
547975
+ */
547976
+ static _wrapEngineStreamWithLimiter(result, limiter, debug) {
547977
+ const originalStream = result.textStream;
547978
+ async function* gatedStream() {
547979
+ await limiter.acquire(null);
547980
+ if (debug) {
547981
+ const stats = limiter.getStats();
547982
+ console.log(`[DEBUG] Acquired AI slot for engine stream (${stats.globalActive}/${stats.maxConcurrent}, queue: ${stats.queueSize})`);
547983
+ }
547984
+ try {
547985
+ yield* originalStream;
547986
+ } finally {
547987
+ limiter.release(null);
547988
+ if (debug) {
547989
+ const stats = limiter.getStats();
547990
+ console.log(`[DEBUG] Released AI slot after engine stream (${stats.globalActive}/${stats.maxConcurrent})`);
547991
+ }
547992
+ }
547993
+ }
547994
+ return { ...result, textStream: gatedStream() };
547995
+ }
545926
547996
  /**
545927
547997
  * Execute streamText with retry and fallback support
545928
547998
  * @param {Object} options - streamText options
@@ -545931,12 +548001,8 @@ var init_ProbeAgent = __esm({
545931
548001
  */
545932
548002
  async streamTextWithRetryAndFallback(options) {
545933
548003
  const limiter = this.concurrencyLimiter;
545934
- if (limiter) {
545935
- await limiter.acquire(null);
545936
- if (this.debug) {
545937
- const stats = limiter.getStats();
545938
- console.log(`[DEBUG] Acquired global AI concurrency slot (${stats.globalActive}/${stats.maxConcurrent}, queue: ${stats.queueSize})`);
545939
- }
548004
+ if (limiter && options.model) {
548005
+ options = { ...options, model: _ProbeAgent._wrapModelWithLimiter(options.model, limiter, this.debug) };
545940
548006
  }
545941
548007
  const controller = new AbortController();
545942
548008
  const timeoutState = { timeoutId: null };
@@ -545964,6 +548030,9 @@ var init_ProbeAgent = __esm({
545964
548030
  if (useClaudeCode || useCodex) {
545965
548031
  try {
545966
548032
  result = await this._tryEngineStreamPath(options, controller, timeoutState);
548033
+ if (result && limiter) {
548034
+ result = _ProbeAgent._wrapEngineStreamWithLimiter(result, limiter, this.debug);
548035
+ }
545967
548036
  } catch (error40) {
545968
548037
  if (this.debug) {
545969
548038
  const engineType = useClaudeCode ? "Claude Code" : "Codex";
@@ -545974,41 +548043,7 @@ var init_ProbeAgent = __esm({
545974
548043
  if (!result) {
545975
548044
  result = await this._executeWithVercelProvider(options, controller);
545976
548045
  }
545977
- if (limiter && result.textStream) {
545978
- const originalStream = result.textStream;
545979
- const debug = this.debug;
545980
- const wrappedStream = (async function* () {
545981
- try {
545982
- for await (const chunk of originalStream) {
545983
- yield chunk;
545984
- }
545985
- } finally {
545986
- limiter.release(null);
545987
- if (debug) {
545988
- const stats = limiter.getStats();
545989
- console.log(`[DEBUG] Released global AI concurrency slot (${stats.globalActive}/${stats.maxConcurrent}, queue: ${stats.queueSize})`);
545990
- }
545991
- }
545992
- })();
545993
- return new Proxy(result, {
545994
- get(target, prop) {
545995
- if (prop === "textStream") return wrappedStream;
545996
- const value = target[prop];
545997
- return typeof value === "function" ? value.bind(target) : value;
545998
- }
545999
- });
546000
- } else if (limiter) {
546001
- limiter.release(null);
546002
- }
546003
548046
  return result;
546004
- } catch (error40) {
546005
- if (limiter) {
546006
- limiter.release(null);
546007
- if (this.debug) {
546008
- console.log(`[DEBUG] Released global AI concurrency slot on error`);
546009
- }
546010
- }
546011
- throw error40;
546012
548047
  } finally {
546013
548048
  if (timeoutState.timeoutId) {
546014
548049
  clearTimeout(timeoutState.timeoutId);
@@ -609177,7 +611212,7 @@ module.exports = /*#__PURE__*/JSON.parse('["aaa","aarp","abb","abbott","abbvie",
609177
611212
  /***/ ((module) => {
609178
611213
 
609179
611214
  "use strict";
609180
- module.exports = /*#__PURE__*/JSON.parse('{"name":"@probelabs/visor","version":"0.1.178","main":"dist/index.js","bin":{"visor":"./dist/index.js"},"exports":{".":{"require":"./dist/index.js","import":"./dist/index.js"},"./sdk":{"types":"./dist/sdk/sdk.d.ts","import":"./dist/sdk/sdk.mjs","require":"./dist/sdk/sdk.js"},"./cli":{"require":"./dist/index.js"}},"files":["dist/","defaults/","action.yml","README.md","LICENSE"],"publishConfig":{"access":"public","registry":"https://registry.npmjs.org/"},"scripts":{"build:cli":"ncc build src/index.ts -o dist && cp -r defaults dist/ && cp -r output dist/ && cp -r docs dist/ && cp -r examples dist/ && cp -r src/debug-visualizer/ui dist/debug-visualizer/ && node scripts/inject-version.js && echo \'#!/usr/bin/env node\' | cat - dist/index.js > temp && mv temp dist/index.js && chmod +x dist/index.js","build:sdk":"tsup src/sdk.ts --dts --sourcemap --format esm,cjs --out-dir dist/sdk","build":"./scripts/build-oss.sh","build:ee":"npm run build:cli && npm run build:sdk","test":"jest && npm run test:yaml","test:unit":"jest","prepublishOnly":"npm run build","test:watch":"jest --watch","test:coverage":"jest --coverage","test:ee":"jest --testPathPatterns=\'tests/ee\' --testPathIgnorePatterns=\'/node_modules/\' --no-coverage","test:manual:bash":"RUN_MANUAL_TESTS=true jest tests/manual/bash-config-manual.test.ts","lint":"eslint src tests --ext .ts","lint:fix":"eslint src tests --ext .ts --fix","format":"prettier --write src tests","format:check":"prettier --check src tests","clean":"","clean:traces":"node scripts/clean-traces.js","prebuild":"npm run clean && node scripts/generate-config-schema.js","pretest":"npm run clean:traces && node scripts/generate-config-schema.js && npm run build:cli","pretest:unit":"npm run clean:traces && node scripts/generate-config-schema.js && npm run build:cli","test:with-build":"npm run build:cli && jest","test:yaml":"node dist/index.js test --progress compact","test:yaml:parallel":"node dist/index.js test 
--progress compact --max-parallel 4","prepare":"husky","pre-commit":"lint-staged","deploy:site":"cd site && npx wrangler pages deploy . --project-name=visor-site --commit-dirty=true","deploy:worker":"npx wrangler deploy","deploy":"npm run deploy:site && npm run deploy:worker","publish:ee":"./scripts/publish-ee.sh","release":"./scripts/release.sh","release:patch":"./scripts/release.sh patch","release:minor":"./scripts/release.sh minor","release:major":"./scripts/release.sh major","release:prerelease":"./scripts/release.sh prerelease","docs:validate":"node scripts/validate-readme-links.js","workshop:setup":"npm install -D reveal-md@6.1.2","workshop:serve":"cd workshop && reveal-md slides.md -w","workshop:export":"reveal-md workshop/slides.md --static workshop/build","workshop:pdf":"reveal-md workshop/slides.md --print workshop/Visor-Workshop.pdf --print-size letter","workshop:pdf:ci":"reveal-md workshop/slides.md --print workshop/Visor-Workshop.pdf --print-size letter --puppeteer-launch-args=\\"--no-sandbox --disable-dev-shm-usage\\"","workshop:pdf:a4":"reveal-md workshop/slides.md --print workshop/Visor-Workshop-A4.pdf --print-size A4","workshop:build":"npm run workshop:export && npm run workshop:pdf","simulate:issue":"TS_NODE_TRANSPILE_ONLY=1 ts-node scripts/simulate-gh-run.ts --event issues --action opened --debug","simulate:comment":"TS_NODE_TRANSPILE_ONLY=1 ts-node scripts/simulate-gh-run.ts --event issue_comment --action created --debug"},"keywords":["code-review","ai","github-action","cli","pr-review","visor"],"author":"Probe Labs","license":"MIT","description":"AI workflow engine for code review, assistants, and automation — orchestrate checks, MCP tools, and AI providers with YAML-driven 
pipelines","repository":{"type":"git","url":"git+https://github.com/probelabs/visor.git"},"bugs":{"url":"https://github.com/probelabs/visor/issues"},"homepage":"https://github.com/probelabs/visor#readme","dependencies":{"@actions/core":"^1.11.1","@apidevtools/swagger-parser":"^12.1.0","@grammyjs/runner":"^2.0.3","@modelcontextprotocol/sdk":"^1.25.3","@nyariv/sandboxjs":"github:probelabs/SandboxJS#23c4bb611f7d05f3cb8c523917b5f57103e48108","@octokit/action":"^8.0.2","@octokit/auth-app":"^8.1.0","@octokit/core":"^7.0.3","@octokit/rest":"^22.0.0","@opentelemetry/api":"^1.9.0","@opentelemetry/api-logs":"^0.203.0","@opentelemetry/core":"^1.30.1","@opentelemetry/exporter-logs-otlp-http":"^0.203.0","@opentelemetry/exporter-metrics-otlp-http":"^0.203.0","@opentelemetry/exporter-trace-otlp-grpc":"^0.203.0","@opentelemetry/exporter-trace-otlp-http":"^0.203.0","@opentelemetry/instrumentation":"^0.203.0","@opentelemetry/resources":"^1.30.1","@opentelemetry/sdk-logs":"^0.203.0","@opentelemetry/sdk-metrics":"^1.30.1","@opentelemetry/sdk-node":"^0.203.0","@opentelemetry/sdk-trace-base":"^1.30.1","@opentelemetry/semantic-conventions":"^1.30.1","@probelabs/probe":"^0.6.0-rc293","@types/commander":"^2.12.0","@types/uuid":"^10.0.0","acorn":"^8.16.0","acorn-walk":"^8.3.5","ajv":"^8.17.1","ajv-formats":"^3.0.1","better-sqlite3":"^11.0.0","blessed":"^0.1.81","botbuilder":"^4.23.3","botframework-connector":"^4.23.3","cli-table3":"^0.6.5","commander":"^14.0.0","deepmerge":"^4.3.1","dotenv":"^17.2.3","grammy":"^1.41.1","ignore":"^7.0.5","imapflow":"^1.2.12","js-yaml":"^4.1.0","jsonpath-plus":"^10.4.0","liquidjs":"^10.21.1","mailparser":"^3.9.3","minimatch":"^10.2.2","node-cron":"^3.0.3","nodemailer":"^8.0.1","open":"^9.1.0","resend":"^6.9.3","simple-git":"^3.28.0","uuid":"^11.1.0","ws":"^8.18.3"},"optionalDependencies":{"@anthropic/claude-code-sdk":"npm:null@*","@open-policy-agent/opa-wasm":"^1.10.0","knex":"^3.1.0","mysql2":"^3.11.0","pg":"^8.13.0","tedious":"^19.0.0"},"devDependencies":{"@
eslint/js":"^9.34.0","@kie/act-js":"^2.6.2","@kie/mock-github":"^2.0.1","@swc/core":"^1.13.2","@swc/jest":"^0.2.37","@types/better-sqlite3":"^7.6.0","@types/blessed":"^0.1.27","@types/jest":"^30.0.0","@types/js-yaml":"^4.0.9","@types/mailparser":"^3.4.6","@types/node":"^24.3.0","@types/node-cron":"^3.0.11","@types/nodemailer":"^7.0.11","@types/ws":"^8.18.1","@typescript-eslint/eslint-plugin":"^8.42.0","@typescript-eslint/parser":"^8.42.0","@vercel/ncc":"^0.38.4","eslint":"^9.34.0","eslint-config-prettier":"^10.1.8","eslint-plugin-prettier":"^5.5.4","husky":"^9.1.7","jest":"^30.1.3","lint-staged":"^16.1.6","prettier":"^3.6.2","reveal-md":"^6.1.2","ts-json-schema-generator":"^1.5.1","ts-node":"^10.9.2","tsup":"^8.5.0","typescript":"^5.9.2","wrangler":"^3.0.0"},"peerDependenciesMeta":{"@anthropic/claude-code-sdk":{"optional":true}},"directories":{"test":"tests"},"lint-staged":{"src/**/*.{ts,js}":["eslint --fix","prettier --write"],"tests/**/*.{ts,js}":["eslint --fix","prettier --write"],"*.{json,md,yml,yaml}":["prettier --write"]}}');
611215
+ module.exports = /*#__PURE__*/JSON.parse('{"name":"@probelabs/visor","version":"0.1.42","main":"dist/index.js","bin":{"visor":"./dist/index.js"},"exports":{".":{"require":"./dist/index.js","import":"./dist/index.js"},"./sdk":{"types":"./dist/sdk/sdk.d.ts","import":"./dist/sdk/sdk.mjs","require":"./dist/sdk/sdk.js"},"./cli":{"require":"./dist/index.js"}},"files":["dist/","defaults/","action.yml","README.md","LICENSE"],"publishConfig":{"access":"public","registry":"https://registry.npmjs.org/"},"scripts":{"build:cli":"ncc build src/index.ts -o dist && cp -r defaults dist/ && cp -r output dist/ && cp -r docs dist/ && cp -r examples dist/ && cp -r src/debug-visualizer/ui dist/debug-visualizer/ && node scripts/inject-version.js && echo \'#!/usr/bin/env node\' | cat - dist/index.js > temp && mv temp dist/index.js && chmod +x dist/index.js","build:sdk":"tsup src/sdk.ts --dts --sourcemap --format esm,cjs --out-dir dist/sdk","build":"./scripts/build-oss.sh","build:ee":"npm run build:cli && npm run build:sdk","test":"jest && npm run test:yaml","test:unit":"jest","prepublishOnly":"npm run build","test:watch":"jest --watch","test:coverage":"jest --coverage","test:ee":"jest --testPathPatterns=\'tests/ee\' --testPathIgnorePatterns=\'/node_modules/\' --no-coverage","test:manual:bash":"RUN_MANUAL_TESTS=true jest tests/manual/bash-config-manual.test.ts","lint":"eslint src tests --ext .ts","lint:fix":"eslint src tests --ext .ts --fix","format":"prettier --write src tests","format:check":"prettier --check src tests","clean":"","clean:traces":"node scripts/clean-traces.js","prebuild":"npm run clean && node scripts/generate-config-schema.js","pretest":"npm run clean:traces && node scripts/generate-config-schema.js && npm run build:cli","pretest:unit":"npm run clean:traces && node scripts/generate-config-schema.js && npm run build:cli","test:with-build":"npm run build:cli && jest","test:yaml":"node dist/index.js test --progress compact","test:yaml:parallel":"node dist/index.js test 
--progress compact --max-parallel 4","prepare":"husky","pre-commit":"lint-staged","deploy:site":"cd site && npx wrangler pages deploy . --project-name=visor-site --commit-dirty=true","deploy:worker":"npx wrangler deploy","deploy":"npm run deploy:site && npm run deploy:worker","publish:ee":"./scripts/publish-ee.sh","release":"./scripts/release.sh","release:patch":"./scripts/release.sh patch","release:minor":"./scripts/release.sh minor","release:major":"./scripts/release.sh major","release:prerelease":"./scripts/release.sh prerelease","docs:validate":"node scripts/validate-readme-links.js","workshop:setup":"npm install -D reveal-md@6.1.2","workshop:serve":"cd workshop && reveal-md slides.md -w","workshop:export":"reveal-md workshop/slides.md --static workshop/build","workshop:pdf":"reveal-md workshop/slides.md --print workshop/Visor-Workshop.pdf --print-size letter","workshop:pdf:ci":"reveal-md workshop/slides.md --print workshop/Visor-Workshop.pdf --print-size letter --puppeteer-launch-args=\\"--no-sandbox --disable-dev-shm-usage\\"","workshop:pdf:a4":"reveal-md workshop/slides.md --print workshop/Visor-Workshop-A4.pdf --print-size A4","workshop:build":"npm run workshop:export && npm run workshop:pdf","simulate:issue":"TS_NODE_TRANSPILE_ONLY=1 ts-node scripts/simulate-gh-run.ts --event issues --action opened --debug","simulate:comment":"TS_NODE_TRANSPILE_ONLY=1 ts-node scripts/simulate-gh-run.ts --event issue_comment --action created --debug"},"keywords":["code-review","ai","github-action","cli","pr-review","visor"],"author":"Probe Labs","license":"MIT","description":"AI workflow engine for code review, assistants, and automation — orchestrate checks, MCP tools, and AI providers with YAML-driven 
pipelines","repository":{"type":"git","url":"git+https://github.com/probelabs/visor.git"},"bugs":{"url":"https://github.com/probelabs/visor/issues"},"homepage":"https://github.com/probelabs/visor#readme","dependencies":{"@actions/core":"^1.11.1","@apidevtools/swagger-parser":"^12.1.0","@grammyjs/runner":"^2.0.3","@modelcontextprotocol/sdk":"^1.25.3","@nyariv/sandboxjs":"github:probelabs/SandboxJS#23c4bb611f7d05f3cb8c523917b5f57103e48108","@octokit/action":"^8.0.2","@octokit/auth-app":"^8.1.0","@octokit/core":"^7.0.3","@octokit/rest":"^22.0.0","@opentelemetry/api":"^1.9.0","@opentelemetry/api-logs":"^0.203.0","@opentelemetry/core":"^1.30.1","@opentelemetry/exporter-logs-otlp-http":"^0.203.0","@opentelemetry/exporter-metrics-otlp-http":"^0.203.0","@opentelemetry/exporter-trace-otlp-grpc":"^0.203.0","@opentelemetry/exporter-trace-otlp-http":"^0.203.0","@opentelemetry/instrumentation":"^0.203.0","@opentelemetry/resources":"^1.30.1","@opentelemetry/sdk-logs":"^0.203.0","@opentelemetry/sdk-metrics":"^1.30.1","@opentelemetry/sdk-node":"^0.203.0","@opentelemetry/sdk-trace-base":"^1.30.1","@opentelemetry/semantic-conventions":"^1.30.1","@probelabs/probe":"^0.6.0-rc294","@types/commander":"^2.12.0","@types/uuid":"^10.0.0","acorn":"^8.16.0","acorn-walk":"^8.3.5","ajv":"^8.17.1","ajv-formats":"^3.0.1","better-sqlite3":"^11.0.0","blessed":"^0.1.81","botbuilder":"^4.23.3","botframework-connector":"^4.23.3","cli-table3":"^0.6.5","commander":"^14.0.0","deepmerge":"^4.3.1","dotenv":"^17.2.3","grammy":"^1.41.1","ignore":"^7.0.5","imapflow":"^1.2.12","js-yaml":"^4.1.0","jsonpath-plus":"^10.4.0","liquidjs":"^10.21.1","mailparser":"^3.9.3","minimatch":"^10.2.2","node-cron":"^3.0.3","nodemailer":"^8.0.1","open":"^9.1.0","resend":"^6.9.3","simple-git":"^3.28.0","uuid":"^11.1.0","ws":"^8.18.3"},"optionalDependencies":{"@anthropic/claude-code-sdk":"npm:null@*","@open-policy-agent/opa-wasm":"^1.10.0","knex":"^3.1.0","mysql2":"^3.11.0","pg":"^8.13.0","tedious":"^19.0.0"},"devDependencies":{"@
eslint/js":"^9.34.0","@kie/act-js":"^2.6.2","@kie/mock-github":"^2.0.1","@swc/core":"^1.13.2","@swc/jest":"^0.2.37","@types/better-sqlite3":"^7.6.0","@types/blessed":"^0.1.27","@types/jest":"^30.0.0","@types/js-yaml":"^4.0.9","@types/mailparser":"^3.4.6","@types/node":"^24.3.0","@types/node-cron":"^3.0.11","@types/nodemailer":"^7.0.11","@types/ws":"^8.18.1","@typescript-eslint/eslint-plugin":"^8.42.0","@typescript-eslint/parser":"^8.42.0","@vercel/ncc":"^0.38.4","eslint":"^9.34.0","eslint-config-prettier":"^10.1.8","eslint-plugin-prettier":"^5.5.4","husky":"^9.1.7","jest":"^30.1.3","lint-staged":"^16.1.6","prettier":"^3.6.2","reveal-md":"^6.1.2","ts-json-schema-generator":"^1.5.1","ts-node":"^10.9.2","tsup":"^8.5.0","typescript":"^5.9.2","wrangler":"^3.0.0"},"peerDependenciesMeta":{"@anthropic/claude-code-sdk":{"optional":true}},"directories":{"test":"tests"},"lint-staged":{"src/**/*.{ts,js}":["eslint --fix","prettier --write"],"tests/**/*.{ts,js}":["eslint --fix","prettier --write"],"*.{json,md,yml,yaml}":["prettier --write"]}}');
609181
611216
 
609182
611217
  /***/ })
609183
611218