@probelabs/visor 0.1.147 → 0.1.148-ee

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (93) hide show
  1. package/dist/frontends/github-frontend.d.ts +2 -1
  2. package/dist/frontends/github-frontend.d.ts.map +1 -1
  3. package/dist/index.js +2444 -134
  4. package/dist/providers/ai-check-provider.d.ts.map +1 -1
  5. package/dist/scheduler/schedule-tool.d.ts.map +1 -1
  6. package/dist/scheduler/scheduler.d.ts +5 -0
  7. package/dist/scheduler/scheduler.d.ts.map +1 -1
  8. package/dist/sdk/{check-provider-registry-CTZA3EVE.mjs → check-provider-registry-AMYY2ZJY.mjs} +5 -6
  9. package/dist/sdk/{check-provider-registry-SCPM6DIT.mjs → check-provider-registry-DVQDGTOE.mjs} +5 -6
  10. package/dist/sdk/{chunk-4F5UVWAN.mjs → chunk-62TNF5PJ.mjs} +2 -2
  11. package/dist/sdk/{chunk-4F5UVWAN.mjs.map → chunk-62TNF5PJ.mjs.map} +1 -1
  12. package/dist/sdk/{chunk-H23T7J6Y.mjs → chunk-75Q63UNX.mjs} +2743 -277
  13. package/dist/sdk/chunk-75Q63UNX.mjs.map +1 -0
  14. package/dist/sdk/{chunk-JKWLGLDR.mjs → chunk-CISJ6DJW.mjs} +3 -3
  15. package/dist/sdk/{chunk-EWGX7LI7.mjs → chunk-H4AYMOAT.mjs} +2742 -276
  16. package/dist/sdk/chunk-H4AYMOAT.mjs.map +1 -0
  17. package/dist/sdk/{chunk-2NFKN6CY.mjs → chunk-RJLJUTSU.mjs} +2 -2
  18. package/dist/sdk/{failure-condition-evaluator-FHNZL2US.mjs → failure-condition-evaluator-IVCTD4BZ.mjs} +3 -3
  19. package/dist/sdk/{github-frontend-V3WUHL6E.mjs → github-frontend-DFT5G32K.mjs} +16 -4
  20. package/dist/sdk/github-frontend-DFT5G32K.mjs.map +1 -0
  21. package/dist/sdk/{host-GVR4UGZ3.mjs → host-H7IX4GBK.mjs} +2 -2
  22. package/dist/sdk/{host-UQUQIYFG.mjs → host-NZXGBBJI.mjs} +2 -2
  23. package/dist/sdk/knex-store-HPXJILBL.mjs +411 -0
  24. package/dist/sdk/knex-store-HPXJILBL.mjs.map +1 -0
  25. package/dist/sdk/loader-YSRMVXC3.mjs +89 -0
  26. package/dist/sdk/loader-YSRMVXC3.mjs.map +1 -0
  27. package/dist/sdk/opa-policy-engine-S2S2ULEI.mjs +655 -0
  28. package/dist/sdk/opa-policy-engine-S2S2ULEI.mjs.map +1 -0
  29. package/dist/sdk/{routing-CZ36LVVS.mjs → routing-LU5PAREW.mjs} +4 -4
  30. package/dist/sdk/{check-provider-registry-CDL5AJSI.mjs → schedule-tool-4JMWZCCK.mjs} +15 -10
  31. package/dist/sdk/{workflow-check-provider-3K7732MW.mjs → schedule-tool-CONR4VW3.mjs} +15 -10
  32. package/dist/sdk/{schedule-tool-handler-KFYNV7HL.mjs → schedule-tool-handler-AXMR7NBI.mjs} +5 -6
  33. package/dist/sdk/{schedule-tool-handler-QUMAF2DJ.mjs → schedule-tool-handler-YUC6CAXX.mjs} +5 -6
  34. package/dist/sdk/sdk.js +2831 -371
  35. package/dist/sdk/sdk.js.map +1 -1
  36. package/dist/sdk/sdk.mjs +4 -5
  37. package/dist/sdk/sdk.mjs.map +1 -1
  38. package/dist/sdk/{trace-helpers-W7TF5ZKF.mjs → trace-helpers-6ROJR7N3.mjs} +2 -2
  39. package/dist/sdk/validator-XTZJZZJH.mjs +134 -0
  40. package/dist/sdk/validator-XTZJZZJH.mjs.map +1 -0
  41. package/dist/sdk/{workflow-check-provider-5453TW65.mjs → workflow-check-provider-DYSO3PML.mjs} +5 -6
  42. package/dist/sdk/{workflow-check-provider-HMABCGB5.mjs → workflow-check-provider-MMB7L3YG.mjs} +5 -6
  43. package/dist/state-machine/context/build-engine-context.d.ts.map +1 -1
  44. package/dist/utils/tool-resolver.d.ts.map +1 -1
  45. package/dist/utils/workspace-manager.d.ts +31 -8
  46. package/dist/utils/workspace-manager.d.ts.map +1 -1
  47. package/dist/utils/worktree-manager.d.ts +6 -0
  48. package/dist/utils/worktree-manager.d.ts.map +1 -1
  49. package/package.json +2 -2
  50. package/dist/output/traces/run-2026-02-27T11-27-22-261Z.ndjson +0 -138
  51. package/dist/output/traces/run-2026-02-27T11-28-08-546Z.ndjson +0 -1442
  52. package/dist/sdk/chunk-EWGX7LI7.mjs.map +0 -1
  53. package/dist/sdk/chunk-FBJ7MC7R.mjs +0 -1502
  54. package/dist/sdk/chunk-H23T7J6Y.mjs.map +0 -1
  55. package/dist/sdk/chunk-JKWLGLDR.mjs.map +0 -1
  56. package/dist/sdk/chunk-R77LN3OE.mjs +0 -40693
  57. package/dist/sdk/chunk-R77LN3OE.mjs.map +0 -1
  58. package/dist/sdk/chunk-V2QW6ECX.mjs +0 -739
  59. package/dist/sdk/chunk-V2QW6ECX.mjs.map +0 -1
  60. package/dist/sdk/chunk-XKCER23W.mjs +0 -1490
  61. package/dist/sdk/chunk-XKCER23W.mjs.map +0 -1
  62. package/dist/sdk/chunk-YQZW3D2V.mjs +0 -443
  63. package/dist/sdk/chunk-YQZW3D2V.mjs.map +0 -1
  64. package/dist/sdk/failure-condition-evaluator-2B5WY7QN.mjs +0 -17
  65. package/dist/sdk/github-frontend-47EU2HBY.mjs +0 -1356
  66. package/dist/sdk/github-frontend-47EU2HBY.mjs.map +0 -1
  67. package/dist/sdk/github-frontend-V3WUHL6E.mjs.map +0 -1
  68. package/dist/sdk/routing-THIWDEYY.mjs +0 -25
  69. package/dist/sdk/schedule-tool-2COUUTF7.mjs +0 -18
  70. package/dist/sdk/schedule-tool-handler-GEH62OUM.mjs +0 -40
  71. package/dist/sdk/trace-helpers-EHDZ42HH.mjs +0 -25
  72. package/dist/sdk/trace-helpers-EHDZ42HH.mjs.map +0 -1
  73. package/dist/sdk/trace-helpers-W7TF5ZKF.mjs.map +0 -1
  74. package/dist/sdk/workflow-check-provider-3K7732MW.mjs.map +0 -1
  75. package/dist/sdk/workflow-check-provider-5453TW65.mjs.map +0 -1
  76. package/dist/sdk/workflow-check-provider-HMABCGB5.mjs.map +0 -1
  77. package/dist/traces/run-2026-02-27T11-27-22-261Z.ndjson +0 -138
  78. package/dist/traces/run-2026-02-27T11-28-08-546Z.ndjson +0 -1442
  79. /package/dist/sdk/{check-provider-registry-CDL5AJSI.mjs.map → check-provider-registry-AMYY2ZJY.mjs.map} +0 -0
  80. /package/dist/sdk/{check-provider-registry-CTZA3EVE.mjs.map → check-provider-registry-DVQDGTOE.mjs.map} +0 -0
  81. /package/dist/sdk/{chunk-FBJ7MC7R.mjs.map → chunk-CISJ6DJW.mjs.map} +0 -0
  82. /package/dist/sdk/{chunk-2NFKN6CY.mjs.map → chunk-RJLJUTSU.mjs.map} +0 -0
  83. /package/dist/sdk/{check-provider-registry-SCPM6DIT.mjs.map → failure-condition-evaluator-IVCTD4BZ.mjs.map} +0 -0
  84. /package/dist/sdk/{host-GVR4UGZ3.mjs.map → host-H7IX4GBK.mjs.map} +0 -0
  85. /package/dist/sdk/{host-UQUQIYFG.mjs.map → host-NZXGBBJI.mjs.map} +0 -0
  86. /package/dist/sdk/{failure-condition-evaluator-2B5WY7QN.mjs.map → routing-LU5PAREW.mjs.map} +0 -0
  87. /package/dist/sdk/{failure-condition-evaluator-FHNZL2US.mjs.map → schedule-tool-4JMWZCCK.mjs.map} +0 -0
  88. /package/dist/sdk/{routing-CZ36LVVS.mjs.map → schedule-tool-CONR4VW3.mjs.map} +0 -0
  89. /package/dist/sdk/{routing-THIWDEYY.mjs.map → schedule-tool-handler-AXMR7NBI.mjs.map} +0 -0
  90. /package/dist/sdk/{schedule-tool-2COUUTF7.mjs.map → schedule-tool-handler-YUC6CAXX.mjs.map} +0 -0
  91. /package/dist/sdk/{schedule-tool-handler-GEH62OUM.mjs.map → trace-helpers-6ROJR7N3.mjs.map} +0 -0
  92. /package/dist/sdk/{schedule-tool-handler-KFYNV7HL.mjs.map → workflow-check-provider-DYSO3PML.mjs.map} +0 -0
  93. /package/dist/sdk/{schedule-tool-handler-QUMAF2DJ.mjs.map → workflow-check-provider-MMB7L3YG.mjs.map} +0 -0
package/dist/index.js CHANGED
@@ -1,8 +1,8 @@
1
1
  #!/usr/bin/env node
2
- process.env.VISOR_VERSION = '0.1.147';
3
- process.env.PROBE_VERSION = '0.6.0-rc262';
4
- process.env.VISOR_COMMIT_SHA = '986bd9df126dcf4c0564d12701994e4bad407897';
5
- process.env.VISOR_COMMIT_SHORT = '986bd9d';
2
+ process.env.VISOR_VERSION = '0.1.148';
3
+ process.env.PROBE_VERSION = '0.6.0-rc264';
4
+ process.env.VISOR_COMMIT_SHA = '8d09da19e112d8649810da46458aa77d3f034fc6';
5
+ process.env.VISOR_COMMIT_SHORT = '8d09da1';
6
6
  /******/ (() => { // webpackBootstrap
7
7
  /******/ var __webpack_modules__ = ({
8
8
 
@@ -161193,7 +161193,7 @@ async function handleDumpPolicyInput(checkId, argv) {
161193
161193
  let PolicyInputBuilder;
161194
161194
  try {
161195
161195
  // @ts-ignore — enterprise/ may not exist in OSS builds (caught at runtime)
161196
- const mod = await Promise.resolve().then(() => __importStar(__nccwpck_require__(71370)));
161196
+ const mod = await Promise.resolve().then(() => __importStar(__nccwpck_require__(17117)));
161197
161197
  PolicyInputBuilder = mod.PolicyInputBuilder;
161198
161198
  }
161199
161199
  catch {
@@ -167035,6 +167035,1690 @@ class DependencyResolver {
167035
167035
  exports.DependencyResolver = DependencyResolver;
167036
167036
 
167037
167037
 
167038
+ /***/ }),
167039
+
167040
+ /***/ 50069:
167041
+ /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
167042
+
167043
+ "use strict";
167044
+
167045
+ /**
167046
+ * Copyright (c) ProbeLabs. All rights reserved.
167047
+ * Licensed under the Elastic License 2.0; you may not use this file except
167048
+ * in compliance with the Elastic License 2.0.
167049
+ */
167050
+ var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
167051
+ if (k2 === undefined) k2 = k;
167052
+ var desc = Object.getOwnPropertyDescriptor(m, k);
167053
+ if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
167054
+ desc = { enumerable: true, get: function() { return m[k]; } };
167055
+ }
167056
+ Object.defineProperty(o, k2, desc);
167057
+ }) : (function(o, m, k, k2) {
167058
+ if (k2 === undefined) k2 = k;
167059
+ o[k2] = m[k];
167060
+ }));
167061
+ var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
167062
+ Object.defineProperty(o, "default", { enumerable: true, value: v });
167063
+ }) : function(o, v) {
167064
+ o["default"] = v;
167065
+ });
167066
+ var __importStar = (this && this.__importStar) || (function () {
167067
+ var ownKeys = function(o) {
167068
+ ownKeys = Object.getOwnPropertyNames || function (o) {
167069
+ var ar = [];
167070
+ for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
167071
+ return ar;
167072
+ };
167073
+ return ownKeys(o);
167074
+ };
167075
+ return function (mod) {
167076
+ if (mod && mod.__esModule) return mod;
167077
+ var result = {};
167078
+ if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
167079
+ __setModuleDefault(result, mod);
167080
+ return result;
167081
+ };
167082
+ })();
167083
+ Object.defineProperty(exports, "__esModule", ({ value: true }));
167084
+ exports.LicenseValidator = void 0;
167085
+ const crypto = __importStar(__nccwpck_require__(76982));
167086
+ const fs = __importStar(__nccwpck_require__(79896));
167087
+ const path = __importStar(__nccwpck_require__(16928));
167088
+ class LicenseValidator {
167089
+ /** Ed25519 public key for license verification (PEM format). */
167090
+ static PUBLIC_KEY = '-----BEGIN PUBLIC KEY-----\n' +
167091
+ 'MCowBQYDK2VwAyEAI/Zd08EFmgIdrDm/HXd0l3/5GBt7R1PrdvhdmEXhJlU=\n' +
167092
+ '-----END PUBLIC KEY-----\n';
167093
+ cache = null;
167094
+ static CACHE_TTL = 5 * 60 * 1000; // 5 minutes
167095
+ static GRACE_PERIOD = 72 * 3600 * 1000; // 72 hours after expiry
167096
+ /**
167097
+ * Load and validate license from environment or file.
167098
+ *
167099
+ * Resolution order:
167100
+ * 1. VISOR_LICENSE env var (JWT string)
167101
+ * 2. VISOR_LICENSE_FILE env var (path to file)
167102
+ * 3. .visor-license in project root (cwd)
167103
+ * 4. .visor-license in ~/.config/visor/
167104
+ */
167105
+ async loadAndValidate() {
167106
+ // Return cached result if still fresh
167107
+ if (this.cache && Date.now() - this.cache.validatedAt < LicenseValidator.CACHE_TTL) {
167108
+ return this.cache.payload;
167109
+ }
167110
+ const token = this.resolveToken();
167111
+ if (!token)
167112
+ return null;
167113
+ const payload = this.verifyAndDecode(token);
167114
+ if (!payload)
167115
+ return null;
167116
+ this.cache = { payload, validatedAt: Date.now() };
167117
+ return payload;
167118
+ }
167119
+ /** Check if a specific feature is licensed */
167120
+ hasFeature(feature) {
167121
+ if (!this.cache)
167122
+ return false;
167123
+ return this.cache.payload.features.includes(feature);
167124
+ }
167125
+ /** Check if license is valid (with grace period) */
167126
+ isValid() {
167127
+ if (!this.cache)
167128
+ return false;
167129
+ const now = Date.now();
167130
+ const expiryMs = this.cache.payload.exp * 1000;
167131
+ return now < expiryMs + LicenseValidator.GRACE_PERIOD;
167132
+ }
167133
+ /** Check if the license is within its grace period (expired but still valid) */
167134
+ isInGracePeriod() {
167135
+ if (!this.cache)
167136
+ return false;
167137
+ const now = Date.now();
167138
+ const expiryMs = this.cache.payload.exp * 1000;
167139
+ return now >= expiryMs && now < expiryMs + LicenseValidator.GRACE_PERIOD;
167140
+ }
167141
+ resolveToken() {
167142
+ // 1. Direct env var
167143
+ if (process.env.VISOR_LICENSE) {
167144
+ return process.env.VISOR_LICENSE.trim();
167145
+ }
167146
+ // 2. File path from env (validate against path traversal)
167147
+ if (process.env.VISOR_LICENSE_FILE) {
167148
+ // path.resolve() produces an absolute path with all '..' segments resolved,
167149
+ // so a separate resolved.includes('..') check is unnecessary.
167150
+ const resolved = path.resolve(process.env.VISOR_LICENSE_FILE);
167151
+ const home = process.env.HOME || process.env.USERPROFILE || '';
167152
+ const allowedPrefixes = [path.normalize(process.cwd())];
167153
+ if (home)
167154
+ allowedPrefixes.push(path.normalize(path.join(home, '.config', 'visor')));
167155
+ // Resolve symlinks so an attacker cannot create a symlink inside an
167156
+ // allowed prefix that points to an arbitrary file outside it.
167157
+ let realPath;
167158
+ try {
167159
+ realPath = fs.realpathSync(resolved);
167160
+ }
167161
+ catch {
167162
+ return null; // File doesn't exist or isn't accessible
167163
+ }
167164
+ const isSafe = allowedPrefixes.some(prefix => realPath === prefix || realPath.startsWith(prefix + path.sep));
167165
+ if (!isSafe)
167166
+ return null;
167167
+ return this.readFile(realPath);
167168
+ }
167169
+ // 3. .visor-license in cwd
167170
+ const cwdPath = path.join(process.cwd(), '.visor-license');
167171
+ const cwdToken = this.readFile(cwdPath);
167172
+ if (cwdToken)
167173
+ return cwdToken;
167174
+ // 4. ~/.config/visor/.visor-license
167175
+ const home = process.env.HOME || process.env.USERPROFILE || '';
167176
+ if (home) {
167177
+ const configPath = path.join(home, '.config', 'visor', '.visor-license');
167178
+ const configToken = this.readFile(configPath);
167179
+ if (configToken)
167180
+ return configToken;
167181
+ }
167182
+ return null;
167183
+ }
167184
+ readFile(filePath) {
167185
+ try {
167186
+ return fs.readFileSync(filePath, 'utf-8').trim();
167187
+ }
167188
+ catch {
167189
+ return null;
167190
+ }
167191
+ }
167192
+ verifyAndDecode(token) {
167193
+ try {
167194
+ const parts = token.split('.');
167195
+ if (parts.length !== 3)
167196
+ return null;
167197
+ const [headerB64, payloadB64, signatureB64] = parts;
167198
+ // Decode header to verify algorithm
167199
+ const header = JSON.parse(Buffer.from(headerB64, 'base64url').toString());
167200
+ if (header.alg !== 'EdDSA')
167201
+ return null;
167202
+ // Verify signature
167203
+ const data = `${headerB64}.${payloadB64}`;
167204
+ const signature = Buffer.from(signatureB64, 'base64url');
167205
+ const publicKey = crypto.createPublicKey(LicenseValidator.PUBLIC_KEY);
167206
+ // Validate that the loaded public key is actually Ed25519 (OID 1.3.101.112).
167207
+ // This prevents algorithm-confusion attacks if the embedded key were ever
167208
+ // swapped to a different type.
167209
+ if (publicKey.asymmetricKeyType !== 'ed25519') {
167210
+ return null;
167211
+ }
167212
+ // Ed25519 verification: algorithm must be null because EdDSA performs its
167213
+ // own internal hashing (SHA-512) — passing a digest algorithm here would
167214
+ // cause Node.js to throw. The key type is validated above.
167215
+ const isValid = crypto.verify(null, Buffer.from(data), publicKey, signature);
167216
+ if (!isValid)
167217
+ return null;
167218
+ // Decode payload
167219
+ const payload = JSON.parse(Buffer.from(payloadB64, 'base64url').toString());
167220
+ // Validate required fields
167221
+ if (!payload.org ||
167222
+ !Array.isArray(payload.features) ||
167223
+ typeof payload.exp !== 'number' ||
167224
+ typeof payload.iat !== 'number' ||
167225
+ !payload.sub) {
167226
+ return null;
167227
+ }
167228
+ // Check expiry (with grace period)
167229
+ const now = Date.now();
167230
+ const expiryMs = payload.exp * 1000;
167231
+ if (now >= expiryMs + LicenseValidator.GRACE_PERIOD) {
167232
+ return null;
167233
+ }
167234
+ return payload;
167235
+ }
167236
+ catch {
167237
+ return null;
167238
+ }
167239
+ }
167240
+ }
167241
+ exports.LicenseValidator = LicenseValidator;
167242
+
167243
+
167244
+ /***/ }),
167245
+
167246
+ /***/ 87068:
167247
+ /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
167248
+
167249
+ "use strict";
167250
+
167251
+ /**
167252
+ * Copyright (c) ProbeLabs. All rights reserved.
167253
+ * Licensed under the Elastic License 2.0; you may not use this file except
167254
+ * in compliance with the Elastic License 2.0.
167255
+ */
167256
+ var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
167257
+ if (k2 === undefined) k2 = k;
167258
+ var desc = Object.getOwnPropertyDescriptor(m, k);
167259
+ if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
167260
+ desc = { enumerable: true, get: function() { return m[k]; } };
167261
+ }
167262
+ Object.defineProperty(o, k2, desc);
167263
+ }) : (function(o, m, k, k2) {
167264
+ if (k2 === undefined) k2 = k;
167265
+ o[k2] = m[k];
167266
+ }));
167267
+ var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
167268
+ Object.defineProperty(o, "default", { enumerable: true, value: v });
167269
+ }) : function(o, v) {
167270
+ o["default"] = v;
167271
+ });
167272
+ var __importStar = (this && this.__importStar) || (function () {
167273
+ var ownKeys = function(o) {
167274
+ ownKeys = Object.getOwnPropertyNames || function (o) {
167275
+ var ar = [];
167276
+ for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
167277
+ return ar;
167278
+ };
167279
+ return ownKeys(o);
167280
+ };
167281
+ return function (mod) {
167282
+ if (mod && mod.__esModule) return mod;
167283
+ var result = {};
167284
+ if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
167285
+ __setModuleDefault(result, mod);
167286
+ return result;
167287
+ };
167288
+ })();
167289
+ Object.defineProperty(exports, "__esModule", ({ value: true }));
167290
+ exports.loadEnterprisePolicyEngine = loadEnterprisePolicyEngine;
167291
+ exports.loadEnterpriseStoreBackend = loadEnterpriseStoreBackend;
167292
+ const default_engine_1 = __nccwpck_require__(93866);
167293
+ /**
167294
+ * Load the enterprise policy engine if licensed, otherwise return the default no-op engine.
167295
+ *
167296
+ * This is the sole import boundary between OSS and enterprise code. Core code
167297
+ * must only import from this module (via dynamic `await import()`), never from
167298
+ * individual enterprise submodules.
167299
+ */
167300
+ async function loadEnterprisePolicyEngine(config) {
167301
+ try {
167302
+ const { LicenseValidator } = await Promise.resolve().then(() => __importStar(__nccwpck_require__(50069)));
167303
+ const validator = new LicenseValidator();
167304
+ const license = await validator.loadAndValidate();
167305
+ if (!license || !validator.hasFeature('policy')) {
167306
+ return new default_engine_1.DefaultPolicyEngine();
167307
+ }
167308
+ if (validator.isInGracePeriod()) {
167309
+ // eslint-disable-next-line no-console
167310
+ console.warn('[visor:enterprise] License has expired but is within the 72-hour grace period. ' +
167311
+ 'Please renew your license.');
167312
+ }
167313
+ const { OpaPolicyEngine } = await Promise.resolve().then(() => __importStar(__nccwpck_require__(39530)));
167314
+ const engine = new OpaPolicyEngine(config);
167315
+ await engine.initialize(config);
167316
+ return engine;
167317
+ }
167318
+ catch (err) {
167319
+ // Enterprise code not available or initialization failed
167320
+ const msg = err instanceof Error ? err.message : String(err);
167321
+ try {
167322
+ const { logger } = __nccwpck_require__(86999);
167323
+ logger.warn(`[PolicyEngine] Enterprise policy init failed, falling back to default: ${msg}`);
167324
+ }
167325
+ catch {
167326
+ // silent
167327
+ }
167328
+ return new default_engine_1.DefaultPolicyEngine();
167329
+ }
167330
+ }
167331
+ /**
167332
+ * Load the enterprise schedule store backend if licensed.
167333
+ *
167334
+ * @param driver Database driver ('postgresql', 'mysql', or 'mssql')
167335
+ * @param storageConfig Storage configuration with connection details
167336
+ * @param haConfig Optional HA configuration
167337
+ * @throws Error if enterprise license is not available or missing 'scheduler-sql' feature
167338
+ */
167339
+ async function loadEnterpriseStoreBackend(driver, storageConfig, haConfig) {
167340
+ const { LicenseValidator } = await Promise.resolve().then(() => __importStar(__nccwpck_require__(50069)));
167341
+ const validator = new LicenseValidator();
167342
+ const license = await validator.loadAndValidate();
167343
+ if (!license || !validator.hasFeature('scheduler-sql')) {
167344
+ throw new Error(`The ${driver} schedule storage driver requires a Visor Enterprise license ` +
167345
+ `with the 'scheduler-sql' feature. Please upgrade or use driver: 'sqlite' (default).`);
167346
+ }
167347
+ if (validator.isInGracePeriod()) {
167348
+ // eslint-disable-next-line no-console
167349
+ console.warn('[visor:enterprise] License has expired but is within the 72-hour grace period. ' +
167350
+ 'Please renew your license.');
167351
+ }
167352
+ const { KnexStoreBackend } = await Promise.resolve().then(() => __importStar(__nccwpck_require__(63737)));
167353
+ return new KnexStoreBackend(driver, storageConfig, haConfig);
167354
+ }
167355
+
167356
+
167357
+ /***/ }),
167358
+
167359
+ /***/ 628:
167360
+ /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
167361
+
167362
+ "use strict";
167363
+
167364
+ /**
167365
+ * Copyright (c) ProbeLabs. All rights reserved.
167366
+ * Licensed under the Elastic License 2.0; you may not use this file except
167367
+ * in compliance with the Elastic License 2.0.
167368
+ */
167369
+ var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
167370
+ if (k2 === undefined) k2 = k;
167371
+ var desc = Object.getOwnPropertyDescriptor(m, k);
167372
+ if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
167373
+ desc = { enumerable: true, get: function() { return m[k]; } };
167374
+ }
167375
+ Object.defineProperty(o, k2, desc);
167376
+ }) : (function(o, m, k, k2) {
167377
+ if (k2 === undefined) k2 = k;
167378
+ o[k2] = m[k];
167379
+ }));
167380
+ var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
167381
+ Object.defineProperty(o, "default", { enumerable: true, value: v });
167382
+ }) : function(o, v) {
167383
+ o["default"] = v;
167384
+ });
167385
+ var __importStar = (this && this.__importStar) || (function () {
167386
+ var ownKeys = function(o) {
167387
+ ownKeys = Object.getOwnPropertyNames || function (o) {
167388
+ var ar = [];
167389
+ for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
167390
+ return ar;
167391
+ };
167392
+ return ownKeys(o);
167393
+ };
167394
+ return function (mod) {
167395
+ if (mod && mod.__esModule) return mod;
167396
+ var result = {};
167397
+ if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
167398
+ __setModuleDefault(result, mod);
167399
+ return result;
167400
+ };
167401
+ })();
167402
+ Object.defineProperty(exports, "__esModule", ({ value: true }));
167403
+ exports.OpaCompiler = void 0;
167404
+ const fs = __importStar(__nccwpck_require__(79896));
167405
+ const path = __importStar(__nccwpck_require__(16928));
167406
+ const os = __importStar(__nccwpck_require__(70857));
167407
+ const crypto = __importStar(__nccwpck_require__(76982));
167408
+ const child_process_1 = __nccwpck_require__(35317);
167409
+ /**
167410
+ * OPA Rego Compiler - compiles .rego policy files to WASM bundles using the `opa` CLI.
167411
+ *
167412
+ * Handles:
167413
+ * - Resolving input paths to WASM bytes (direct .wasm, directory with policy.wasm, or .rego files)
167414
+ * - Compiling .rego files to WASM via `opa build`
167415
+ * - Caching compiled bundles based on content hashes
167416
+ * - Extracting policy.wasm from OPA tar.gz bundles
167417
+ *
167418
+ * Requires:
167419
+ * - `opa` CLI on PATH (only when auto-compiling .rego files)
167420
+ */
167421
+ class OpaCompiler {
167422
+ static CACHE_DIR = path.join(os.tmpdir(), 'visor-opa-cache');
167423
+ /**
167424
+ * Resolve the input paths to WASM bytes.
167425
+ *
167426
+ * Strategy:
167427
+ * 1. If any path is a .wasm file, read it directly
167428
+ * 2. If a directory contains policy.wasm, read it
167429
+ * 3. Otherwise, collect all .rego files and auto-compile via `opa build`
167430
+ */
167431
+ async resolveWasmBytes(paths) {
167432
+ // Collect .rego files and check for existing .wasm
167433
+ const regoFiles = [];
167434
+ for (const p of paths) {
167435
+ const resolved = path.resolve(p);
167436
+ // Reject paths containing '..' after resolution (path traversal)
167437
+ if (path.normalize(resolved).includes('..')) {
167438
+ throw new Error(`Policy path contains traversal sequences: ${p}`);
167439
+ }
167440
+ // Direct .wasm file
167441
+ if (resolved.endsWith('.wasm') && fs.existsSync(resolved)) {
167442
+ return fs.readFileSync(resolved);
167443
+ }
167444
+ if (!fs.existsSync(resolved))
167445
+ continue;
167446
+ const stat = fs.statSync(resolved);
167447
+ if (stat.isDirectory()) {
167448
+ // Check for pre-compiled policy.wasm in directory
167449
+ const wasmCandidate = path.join(resolved, 'policy.wasm');
167450
+ if (fs.existsSync(wasmCandidate)) {
167451
+ return fs.readFileSync(wasmCandidate);
167452
+ }
167453
+ // Collect all .rego files from directory
167454
+ const files = fs.readdirSync(resolved);
167455
+ for (const f of files) {
167456
+ if (f.endsWith('.rego')) {
167457
+ regoFiles.push(path.join(resolved, f));
167458
+ }
167459
+ }
167460
+ }
167461
+ else if (resolved.endsWith('.rego')) {
167462
+ regoFiles.push(resolved);
167463
+ }
167464
+ }
167465
+ if (regoFiles.length === 0) {
167466
+ throw new Error(`OPA WASM evaluator: no .wasm bundle or .rego files found in: ${paths.join(', ')}`);
167467
+ }
167468
+ // Auto-compile .rego -> .wasm
167469
+ return this.compileRego(regoFiles);
167470
+ }
167471
+ /**
167472
+ * Auto-compile .rego files to a WASM bundle using the `opa` CLI.
167473
+ *
167474
+ * Caches the compiled bundle based on a content hash of all input .rego files
167475
+ * so subsequent runs skip compilation if policies haven't changed.
167476
+ */
167477
+ compileRego(regoFiles) {
167478
+ // Check that `opa` CLI is available
167479
+ try {
167480
+ (0, child_process_1.execFileSync)('opa', ['version'], { stdio: 'pipe' });
167481
+ }
167482
+ catch {
167483
+ throw new Error('OPA CLI (`opa`) not found on PATH. Install it from https://www.openpolicyagent.org/docs/latest/#running-opa\n' +
167484
+ 'Or pre-compile your .rego files: opa build -t wasm -e visor -o bundle.tar.gz ' +
167485
+ regoFiles.join(' '));
167486
+ }
167487
+ // Compute content hash for cache key
167488
+ const hash = crypto.createHash('sha256');
167489
+ for (const f of regoFiles.sort()) {
167490
+ hash.update(fs.readFileSync(f));
167491
+ hash.update(f); // include filename for disambiguation
167492
+ }
167493
+ const cacheKey = hash.digest('hex').slice(0, 16);
167494
+ const cacheDir = OpaCompiler.CACHE_DIR;
167495
+ const cachedWasm = path.join(cacheDir, `${cacheKey}.wasm`);
167496
+ // Return cached bundle if still valid
167497
+ if (fs.existsSync(cachedWasm)) {
167498
+ return fs.readFileSync(cachedWasm);
167499
+ }
167500
+ // Compile to WASM via opa build
167501
+ fs.mkdirSync(cacheDir, { recursive: true });
167502
+ const bundleTar = path.join(cacheDir, `${cacheKey}-bundle.tar.gz`);
167503
+ try {
167504
+ const args = [
167505
+ 'build',
167506
+ '-t',
167507
+ 'wasm',
167508
+ '-e',
167509
+ 'visor', // entrypoint: the visor package tree
167510
+ '-o',
167511
+ bundleTar,
167512
+ ...regoFiles,
167513
+ ];
167514
+ (0, child_process_1.execFileSync)('opa', args, {
167515
+ stdio: 'pipe',
167516
+ timeout: 30000,
167517
+ });
167518
+ }
167519
+ catch (err) {
167520
+ const stderr = err?.stderr?.toString() || '';
167521
+ throw new Error(`Failed to compile .rego files to WASM:\n${stderr}\n` +
167522
+ 'Ensure your .rego files are valid and the `opa` CLI is installed.');
167523
+ }
167524
+ // Extract policy.wasm from the tar.gz bundle
167525
+ // OPA bundles are tar.gz with /policy.wasm inside
167526
+ try {
167527
+ (0, child_process_1.execFileSync)('tar', ['-xzf', bundleTar, '-C', cacheDir, '/policy.wasm'], {
167528
+ stdio: 'pipe',
167529
+ });
167530
+ const extractedWasm = path.join(cacheDir, 'policy.wasm');
167531
+ if (fs.existsSync(extractedWasm)) {
167532
+ // Move to cache-key named file
167533
+ fs.renameSync(extractedWasm, cachedWasm);
167534
+ }
167535
+ }
167536
+ catch {
167537
+ // Some tar implementations don't like leading /
167538
+ try {
167539
+ (0, child_process_1.execFileSync)('tar', ['-xzf', bundleTar, '-C', cacheDir, 'policy.wasm'], {
167540
+ stdio: 'pipe',
167541
+ });
167542
+ const extractedWasm = path.join(cacheDir, 'policy.wasm');
167543
+ if (fs.existsSync(extractedWasm)) {
167544
+ fs.renameSync(extractedWasm, cachedWasm);
167545
+ }
167546
+ }
167547
+ catch (err2) {
167548
+ throw new Error(`Failed to extract policy.wasm from OPA bundle: ${err2?.message || err2}`);
167549
+ }
167550
+ }
167551
+ // Clean up tar
167552
+ try {
167553
+ fs.unlinkSync(bundleTar);
167554
+ }
167555
+ catch { }
167556
+ if (!fs.existsSync(cachedWasm)) {
167557
+ throw new Error('OPA build succeeded but policy.wasm was not found in the bundle');
167558
+ }
167559
+ return fs.readFileSync(cachedWasm);
167560
+ }
167561
+ }
167562
+ exports.OpaCompiler = OpaCompiler;
167563
+
167564
+
167565
+ /***/ }),
167566
+
167567
+ /***/ 44693:
167568
+ /***/ ((__unused_webpack_module, exports) => {
167569
+
167570
+ "use strict";
167571
+
167572
+ /**
167573
+ * Copyright (c) ProbeLabs. All rights reserved.
167574
+ * Licensed under the Elastic License 2.0; you may not use this file except
167575
+ * in compliance with the Elastic License 2.0.
167576
+ */
167577
+ Object.defineProperty(exports, "__esModule", ({ value: true }));
167578
+ exports.OpaHttpEvaluator = void 0;
167579
+ /**
167580
+ * OPA HTTP Evaluator - evaluates policies via an external OPA server's REST API.
167581
+ *
167582
+ * Uses the built-in `fetch` API (Node 18+), so no extra dependencies are needed.
167583
+ */
167584
/**
 * OPA HTTP Evaluator - evaluates policies via an external OPA server's REST API.
 *
 * Uses the built-in `fetch` API (Node 18+), so no extra dependencies are needed.
 */
class OpaHttpEvaluator {
    baseUrl;
    timeout;
    /**
     * @param {string} baseUrl - Base URL of the OPA server (http/https only).
     * @param {number} [timeout=5000] - Per-request timeout in milliseconds.
     * @throws {Error} If the URL is malformed, uses a non-HTTP(S) protocol, or
     *   points at a loopback/link-local/private/metadata address (SSRF guard).
     */
    constructor(baseUrl, timeout = 5000) {
        // Validate URL format and protocol
        let parsed;
        try {
            parsed = new URL(baseUrl);
        }
        catch {
            throw new Error(`OPA HTTP evaluator: invalid URL: ${baseUrl}`);
        }
        if (!['http:', 'https:'].includes(parsed.protocol)) {
            throw new Error(`OPA HTTP evaluator: url must use http:// or https:// protocol, got: ${baseUrl}`);
        }
        // Block cloud metadata, loopback, link-local, and private network addresses
        const hostname = parsed.hostname;
        if (this.isBlockedHostname(hostname)) {
            throw new Error(`OPA HTTP evaluator: url must not point to internal, loopback, or private network addresses`);
        }
        // Normalize: strip trailing slash(es)
        this.baseUrl = baseUrl.replace(/\/+$/, '');
        this.timeout = timeout;
    }
    /**
     * Check if a hostname is blocked due to SSRF concerns.
     *
     * Blocks:
     * - Loopback addresses (127.x.x.x, localhost, 0.0.0.0, ::1)
     * - Link-local addresses (169.254.x.x, fe80::/10)
     * - Private networks (10.x.x.x, 172.16-31.x.x, 192.168.x.x)
     * - IPv6 unique local addresses (fc00::/7)
     * - Cloud metadata services (*.internal)
     * - IPv4-mapped IPv6 addresses (::ffff:a.b.c.d or ::ffff:hhhh:hhhh)
     *   that embed a blocked IPv4 address
     * - All-numeric dotted hosts with out-of-range octets (fail closed)
     */
    isBlockedHostname(hostname) {
        if (!hostname)
            return true; // fail closed on empty hostnames
        // Normalize hostname: lowercase and remove brackets for IPv6
        const normalized = hostname.toLowerCase().replace(/^\[|\]$/g, '');
        // Block .internal domains (cloud metadata services)
        if (normalized === 'metadata.google.internal' || normalized.endsWith('.internal')) {
            return true;
        }
        // Block localhost variants
        if (normalized === 'localhost' || normalized === 'localhost.localdomain') {
            return true;
        }
        // Block IPv6 loopback
        if (normalized === '::1' || normalized === '0:0:0:0:0:0:0:1') {
            return true;
        }
        // IPv4-mapped IPv6 (e.g. ::ffff:127.0.0.1 or its hex form ::ffff:7f00:1)
        // would otherwise bypass every IPv4 range check below: extract the
        // embedded IPv4 address and re-check it.
        const mapped = normalized.match(/^(?:0:0:0:0:0:ffff|::ffff):((?:\d{1,3}\.){3}\d{1,3}|[0-9a-f]{1,4}:[0-9a-f]{1,4})$/);
        if (mapped) {
            const tail = mapped[1];
            let dotted;
            if (tail.includes('.')) {
                dotted = tail;
            }
            else {
                // Two 16-bit hex groups encode the four IPv4 octets.
                const [hi, lo] = tail.split(':').map(h => parseInt(h, 16));
                dotted = `${hi >> 8}.${hi & 0xff}.${lo >> 8}.${lo & 0xff}`;
            }
            return this.isBlockedHostname(dotted);
        }
        // Check IPv4 patterns
        const ipv4Pattern = /^(\d{1,3})\.(\d{1,3})\.(\d{1,3})\.(\d{1,3})$/;
        const ipv4Match = normalized.match(ipv4Pattern);
        if (ipv4Match) {
            const octets = ipv4Match.slice(1, 5).map(Number);
            // An all-numeric dotted host with an out-of-range octet is not a
            // valid public hostname; fail closed instead of allowing it.
            // (Previously this returned false, skipping every range check.)
            if (octets.some(octet => octet > 255)) {
                return true;
            }
            const [a, b] = octets;
            // Block loopback: 127.0.0.0/8
            if (a === 127) {
                return true;
            }
            // Block 0.0.0.0/8 (this host)
            if (a === 0) {
                return true;
            }
            // Block link-local: 169.254.0.0/16
            if (a === 169 && b === 254) {
                return true;
            }
            // Block private networks
            // 10.0.0.0/8
            if (a === 10) {
                return true;
            }
            // 172.16.0.0/12 (172.16.x.x through 172.31.x.x)
            if (a === 172 && b >= 16 && b <= 31) {
                return true;
            }
            // 192.168.0.0/16
            if (a === 192 && b === 168) {
                return true;
            }
        }
        // Check IPv6 patterns
        // Block unique local addresses: fc00::/7 (fc.. and fd.. prefixes)
        if (normalized.startsWith('fd') || normalized.startsWith('fc')) {
            return true;
        }
        // Block link-local: fe80::/10
        if (normalized.startsWith('fe80:')) {
            return true;
        }
        return false;
    }
    /**
     * Evaluate a policy rule against an input document via OPA REST API.
     *
     * @param input - The input document to evaluate
     * @param rulePath - OPA rule path (e.g., 'visor/check/execute')
     * @returns The result object from OPA, or undefined when OPA returned no `result`
     * @throws {Error} On non-2xx responses, JSON parse failures, or timeout (abort)
     */
    async evaluate(input, rulePath) {
        // OPA Data API: POST /v1/data/<path>
        const encodedPath = rulePath
            .split('/')
            .map(s => encodeURIComponent(s))
            .join('/');
        const url = `${this.baseUrl}/v1/data/${encodedPath}`;
        // Abort the request if OPA does not answer within the configured timeout.
        const controller = new AbortController();
        const timer = setTimeout(() => controller.abort(), this.timeout);
        try {
            const response = await fetch(url, {
                method: 'POST',
                headers: { 'Content-Type': 'application/json' },
                body: JSON.stringify({ input }),
                signal: controller.signal,
            });
            if (!response.ok) {
                throw new Error(`OPA HTTP ${response.status}: ${response.statusText}`);
            }
            let body;
            try {
                body = await response.json();
            }
            catch (jsonErr) {
                throw new Error(`OPA HTTP evaluator: failed to parse JSON response: ${jsonErr instanceof Error ? jsonErr.message : String(jsonErr)}`);
            }
            // OPA returns { result: { ... } }
            return body?.result;
        }
        finally {
            // Always release the abort timer, success or failure.
            clearTimeout(timer);
        }
    }
    /** No persistent connections to close. */
    async shutdown() {
        // No persistent connections to close
    }
}
167726
+ exports.OpaHttpEvaluator = OpaHttpEvaluator;
167727
+
167728
+
167729
+ /***/ }),
167730
+
167731
+ /***/ 39530:
167732
+ /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
167733
+
167734
+ "use strict";
167735
+
167736
+ /**
167737
+ * Copyright (c) ProbeLabs. All rights reserved.
167738
+ * Licensed under the Elastic License 2.0; you may not use this file except
167739
+ * in compliance with the Elastic License 2.0.
167740
+ */
167741
+ Object.defineProperty(exports, "__esModule", ({ value: true }));
167742
+ exports.OpaPolicyEngine = void 0;
167743
+ const opa_wasm_evaluator_1 = __nccwpck_require__(8613);
167744
+ const opa_http_evaluator_1 = __nccwpck_require__(44693);
167745
+ const policy_input_builder_1 = __nccwpck_require__(17117);
167746
+ /**
167747
+ * Enterprise OPA Policy Engine.
167748
+ *
167749
+ * Wraps both WASM (local) and HTTP (remote) OPA evaluators behind the
167750
+ * OSS PolicyEngine interface. All OPA input building and role resolution
167751
+ * is handled internally — the OSS call sites pass only plain types.
167752
+ */
167753
/**
 * Enterprise OPA Policy Engine.
 *
 * Wraps both WASM (local) and HTTP (remote) OPA evaluators behind the
 * OSS PolicyEngine interface. All OPA input building and role resolution
 * is handled internally — the OSS call sites pass only plain types.
 */
class OpaPolicyEngine {
    // Active evaluator (OpaWasmEvaluator or OpaHttpEvaluator); null means
    // policy enforcement is disabled and every evaluate* returns { allowed: true }.
    evaluator = null;
    // Failure mode: 'deny' (default), 'allow', or 'warn' (allow but flag).
    fallback;
    // Per-evaluation timeout in milliseconds (default 5000).
    timeout;
    // Raw policy config, retained so setActorContext() can rebuild the input builder.
    config;
    inputBuilder = null;
    logger = null;
    /** @param config - Policy config; `fallback` defaults to 'deny', `timeout` to 5000. */
    constructor(config) {
        this.config = config;
        this.fallback = config.fallback || 'deny';
        this.timeout = config.timeout || 5000;
    }
    /**
     * Create the evaluator and input builder. Actor/repo/PR context is read
     * from GitHub Actions environment variables; missing values stay undefined.
     *
     * @param config - engine: 'local' (WASM from `rules`, optional `data`),
     *   'remote' (HTTP server at `url`), anything else disables enforcement.
     * @throws {Error} When 'local' lacks `rules` or 'remote' lacks `url`.
     */
    async initialize(config) {
        // Resolve logger once at initialization
        try {
            this.logger = (__nccwpck_require__(86999).logger);
        }
        catch {
            // logger not available in this context
        }
        // Build actor/repo context from environment (available at engine init time)
        const actor = {
            authorAssociation: process.env.VISOR_AUTHOR_ASSOCIATION,
            login: process.env.VISOR_AUTHOR_LOGIN || process.env.GITHUB_ACTOR,
            // Local mode = not running under GitHub Actions.
            isLocalMode: !process.env.GITHUB_ACTIONS,
        };
        const repo = {
            owner: process.env.GITHUB_REPOSITORY_OWNER,
            // GITHUB_REPOSITORY is "owner/name"; take the name part.
            name: process.env.GITHUB_REPOSITORY?.split('/')[1],
            branch: process.env.GITHUB_HEAD_REF,
            baseBranch: process.env.GITHUB_BASE_REF,
            event: process.env.GITHUB_EVENT_NAME,
        };
        const prNum = process.env.GITHUB_PR_NUMBER
            ? parseInt(process.env.GITHUB_PR_NUMBER, 10)
            : undefined;
        const pullRequest = {
            // Number.isFinite filters out NaN from an unparsable GITHUB_PR_NUMBER.
            number: prNum !== undefined && Number.isFinite(prNum) ? prNum : undefined,
        };
        this.inputBuilder = new policy_input_builder_1.PolicyInputBuilder(config, actor, repo, pullRequest);
        if (config.engine === 'local') {
            if (!config.rules) {
                throw new Error('OPA local mode requires `policy.rules` path to .wasm or .rego files');
            }
            const wasm = new opa_wasm_evaluator_1.OpaWasmEvaluator();
            await wasm.initialize(config.rules);
            if (config.data) {
                wasm.loadData(config.data);
            }
            this.evaluator = wasm;
        }
        else if (config.engine === 'remote') {
            if (!config.url) {
                throw new Error('OPA remote mode requires `policy.url` pointing to OPA server');
            }
            this.evaluator = new opa_http_evaluator_1.OpaHttpEvaluator(config.url, this.timeout);
        }
        else {
            // Unknown/absent engine value: policy enforcement disabled.
            this.evaluator = null;
        }
    }
    /**
     * Update actor/repo/PR context (e.g., after PR info becomes available).
     * Called by the enterprise loader when engine context is enriched.
     */
    setActorContext(actor, repo, pullRequest) {
        this.inputBuilder = new policy_input_builder_1.PolicyInputBuilder(this.config, actor, repo, pullRequest);
    }
    /**
     * Decide whether a check may run.
     * @param checkId - The check's id.
     * @param checkConfig - The check's config object (type/group/tags/...);
     *   non-object values are treated as an empty config.
     * @returns Policy decision; { allowed: true } when enforcement is disabled.
     */
    async evaluateCheckExecution(checkId, checkConfig) {
        if (!this.evaluator || !this.inputBuilder)
            return { allowed: true };
        const cfg = checkConfig && typeof checkConfig === 'object'
            ? checkConfig
            : {};
        // A per-check `policy` block may override the rule path (see resolveRulePath).
        const policyOverride = cfg.policy;
        const input = this.inputBuilder.forCheckExecution({
            id: checkId,
            type: cfg.type || 'ai',
            group: cfg.group,
            tags: cfg.tags,
            criticality: cfg.criticality,
            sandbox: cfg.sandbox,
            policy: policyOverride,
        });
        return this.doEvaluate(input, this.resolveRulePath('check.execute', policyOverride?.rule));
    }
    /** Decide whether an MCP tool call is permitted (rule: visor/tool/invoke). */
    async evaluateToolInvocation(serverName, methodName, transport) {
        if (!this.evaluator || !this.inputBuilder)
            return { allowed: true };
        const input = this.inputBuilder.forToolInvocation(serverName, methodName, transport);
        return this.doEvaluate(input, 'visor/tool/invoke');
    }
    /** Resolve a check's capability grants (rule: visor/capability/resolve). */
    async evaluateCapabilities(checkId, capabilities) {
        if (!this.evaluator || !this.inputBuilder)
            return { allowed: true };
        const input = this.inputBuilder.forCapabilityResolve(checkId, capabilities);
        return this.doEvaluate(input, 'visor/capability/resolve');
    }
    /** Shut down the active evaluator (if any) and drop all engine state. */
    async shutdown() {
        if (this.evaluator && 'shutdown' in this.evaluator) {
            await this.evaluator.shutdown();
        }
        this.evaluator = null;
        this.inputBuilder = null;
    }
    /**
     * Compute the OPA rule path: an override is used as-is (prefixed with
     * 'visor/' if missing); otherwise 'check.execute' → 'visor/check/execute'.
     */
    resolveRulePath(defaultScope, override) {
        if (override) {
            return override.startsWith('visor/') ? override : `visor/${override}`;
        }
        return `visor/${defaultScope.replace(/\./g, '/')}`;
    }
    /**
     * Run one evaluation with a timeout and apply fallback/warn semantics.
     * Any error (including timeout) resolves to the configured fallback —
     * this method never throws.
     */
    async doEvaluate(input, rulePath) {
        try {
            this.logger?.debug(`[PolicyEngine] Evaluating ${rulePath}`, JSON.stringify(input));
            // Engine-level timeout; for the HTTP evaluator this layers on top of
            // its own AbortController timeout.
            let timer;
            const timeoutPromise = new Promise((_resolve, reject) => {
                timer = setTimeout(() => reject(new Error('policy evaluation timeout')), this.timeout);
            });
            try {
                const result = await Promise.race([this.rawEvaluate(input, rulePath), timeoutPromise]);
                const decision = this.parseDecision(result);
                // In warn mode, override denied decisions to allowed but flag as warn
                if (!decision.allowed && this.fallback === 'warn') {
                    decision.allowed = true;
                    decision.warn = true;
                    decision.reason = `audit: ${decision.reason || 'policy denied'}`;
                }
                this.logger?.debug(`[PolicyEngine] Decision for ${rulePath}: allowed=${decision.allowed}, warn=${decision.warn || false}, reason=${decision.reason || 'none'}`);
                return decision;
            }
            finally {
                // Clear the timer so the pending rejection never fires.
                if (timer)
                    clearTimeout(timer);
            }
        }
        catch (err) {
            const msg = err instanceof Error ? err.message : String(err);
            this.logger?.warn(`[PolicyEngine] Evaluation failed for ${rulePath}: ${msg}`);
            // Fail according to fallback: 'allow'/'warn' permit, 'deny' blocks.
            return {
                allowed: this.fallback === 'allow' || this.fallback === 'warn',
                warn: this.fallback === 'warn' ? true : undefined,
                reason: `policy evaluation failed, fallback=${this.fallback}`,
            };
        }
    }
    /** Dispatch to the active evaluator; WASM results need path navigation. */
    async rawEvaluate(input, rulePath) {
        if (this.evaluator instanceof opa_wasm_evaluator_1.OpaWasmEvaluator) {
            const result = await this.evaluator.evaluate(input);
            // WASM compiled with `-e visor` entrypoint returns the full visor package tree.
            // Navigate to the specific rule subtree using rulePath segments.
            // e.g., 'visor/check/execute' → result.check.execute
            return this.navigateWasmResult(result, rulePath);
        }
        return this.evaluator.evaluate(input, rulePath);
    }
    /**
     * Navigate nested OPA WASM result tree to reach the specific rule's output.
     * The WASM entrypoint `-e visor` means the result root IS the visor package,
     * so we strip the `visor/` prefix and walk the remaining segments.
     */
    navigateWasmResult(result, rulePath) {
        if (!result || typeof result !== 'object')
            return result;
        // Strip the 'visor/' prefix (matches our compilation entrypoint)
        const segments = rulePath.replace(/^visor\//, '').split('/');
        let current = result;
        for (const seg of segments) {
            if (current && typeof current === 'object' && seg in current) {
                current = current[seg];
            }
            else {
                return undefined; // path not found in result tree
            }
        }
        return current;
    }
    /**
     * Convert a raw OPA result into a decision object.
     * A missing result resolves via fallback; otherwise a result is denied
     * only when it carries an explicit `allowed: false`.
     */
    parseDecision(result) {
        if (result === undefined || result === null) {
            return {
                allowed: this.fallback === 'allow' || this.fallback === 'warn',
                warn: this.fallback === 'warn' ? true : undefined,
                reason: this.fallback === 'warn' ? 'audit: no policy result' : 'no policy result',
            };
        }
        // NOTE(review): a bare boolean result (e.g. a Rego rule that evaluates
        // to `false`) has no `.allowed` property, so it is treated as allowed
        // here — confirm this is intended for boolean-valued rules.
        const allowed = result.allowed !== false;
        const decision = {
            allowed,
            reason: result.reason,
        };
        if (result.capabilities) {
            decision.capabilities = result.capabilities;
        }
        return decision;
    }
}
167948
+ exports.OpaPolicyEngine = OpaPolicyEngine;
167949
+
167950
+
167951
+ /***/ }),
167952
+
167953
+ /***/ 8613:
167954
+ /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
167955
+
167956
+ "use strict";
167957
+
167958
+ /**
167959
+ * Copyright (c) ProbeLabs. All rights reserved.
167960
+ * Licensed under the Elastic License 2.0; you may not use this file except
167961
+ * in compliance with the Elastic License 2.0.
167962
+ */
167963
// TypeScript-emitted CommonJS interop helpers (equivalent to tslib's
// __createBinding / __setModuleDefault / __importStar). Generated code —
// do not hand-edit.
// Re-exports property `k` of module `m` on target `o` (as `k2`), using a
// live getter where property descriptors are available.
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    var desc = Object.getOwnPropertyDescriptor(m, k);
    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
      desc = { enumerable: true, get: function() { return m[k]; } };
    }
    Object.defineProperty(o, k2, desc);
}) : (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    o[k2] = m[k];
}));
// Attaches the module itself as the `default` property of the namespace object.
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
    Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
    o["default"] = v;
});
// Wraps a CommonJS module in an ES-module-like namespace object
// (pass-through when the module is already flagged __esModule).
var __importStar = (this && this.__importStar) || (function () {
    var ownKeys = function(o) {
        ownKeys = Object.getOwnPropertyNames || function (o) {
            var ar = [];
            for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
            return ar;
        };
        return ownKeys(o);
    };
    return function (mod) {
        if (mod && mod.__esModule) return mod;
        var result = {};
        if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
        __setModuleDefault(result, mod);
        return result;
    };
})();
167996
+ Object.defineProperty(exports, "__esModule", ({ value: true }));
167997
+ exports.OpaWasmEvaluator = void 0;
167998
+ const fs = __importStar(__nccwpck_require__(79896));
167999
+ const path = __importStar(__nccwpck_require__(16928));
168000
+ const opa_compiler_1 = __nccwpck_require__(628);
168001
+ /**
168002
+ * OPA WASM Evaluator - loads and evaluates OPA policies locally.
168003
+ *
168004
+ * Supports three input formats:
168005
+ * 1. Pre-compiled `.wasm` bundle — loaded directly (fastest startup)
168006
+ * 2. `.rego` files or directory — auto-compiled to WASM via `opa build` CLI
168007
+ * 3. Directory with `policy.wasm` inside — loaded directly
168008
+ *
168009
+ * Compilation and caching of .rego files is delegated to {@link OpaCompiler}.
168010
+ *
168011
+ * Requires:
168012
+ * - `@open-policy-agent/opa-wasm` npm package (optional dep)
168013
+ * - `opa` CLI on PATH (only when auto-compiling .rego files)
168014
+ */
168015
/**
 * OPA WASM Evaluator - loads and evaluates OPA policies locally.
 *
 * Supports three input formats:
 * 1. Pre-compiled `.wasm` bundle — loaded directly (fastest startup)
 * 2. `.rego` files or directory — auto-compiled to WASM via `opa build` CLI
 * 3. Directory with `policy.wasm` inside — loaded directly
 *
 * Compilation and caching of .rego files is delegated to {@link OpaCompiler}.
 *
 * Requires:
 * - `@open-policy-agent/opa-wasm` npm package (optional dep)
 * - `opa` CLI on PATH (only when auto-compiling .rego files)
 */
class OpaWasmEvaluator {
    // Loaded opa-wasm policy instance; null until initialize() succeeds.
    policy = null;
    // External data document fed to policy.setData() on every evaluate().
    dataDocument = {};
    compiler = new opa_compiler_1.OpaCompiler();
    /**
     * Resolve (and, for .rego inputs, compile) the WASM bytes, then load the policy.
     * @param rulesPath - A path or array of paths to .wasm/.rego files or a directory.
     * @throws {Error} With an install hint when @open-policy-agent/opa-wasm is
     *   missing, or when the module exposes no `loadPolicy` function.
     */
    async initialize(rulesPath) {
        const paths = Array.isArray(rulesPath) ? rulesPath : [rulesPath];
        const wasmBytes = await this.compiler.resolveWasmBytes(paths);
        try {
            // Use createRequire to load the optional dep at runtime without ncc bundling it.
            // `new Function('id', 'return require(id)')` fails in ncc bundles because
            // `require` is not in the `new Function` scope. `createRequire` works correctly
            // because it creates a real Node.js require rooted at the given path.
            // eslint-disable-next-line @typescript-eslint/no-var-requires
            const { createRequire } = __nccwpck_require__(73339);
            const runtimeRequire = createRequire(__filename);
            const opaWasm = runtimeRequire('@open-policy-agent/opa-wasm');
            // The package may export loadPolicy directly or under `default`.
            const loadPolicy = opaWasm.loadPolicy || opaWasm.default?.loadPolicy;
            if (!loadPolicy) {
                throw new Error('loadPolicy not found in @open-policy-agent/opa-wasm');
            }
            this.policy = await loadPolicy(wasmBytes);
        }
        catch (err) {
            // Turn the common "optional dep missing" failure into an actionable message.
            if (err?.code === 'MODULE_NOT_FOUND' || err?.code === 'ERR_MODULE_NOT_FOUND') {
                throw new Error('OPA WASM evaluator requires @open-policy-agent/opa-wasm. ' +
                    'Install it with: npm install @open-policy-agent/opa-wasm');
            }
            throw err;
        }
    }
    /**
     * Load external data from a JSON file to use as the OPA data document.
     * The loaded data will be passed to `policy.setData()` during evaluation,
     * making it available in Rego via `data.<key>`.
     *
     * @param dataPath - Path to a JSON file (at most 10MB) containing an object.
     * @throws {Error} On traversal sequences, a missing file, an oversized file,
     *   malformed JSON, or a non-object top-level value.
     */
    loadData(dataPath) {
        const resolved = path.resolve(dataPath);
        if (path.normalize(resolved).includes('..')) {
            throw new Error(`Data path contains traversal sequences: ${dataPath}`);
        }
        if (!fs.existsSync(resolved)) {
            throw new Error(`OPA data file not found: ${resolved}`);
        }
        const stat = fs.statSync(resolved);
        if (stat.size > 10 * 1024 * 1024) {
            throw new Error(`OPA data file exceeds 10MB limit: ${resolved} (${stat.size} bytes)`);
        }
        const raw = fs.readFileSync(resolved, 'utf-8');
        // Parse first, then validate. Keeping the validation OUTSIDE the try
        // means the "Failed to parse" wrapper applies only to genuine
        // JSON.parse failures — previously the validation error was thrown
        // inside the same try and re-identified by string-matching its own
        // message, which was brittle.
        let parsed;
        try {
            parsed = JSON.parse(raw);
        }
        catch (err) {
            throw new Error(`Failed to parse OPA data file ${resolved}: ${err.message}`);
        }
        if (typeof parsed !== 'object' || parsed === null || Array.isArray(parsed)) {
            throw new Error('OPA data file must contain a JSON object (not an array or primitive)');
        }
        this.dataDocument = parsed;
    }
    /**
     * Evaluate the loaded policy against `input`.
     * @returns The first result-set entry's `result`, or undefined when the
     *   policy produced an empty result set.
     * @throws {Error} If initialize() has not completed successfully.
     */
    async evaluate(input) {
        if (!this.policy) {
            throw new Error('OPA WASM evaluator not initialized');
        }
        this.policy.setData(this.dataDocument);
        const resultSet = this.policy.evaluate(input);
        if (Array.isArray(resultSet) && resultSet.length > 0) {
            return resultSet[0].result;
        }
        return undefined;
    }
    /** Release the WASM policy instance; close()/free() are best-effort. */
    async shutdown() {
        if (this.policy) {
            // opa-wasm policy objects may have a close/free method for WASM cleanup
            if (typeof this.policy.close === 'function') {
                try {
                    this.policy.close();
                }
                catch { /* best-effort cleanup */ }
            }
            else if (typeof this.policy.free === 'function') {
                try {
                    this.policy.free();
                }
                catch { /* best-effort cleanup */ }
            }
        }
        this.policy = null;
    }
}
168107
+ exports.OpaWasmEvaluator = OpaWasmEvaluator;
168108
+
168109
+
168110
+ /***/ }),
168111
+
168112
+ /***/ 17117:
168113
+ /***/ ((__unused_webpack_module, exports) => {
168114
+
168115
+ "use strict";
168116
+
168117
+ /**
168118
+ * Copyright (c) ProbeLabs. All rights reserved.
168119
+ * Licensed under the Elastic License 2.0; you may not use this file except
168120
+ * in compliance with the Elastic License 2.0.
168121
+ */
168122
+ Object.defineProperty(exports, "__esModule", ({ value: true }));
168123
+ exports.PolicyInputBuilder = void 0;
168124
+ /**
168125
+ * Builds OPA-compatible input documents from engine context.
168126
+ *
168127
+ * Resolves actor roles from the `policy.roles` config section by matching
168128
+ * the actor's authorAssociation and login against role definitions.
168129
+ */
168130
/**
 * Builds OPA-compatible input documents from engine context.
 *
 * Resolves actor roles from the `policy.roles` config section by matching
 * the actor's authorAssociation and login against role definitions.
 */
class PolicyInputBuilder {
    roles;
    actor;
    repository;
    pullRequest;
    constructor(policyConfig, actor, repository, pullRequest) {
        this.roles = policyConfig.roles || {};
        this.actor = actor;
        this.repository = repository;
        this.pullRequest = pullRequest;
    }
    /** Resolve which roles apply to the current actor. */
    resolveRoles() {
        const matched = [];
        for (const [roleName, roleConfig] of Object.entries(this.roles)) {
            // A role applies when the actor's identity matches AND the
            // optional Slack-channel gate passes.
            if (this.matchesIdentity(roleConfig) && this.passesChannelGate(roleConfig)) {
                matched.push(roleName);
            }
        }
        return matched;
    }
    /** True when the actor satisfies any identity criterion of the role. */
    matchesIdentity(roleConfig) {
        const { authorAssociation, login, slack } = this.actor;
        // GitHub author association (e.g. OWNER, MEMBER)
        if (roleConfig.author_association &&
            authorAssociation &&
            roleConfig.author_association.includes(authorAssociation)) {
            return true;
        }
        // Explicit GitHub login allow-list
        if (roleConfig.users && login && roleConfig.users.includes(login)) {
            return true;
        }
        // Slack user ID match
        if (roleConfig.slack_users &&
            slack?.userId &&
            roleConfig.slack_users.includes(slack.userId)) {
            return true;
        }
        // Email match (case-insensitive)
        if (roleConfig.emails && slack?.email) {
            const wanted = slack.email.toLowerCase();
            if (roleConfig.emails.some(candidate => candidate.toLowerCase() === wanted)) {
                return true;
            }
        }
        // Note: teams-based role resolution requires GitHub API access (read:org scope)
        // and is not yet implemented. If configured, the role will not match via teams.
        return false;
    }
    /**
     * slack_channels gate: when configured and non-empty, the role only
     * applies if the trigger came from one of the listed Slack channels.
     */
    passesChannelGate(roleConfig) {
        const gate = roleConfig.slack_channels;
        if (!gate || gate.length === 0) {
            return true;
        }
        const channelId = this.actor.slack?.channelId;
        return !!channelId && gate.includes(channelId);
    }
    /** Snapshot of the actor, with resolved roles, for the OPA input document. */
    buildActor() {
        const actorDoc = {
            authorAssociation: this.actor.authorAssociation,
            login: this.actor.login,
            roles: this.resolveRoles(),
            isLocalMode: this.actor.isLocalMode,
        };
        // Only attach `slack` when Slack context is present.
        if (this.actor.slack) {
            actorDoc.slack = this.actor.slack;
        }
        return actorDoc;
    }
    /** Input document for the `check.execute` policy scope. */
    forCheckExecution(check) {
        const { id, type, group, tags, criticality, sandbox, policy } = check;
        return {
            scope: 'check.execute',
            check: { id, type, group, tags, criticality, sandbox, policy },
            actor: this.buildActor(),
            repository: this.repository,
            pullRequest: this.pullRequest,
        };
    }
    /** Input document for the `tool.invoke` policy scope. */
    forToolInvocation(serverName, methodName, transport) {
        return {
            scope: 'tool.invoke',
            tool: { serverName, methodName, transport },
            actor: this.buildActor(),
            repository: this.repository,
            pullRequest: this.pullRequest,
        };
    }
    /** Input document for the `capability.resolve` policy scope. */
    forCapabilityResolve(checkId, capabilities) {
        return {
            scope: 'capability.resolve',
            check: { id: checkId, type: 'ai' },
            capability: capabilities,
            actor: this.buildActor(),
            repository: this.repository,
            pullRequest: this.pullRequest,
        };
    }
}
168232
+ exports.PolicyInputBuilder = PolicyInputBuilder;
168233
+
168234
+
168235
+ /***/ }),
168236
+
168237
+ /***/ 63737:
168238
+ /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
168239
+
168240
+ "use strict";
168241
+
168242
+ /**
168243
+ * Copyright (c) ProbeLabs. All rights reserved.
168244
+ * Licensed under the Elastic License 2.0; you may not use this file except
168245
+ * in compliance with the Elastic License 2.0.
168246
+ */
168247
// TypeScript-emitted CommonJS interop helpers (equivalent to tslib's
// __createBinding / __setModuleDefault / __importStar). Generated code —
// do not hand-edit.
// Re-exports property `k` of module `m` on target `o` (as `k2`), using a
// live getter where property descriptors are available.
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    var desc = Object.getOwnPropertyDescriptor(m, k);
    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
      desc = { enumerable: true, get: function() { return m[k]; } };
    }
    Object.defineProperty(o, k2, desc);
}) : (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    o[k2] = m[k];
}));
// Attaches the module itself as the `default` property of the namespace object.
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
    Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
    o["default"] = v;
});
// Wraps a CommonJS module in an ES-module-like namespace object
// (pass-through when the module is already flagged __esModule).
var __importStar = (this && this.__importStar) || (function () {
    var ownKeys = function(o) {
        ownKeys = Object.getOwnPropertyNames || function (o) {
            var ar = [];
            for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
            return ar;
        };
        return ownKeys(o);
    };
    return function (mod) {
        if (mod && mod.__esModule) return mod;
        var result = {};
        if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
        __setModuleDefault(result, mod);
        return result;
    };
})();
168280
+ Object.defineProperty(exports, "__esModule", ({ value: true }));
168281
+ exports.KnexStoreBackend = void 0;
168282
+ /**
168283
+ * Knex-backed schedule store for PostgreSQL, MySQL, and MSSQL (Enterprise)
168284
+ *
168285
+ * Uses Knex query builder for database-agnostic SQL. Same schema as SQLite backend
168286
+ * but with real distributed locking via row-level claims (claimed_by/claimed_at/lock_token).
168287
+ */
168288
+ const fs = __importStar(__nccwpck_require__(79896));
168289
+ const path = __importStar(__nccwpck_require__(16928));
168290
+ const uuid_1 = __nccwpck_require__(31914);
168291
+ const logger_1 = __nccwpck_require__(86999);
168292
/**
 * Coerce a DB column value to a number.
 * Numeric drivers return numbers as-is; drivers that hand back strings
 * (e.g. PostgreSQL BIGINT) are parsed as base-10 integers.
 * NULL/undefined map to undefined.
 */
function toNum(val) {
    if (val == null)
        return undefined;
    if (typeof val !== 'string')
        return val;
    return parseInt(val, 10);
}
168297
/**
 * Parse a JSON string, returning undefined instead of throwing.
 * Falsy inputs (null, undefined, '') also yield undefined.
 */
function safeJsonParse(value) {
    let parsed;
    if (value) {
        try {
            parsed = JSON.parse(value);
        }
        catch {
            // malformed JSON -> undefined
        }
    }
    return parsed;
}
168307
/**
 * Map a snake_case DB row to a camelCase schedule object.
 * NULL-able text columns become undefined; numeric columns go through
 * toNum() (string-returning drivers); JSON text columns through
 * safeJsonParse(). `is_recurring` accepts boolean true or integer 1.
 */
function fromDbRow(row) {
    // NULL text columns map to undefined rather than null.
    const orUndef = (v) => v ?? undefined;
    return {
        id: row.id,
        creatorId: row.creator_id,
        creatorContext: orUndef(row.creator_context),
        creatorName: orUndef(row.creator_name),
        timezone: row.timezone,
        schedule: row.schedule_expr,
        runAt: toNum(row.run_at),
        isRecurring: row.is_recurring === true || row.is_recurring === 1,
        originalExpression: row.original_expression,
        workflow: orUndef(row.workflow),
        workflowInputs: safeJsonParse(row.workflow_inputs),
        outputContext: safeJsonParse(row.output_context),
        status: row.status,
        createdAt: toNum(row.created_at),
        lastRunAt: toNum(row.last_run_at),
        nextRunAt: toNum(row.next_run_at),
        runCount: row.run_count,
        failureCount: row.failure_count,
        lastError: orUndef(row.last_error),
        previousResponse: orUndef(row.previous_response),
    };
}
168331
/**
 * Map a camelCase schedule object to a snake_case DB row for insertion.
 * Optional fields become NULL; structured fields (workflowInputs,
 * outputContext) are serialized to JSON text, or NULL when absent.
 */
function toInsertRow(schedule) {
    // Missing optional values are stored as SQL NULL.
    const orNull = (v) => v ?? null;
    // Structured values are persisted as JSON text; absent -> NULL.
    const asJson = (v) => (v ? JSON.stringify(v) : null);
    return {
        id: schedule.id,
        creator_id: schedule.creatorId,
        creator_context: orNull(schedule.creatorContext),
        creator_name: orNull(schedule.creatorName),
        timezone: schedule.timezone,
        schedule_expr: schedule.schedule,
        run_at: orNull(schedule.runAt),
        is_recurring: schedule.isRecurring,
        original_expression: schedule.originalExpression,
        workflow: orNull(schedule.workflow),
        workflow_inputs: asJson(schedule.workflowInputs),
        output_context: asJson(schedule.outputContext),
        status: schedule.status,
        created_at: schedule.createdAt,
        last_run_at: orNull(schedule.lastRunAt),
        next_run_at: orNull(schedule.nextRunAt),
        run_count: schedule.runCount,
        failure_count: schedule.failureCount,
        last_error: orNull(schedule.lastError),
        previous_response: orNull(schedule.previousResponse),
    };
}
168355
+ /**
168356
+ * Enterprise Knex-backed store for PostgreSQL, MySQL, and MSSQL
168357
+ */
168358
+ class KnexStoreBackend {
168359
+ knex = null;
168360
+ driver;
168361
+ connection;
168362
+ constructor(driver, storageConfig, _haConfig) {
168363
+ this.driver = driver;
168364
+ this.connection = (storageConfig.connection || {});
168365
+ }
168366
+ async initialize() {
168367
+ // Load knex dynamically
168368
+ const { createRequire } = __nccwpck_require__(73339);
168369
+ const runtimeRequire = createRequire(__filename);
168370
+ let knexFactory;
168371
+ try {
168372
+ knexFactory = runtimeRequire('knex');
168373
+ }
168374
+ catch (err) {
168375
+ const code = err?.code;
168376
+ if (code === 'MODULE_NOT_FOUND' || code === 'ERR_MODULE_NOT_FOUND') {
168377
+ throw new Error('knex is required for PostgreSQL/MySQL/MSSQL schedule storage. ' +
168378
+ 'Install it with: npm install knex');
168379
+ }
168380
+ throw err;
168381
+ }
168382
+ const clientMap = {
168383
+ postgresql: 'pg',
168384
+ mysql: 'mysql2',
168385
+ mssql: 'tedious',
168386
+ };
168387
+ const client = clientMap[this.driver];
168388
+ // Build connection config
168389
+ let connection;
168390
+ if (this.connection.connection_string) {
168391
+ connection = this.connection.connection_string;
168392
+ }
168393
+ else if (this.driver === 'mssql') {
168394
+ connection = this.buildMssqlConnection();
168395
+ }
168396
+ else {
168397
+ connection = this.buildStandardConnection();
168398
+ }
168399
+ this.knex = knexFactory({
168400
+ client,
168401
+ connection,
168402
+ pool: {
168403
+ min: this.connection.pool?.min ?? 0,
168404
+ max: this.connection.pool?.max ?? 10,
168405
+ },
168406
+ });
168407
+ // Run schema migration
168408
+ await this.migrateSchema();
168409
+ logger_1.logger.info(`[KnexStore] Initialized (${this.driver})`);
168410
+ }
168411
+ buildStandardConnection() {
168412
+ return {
168413
+ host: this.connection.host || 'localhost',
168414
+ port: this.connection.port,
168415
+ database: this.connection.database || 'visor',
168416
+ user: this.connection.user,
168417
+ password: this.connection.password,
168418
+ ssl: this.resolveSslConfig(),
168419
+ };
168420
+ }
168421
+ buildMssqlConnection() {
168422
+ const ssl = this.connection.ssl;
168423
+ const sslEnabled = ssl === true || (typeof ssl === 'object' && ssl.enabled !== false);
168424
+ return {
168425
+ server: this.connection.host || 'localhost',
168426
+ port: this.connection.port,
168427
+ database: this.connection.database || 'visor',
168428
+ user: this.connection.user,
168429
+ password: this.connection.password,
168430
+ options: {
168431
+ encrypt: sslEnabled,
168432
+ trustServerCertificate: typeof ssl === 'object' ? ssl.reject_unauthorized === false : !sslEnabled,
168433
+ },
168434
+ };
168435
+ }
168436
+ resolveSslConfig() {
168437
+ const ssl = this.connection.ssl;
168438
+ if (ssl === false || ssl === undefined)
168439
+ return false;
168440
+ if (ssl === true)
168441
+ return { rejectUnauthorized: true };
168442
+ // Object config
168443
+ if (ssl.enabled === false)
168444
+ return false;
168445
+ const result = {
168446
+ rejectUnauthorized: ssl.reject_unauthorized !== false,
168447
+ };
168448
+ if (ssl.ca) {
168449
+ const caPath = this.validateSslPath(ssl.ca, 'CA certificate');
168450
+ result.ca = fs.readFileSync(caPath, 'utf8');
168451
+ }
168452
+ if (ssl.cert) {
168453
+ const certPath = this.validateSslPath(ssl.cert, 'client certificate');
168454
+ result.cert = fs.readFileSync(certPath, 'utf8');
168455
+ }
168456
+ if (ssl.key) {
168457
+ const keyPath = this.validateSslPath(ssl.key, 'client key');
168458
+ result.key = fs.readFileSync(keyPath, 'utf8');
168459
+ }
168460
+ return result;
168461
+ }
168462
+ validateSslPath(filePath, label) {
168463
+ const resolved = path.resolve(filePath);
168464
+ if (resolved !== path.normalize(resolved)) {
168465
+ throw new Error(`SSL ${label} path contains invalid sequences: ${filePath}`);
168466
+ }
168467
+ if (!fs.existsSync(resolved)) {
168468
+ throw new Error(`SSL ${label} not found: ${filePath}`);
168469
+ }
168470
+ return resolved;
168471
+ }
168472
+ async shutdown() {
168473
+ if (this.knex) {
168474
+ await this.knex.destroy();
168475
+ this.knex = null;
168476
+ }
168477
+ }
168478
+ async migrateSchema() {
168479
+ const knex = this.getKnex();
168480
+ const exists = await knex.schema.hasTable('schedules');
168481
+ if (!exists) {
168482
+ await knex.schema.createTable('schedules', table => {
168483
+ table.string('id', 36).primary();
168484
+ table.string('creator_id', 255).notNullable().index();
168485
+ table.string('creator_context', 255);
168486
+ table.string('creator_name', 255);
168487
+ table.string('timezone', 64).notNullable().defaultTo('UTC');
168488
+ table.string('schedule_expr', 255);
168489
+ table.bigInteger('run_at');
168490
+ table.boolean('is_recurring').notNullable();
168491
+ table.text('original_expression');
168492
+ table.string('workflow', 255);
168493
+ table.text('workflow_inputs');
168494
+ table.text('output_context');
168495
+ table.string('status', 20).notNullable().index();
168496
+ table.bigInteger('created_at').notNullable();
168497
+ table.bigInteger('last_run_at');
168498
+ table.bigInteger('next_run_at');
168499
+ table.integer('run_count').notNullable().defaultTo(0);
168500
+ table.integer('failure_count').notNullable().defaultTo(0);
168501
+ table.text('last_error');
168502
+ table.text('previous_response');
168503
+ table.index(['status', 'next_run_at']);
168504
+ });
168505
+ }
168506
+ // Create scheduler_locks table for distributed locking
168507
+ const locksExist = await knex.schema.hasTable('scheduler_locks');
168508
+ if (!locksExist) {
168509
+ await knex.schema.createTable('scheduler_locks', table => {
168510
+ table.string('lock_id', 255).primary();
168511
+ table.string('node_id', 255).notNullable();
168512
+ table.string('lock_token', 36).notNullable();
168513
+ table.bigInteger('acquired_at').notNullable();
168514
+ table.bigInteger('expires_at').notNullable();
168515
+ });
168516
+ }
168517
+ }
168518
+ getKnex() {
168519
+ if (!this.knex) {
168520
+ throw new Error('[KnexStore] Not initialized. Call initialize() first.');
168521
+ }
168522
+ return this.knex;
168523
+ }
168524
+ // --- CRUD ---
168525
+ async create(schedule) {
168526
+ const knex = this.getKnex();
168527
+ const newSchedule = {
168528
+ ...schedule,
168529
+ id: (0, uuid_1.v4)(),
168530
+ createdAt: Date.now(),
168531
+ runCount: 0,
168532
+ failureCount: 0,
168533
+ status: 'active',
168534
+ };
168535
+ await knex('schedules').insert(toInsertRow(newSchedule));
168536
+ logger_1.logger.info(`[KnexStore] Created schedule ${newSchedule.id} for user ${newSchedule.creatorId}`);
168537
+ return newSchedule;
168538
+ }
168539
+ async importSchedule(schedule) {
168540
+ const knex = this.getKnex();
168541
+ const existing = await knex('schedules').where('id', schedule.id).first();
168542
+ if (existing)
168543
+ return; // Already imported (idempotent)
168544
+ await knex('schedules').insert(toInsertRow(schedule));
168545
+ }
168546
+ async get(id) {
168547
+ const knex = this.getKnex();
168548
+ const row = await knex('schedules').where('id', id).first();
168549
+ return row ? fromDbRow(row) : undefined;
168550
+ }
168551
+ async update(id, patch) {
168552
+ const knex = this.getKnex();
168553
+ const existing = await knex('schedules').where('id', id).first();
168554
+ if (!existing)
168555
+ return undefined;
168556
+ const current = fromDbRow(existing);
168557
+ const updated = { ...current, ...patch, id: current.id };
168558
+ const row = toInsertRow(updated);
168559
+ // Remove id from update (PK cannot change)
168560
+ delete row.id;
168561
+ await knex('schedules').where('id', id).update(row);
168562
+ return updated;
168563
+ }
168564
+ async delete(id) {
168565
+ const knex = this.getKnex();
168566
+ const deleted = await knex('schedules').where('id', id).del();
168567
+ if (deleted > 0) {
168568
+ logger_1.logger.info(`[KnexStore] Deleted schedule ${id}`);
168569
+ return true;
168570
+ }
168571
+ return false;
168572
+ }
168573
+ // --- Queries ---
168574
+ async getByCreator(creatorId) {
168575
+ const knex = this.getKnex();
168576
+ const rows = await knex('schedules').where('creator_id', creatorId);
168577
+ return rows.map((r) => fromDbRow(r));
168578
+ }
168579
+ async getActiveSchedules() {
168580
+ const knex = this.getKnex();
168581
+ const rows = await knex('schedules').where('status', 'active');
168582
+ return rows.map((r) => fromDbRow(r));
168583
+ }
168584
+ async getDueSchedules(now) {
168585
+ const ts = now ?? Date.now();
168586
+ const knex = this.getKnex();
168587
+ // MSSQL uses 1/0 for booleans
168588
+ const bFalse = this.driver === 'mssql' ? 0 : false;
168589
+ const bTrue = this.driver === 'mssql' ? 1 : true;
168590
+ const rows = await knex('schedules')
168591
+ .where('status', 'active')
168592
+ .andWhere(function () {
168593
+ this.where(function () {
168594
+ this.where('is_recurring', bFalse)
168595
+ .whereNotNull('run_at')
168596
+ .where('run_at', '<=', ts);
168597
+ }).orWhere(function () {
168598
+ this.where('is_recurring', bTrue)
168599
+ .whereNotNull('next_run_at')
168600
+ .where('next_run_at', '<=', ts);
168601
+ });
168602
+ });
168603
+ return rows.map((r) => fromDbRow(r));
168604
+ }
168605
+ async findByWorkflow(creatorId, workflowName) {
168606
+ const knex = this.getKnex();
168607
+ const escaped = workflowName.toLowerCase().replace(/[%_\\]/g, '\\$&');
168608
+ const pattern = `%${escaped}%`;
168609
+ const rows = await knex('schedules')
168610
+ .where('creator_id', creatorId)
168611
+ .where('status', 'active')
168612
+ .whereRaw("LOWER(workflow) LIKE ? ESCAPE '\\'", [pattern]);
168613
+ return rows.map((r) => fromDbRow(r));
168614
+ }
168615
+ async getAll() {
168616
+ const knex = this.getKnex();
168617
+ const rows = await knex('schedules');
168618
+ return rows.map((r) => fromDbRow(r));
168619
+ }
168620
+ async getStats() {
168621
+ const knex = this.getKnex();
168622
+ // MSSQL uses 1/0 for booleans; PostgreSQL/MySQL accept both true/1
168623
+ const boolTrue = this.driver === 'mssql' ? '1' : 'true';
168624
+ const boolFalse = this.driver === 'mssql' ? '0' : 'false';
168625
+ const result = await knex('schedules')
168626
+ .select(knex.raw('COUNT(*) as total'), knex.raw("SUM(CASE WHEN status = 'active' THEN 1 ELSE 0 END) as active"), knex.raw("SUM(CASE WHEN status = 'paused' THEN 1 ELSE 0 END) as paused"), knex.raw("SUM(CASE WHEN status = 'completed' THEN 1 ELSE 0 END) as completed"), knex.raw("SUM(CASE WHEN status = 'failed' THEN 1 ELSE 0 END) as failed"), knex.raw(`SUM(CASE WHEN is_recurring = ${boolTrue} THEN 1 ELSE 0 END) as recurring`), knex.raw(`SUM(CASE WHEN is_recurring = ${boolFalse} THEN 1 ELSE 0 END) as one_time`))
168627
+ .first();
168628
+ return {
168629
+ total: Number(result.total) || 0,
168630
+ active: Number(result.active) || 0,
168631
+ paused: Number(result.paused) || 0,
168632
+ completed: Number(result.completed) || 0,
168633
+ failed: Number(result.failed) || 0,
168634
+ recurring: Number(result.recurring) || 0,
168635
+ oneTime: Number(result.one_time) || 0,
168636
+ };
168637
+ }
168638
+ async validateLimits(creatorId, isRecurring, limits) {
168639
+ const knex = this.getKnex();
168640
+ if (limits.maxGlobal) {
168641
+ const result = await knex('schedules').count('* as cnt').first();
168642
+ if (Number(result?.cnt) >= limits.maxGlobal) {
168643
+ throw new Error(`Global schedule limit reached (${limits.maxGlobal})`);
168644
+ }
168645
+ }
168646
+ if (limits.maxPerUser) {
168647
+ const result = await knex('schedules')
168648
+ .where('creator_id', creatorId)
168649
+ .count('* as cnt')
168650
+ .first();
168651
+ if (Number(result?.cnt) >= limits.maxPerUser) {
168652
+ throw new Error(`You have reached the maximum number of schedules (${limits.maxPerUser})`);
168653
+ }
168654
+ }
168655
+ if (isRecurring && limits.maxRecurringPerUser) {
168656
+ const bTrue = this.driver === 'mssql' ? 1 : true;
168657
+ const result = await knex('schedules')
168658
+ .where('creator_id', creatorId)
168659
+ .where('is_recurring', bTrue)
168660
+ .count('* as cnt')
168661
+ .first();
168662
+ if (Number(result?.cnt) >= limits.maxRecurringPerUser) {
168663
+ throw new Error(`You have reached the maximum number of recurring schedules (${limits.maxRecurringPerUser})`);
168664
+ }
168665
+ }
168666
+ }
168667
+ // --- HA Distributed Locking (via scheduler_locks table) ---
168668
+ async tryAcquireLock(lockId, nodeId, ttlSeconds) {
168669
+ const knex = this.getKnex();
168670
+ const now = Date.now();
168671
+ const expiresAt = now + ttlSeconds * 1000;
168672
+ const token = (0, uuid_1.v4)();
168673
+ // Step 1: Try to claim an existing expired lock
168674
+ const updated = await knex('scheduler_locks')
168675
+ .where('lock_id', lockId)
168676
+ .where('expires_at', '<', now)
168677
+ .update({
168678
+ node_id: nodeId,
168679
+ lock_token: token,
168680
+ acquired_at: now,
168681
+ expires_at: expiresAt,
168682
+ });
168683
+ if (updated > 0)
168684
+ return token;
168685
+ // Step 2: Try to INSERT a new lock row
168686
+ try {
168687
+ await knex('scheduler_locks').insert({
168688
+ lock_id: lockId,
168689
+ node_id: nodeId,
168690
+ lock_token: token,
168691
+ acquired_at: now,
168692
+ expires_at: expiresAt,
168693
+ });
168694
+ return token;
168695
+ }
168696
+ catch {
168697
+ // Unique constraint violation — another node holds the lock
168698
+ return null;
168699
+ }
168700
+ }
168701
+ async releaseLock(lockId, lockToken) {
168702
+ const knex = this.getKnex();
168703
+ await knex('scheduler_locks').where('lock_id', lockId).where('lock_token', lockToken).del();
168704
+ }
168705
+ async renewLock(lockId, lockToken, ttlSeconds) {
168706
+ const knex = this.getKnex();
168707
+ const now = Date.now();
168708
+ const expiresAt = now + ttlSeconds * 1000;
168709
+ const updated = await knex('scheduler_locks')
168710
+ .where('lock_id', lockId)
168711
+ .where('lock_token', lockToken)
168712
+ .update({ acquired_at: now, expires_at: expiresAt });
168713
+ return updated > 0;
168714
+ }
168715
+ async flush() {
168716
+ // No-op for server-based backends
168717
+ }
168718
+ }
168719
+ exports.KnexStoreBackend = KnexStoreBackend;
168720
+
168721
+
167038
168722
  /***/ }),
167039
168723
 
167040
168724
  /***/ 83864:
@@ -167903,6 +169587,7 @@ class GitHubFrontend {
167903
169587
  minUpdateDelayMs = 1000; // Minimum delay between updates (public for testing)
167904
169588
  // Cache of created GitHub comment IDs per group to handle API eventual consistency
167905
169589
  createdCommentGithubIds = new Map();
169590
+ _stopped = false;
167906
169591
  start(ctx) {
167907
169592
  const log = ctx.logger;
167908
169593
  const bus = ctx.eventBus;
@@ -168030,10 +169715,23 @@ class GitHubFrontend {
168030
169715
  }
168031
169716
  }));
168032
169717
  }
168033
- stop() {
169718
+ async stop() {
169719
+ this._stopped = true;
168034
169720
  for (const s of this.subs)
168035
169721
  s.unsubscribe();
168036
169722
  this.subs = [];
169723
+ if (this._timer) {
169724
+ clearTimeout(this._timer);
169725
+ this._timer = null;
169726
+ }
169727
+ this._pendingIds.clear();
169728
+ // Drain any in-flight updateGroupedComment operations so callers that
169729
+ // await stop() (e.g. FrontendsHost.stopAll) are guaranteed no async
169730
+ // work leaks after stop resolves.
169731
+ const pending = Array.from(this.updateLocks.values());
169732
+ if (pending.length > 0) {
169733
+ await Promise.allSettled(pending);
169734
+ }
168037
169735
  }
168038
169736
  async buildFullBody(ctx, group) {
168039
169737
  const header = this.renderThreadHeader(ctx, group);
@@ -168124,6 +169822,8 @@ ${end}`);
168124
169822
  */
168125
169823
  async performGroupedCommentUpdate(ctx, comments, group, changedIds) {
168126
169824
  try {
169825
+ if (this._stopped)
169826
+ return;
168127
169827
  if (!ctx.run.repo || !ctx.run.pr)
168128
169828
  return;
168129
169829
  // Check if PR comments are enabled (default to true if not specified)
@@ -177854,6 +179554,35 @@ class OutputFormatters {
177854
179554
  exports.OutputFormatters = OutputFormatters;
177855
179555
 
177856
179556
 
179557
+ /***/ }),
179558
+
179559
+ /***/ 93866:
179560
+ /***/ ((__unused_webpack_module, exports) => {
179561
+
179562
+ "use strict";
179563
+
179564
+ Object.defineProperty(exports, "__esModule", ({ value: true }));
179565
+ exports.DefaultPolicyEngine = void 0;
179566
+ /**
179567
+ * Default (no-op) policy engine — always allows everything.
179568
+ * Used when no enterprise license is present or policy is disabled.
179569
+ */
179570
+ class DefaultPolicyEngine {
179571
+ async initialize(_config) { }
179572
+ async evaluateCheckExecution(_checkId, _checkConfig) {
179573
+ return { allowed: true };
179574
+ }
179575
+ async evaluateToolInvocation(_serverName, _methodName, _transport) {
179576
+ return { allowed: true };
179577
+ }
179578
+ async evaluateCapabilities(_checkId, _capabilities) {
179579
+ return { allowed: true };
179580
+ }
179581
+ async shutdown() { }
179582
+ }
179583
+ exports.DefaultPolicyEngine = DefaultPolicyEngine;
179584
+
179585
+
177857
179586
  /***/ }),
177858
179587
 
177859
179588
  /***/ 96611:
@@ -179474,12 +181203,53 @@ class AICheckProvider extends check_provider_interface_1.CheckProvider {
179474
181203
  if (Object.keys(dynamicServers).length > 0) {
179475
181204
  Object.assign(mcpServers, dynamicServers);
179476
181205
  }
181206
+ // Emit telemetry for tool setup diagnostics
181207
+ try {
181208
+ const span = lazy_otel_1.trace.getSpan(lazy_otel_1.context.active());
181209
+ if (span) {
181210
+ span.addEvent('tool_setup.mcp_servers_js', {
181211
+ 'tool_setup.server_count': Object.keys(dynamicServers).length,
181212
+ 'tool_setup.server_names': Object.keys(dynamicServers).join(','),
181213
+ 'tool_setup.workflow_entries': Object.entries(dynamicServers)
181214
+ .filter(([, cfg]) => cfg?.workflow)
181215
+ .map(([name, cfg]) => `${name}→${cfg.workflow}`)
181216
+ .join(','),
181217
+ });
181218
+ }
181219
+ }
181220
+ catch { }
179477
181221
  }
179478
181222
  catch (error) {
179479
- logger_1.logger.error(`[AICheckProvider] Failed to evaluate ai_mcp_servers_js: ${error instanceof Error ? error.message : 'Unknown error'}`);
181223
+ const errMsg = error instanceof Error ? error.message : 'Unknown error';
181224
+ logger_1.logger.error(`[AICheckProvider] Failed to evaluate ai_mcp_servers_js: ${errMsg}`);
181225
+ // Emit telemetry for the failure
181226
+ try {
181227
+ const span = lazy_otel_1.trace.getSpan(lazy_otel_1.context.active());
181228
+ if (span) {
181229
+ span.addEvent('tool_setup.mcp_servers_js_error', {
181230
+ 'tool_setup.error': errMsg,
181231
+ });
181232
+ }
181233
+ }
181234
+ catch { }
179480
181235
  // Continue without dynamic servers
179481
181236
  }
179482
181237
  }
181238
+ else if (mcpServersJsExpr && !_dependencyResults) {
181239
+ // Expression exists but no dependency results — this means the check has no dependencies
181240
+ // or the dependency results map was empty/undefined
181241
+ try {
181242
+ const span = lazy_otel_1.trace.getSpan(lazy_otel_1.context.active());
181243
+ if (span) {
181244
+ span.addEvent('tool_setup.mcp_servers_js_skipped', {
181245
+ 'tool_setup.reason': 'no_dependency_results',
181246
+ 'tool_setup.has_expr': true,
181247
+ 'tool_setup.has_deps': false,
181248
+ });
181249
+ }
181250
+ }
181251
+ catch { }
181252
+ }
179483
181253
  // 5. Resolve environment variable placeholders in MCP server env configs
179484
181254
  // Supports ${VAR} and ${{ env.VAR }} syntax
179485
181255
  for (const serverConfig of Object.values(mcpServers)) {
@@ -179632,6 +181402,28 @@ class AICheckProvider extends check_provider_interface_1.CheckProvider {
179632
181402
  try {
179633
181403
  // Load custom tools from global config (supports workflows and custom tools)
179634
181404
  const customTools = this.loadCustomTools(customToolsToLoad, config);
181405
+ // Emit telemetry for tool resolution results
181406
+ try {
181407
+ const span = lazy_otel_1.trace.getSpan(lazy_otel_1.context.active());
181408
+ if (span) {
181409
+ const requestedNames = customToolsToLoad.map(item => typeof item === 'string'
181410
+ ? item
181411
+ : `${item.name || item.workflow}(wf:${item.workflow})`);
181412
+ span.addEvent('tool_setup.resolution', {
181413
+ 'tool_setup.requested_count': customToolsToLoad.length,
181414
+ 'tool_setup.requested_names': requestedNames.join(','),
181415
+ 'tool_setup.resolved_count': customTools.size,
181416
+ 'tool_setup.resolved_names': Array.from(customTools.keys()).join(','),
181417
+ 'tool_setup.missing_count': customToolsToLoad.length - customTools.size,
181418
+ });
181419
+ }
181420
+ }
181421
+ catch { }
181422
+ if (customToolsToLoad.length > 0 && customTools.size === 0) {
181423
+ logger_1.logger.warn(`[AICheckProvider] All ${customToolsToLoad.length} custom tools failed to resolve! ` +
181424
+ `Requested: ${customToolsToLoad.map(item => (typeof item === 'string' ? item : item.workflow)).join(', ')}. ` +
181425
+ `AI will have no workflow tools available.`);
181426
+ }
179635
181427
  // Add schedule tool if enabled (via ai_mcp_servers { tool: 'schedule' } or enable_scheduler)
179636
181428
  if (scheduleToolEnabled) {
179637
181429
  const scheduleTool = (0, schedule_tool_1.getScheduleToolDefinition)();
@@ -179669,10 +181461,36 @@ class AICheckProvider extends check_provider_interface_1.CheckProvider {
179669
181461
  }
179670
181462
  }
179671
181463
  catch (error) {
179672
- logger_1.logger.error(`[AICheckProvider] Failed to start custom tools SSE server '${customToolsServerName}': ${error instanceof Error ? error.message : 'Unknown error'}`);
181464
+ const errMsg = error instanceof Error ? error.message : 'Unknown error';
181465
+ logger_1.logger.error(`[AICheckProvider] Failed to start custom tools SSE server '${customToolsServerName}': ${errMsg}`);
181466
+ // Emit telemetry for SSE server failure
181467
+ try {
181468
+ const span = lazy_otel_1.trace.getSpan(lazy_otel_1.context.active());
181469
+ if (span) {
181470
+ span.addEvent('tool_setup.sse_server_error', {
181471
+ 'tool_setup.error': errMsg,
181472
+ 'tool_setup.server_name': customToolsServerName || '',
181473
+ });
181474
+ }
181475
+ }
181476
+ catch { }
179673
181477
  // Continue without custom tools
179674
181478
  }
179675
181479
  }
181480
+ // Emit final tool setup summary telemetry
181481
+ try {
181482
+ const span = lazy_otel_1.trace.getSpan(lazy_otel_1.context.active());
181483
+ if (span) {
181484
+ const finalServerNames = Object.keys(mcpServers);
181485
+ span.addEvent('tool_setup.final', {
181486
+ 'tool_setup.final_server_count': finalServerNames.length,
181487
+ 'tool_setup.final_server_names': finalServerNames.join(','),
181488
+ 'tool_setup.has_custom_tools_server': !!customToolsServer,
181489
+ 'tool_setup.tools_disabled': !!config.ai?.disableTools,
181490
+ });
181491
+ }
181492
+ }
181493
+ catch { }
179676
181494
  // Pass MCP server config directly to AI service (unless tools are disabled)
179677
181495
  if (Object.keys(mcpServers).length > 0 && !config.ai?.disableTools) {
179678
181496
  // eslint-disable-next-line @typescript-eslint/no-explicit-any
@@ -192219,6 +194037,11 @@ async function handleCancel(positional, flags) {
192219
194037
  process.exit(1);
192220
194038
  }
192221
194039
  await store.deleteAsync(schedule.id);
194040
+ // Also cancel the in-memory job (cron or timeout) so it doesn't fire
194041
+ const scheduler = (0, scheduler_1.getScheduler)();
194042
+ if (scheduler) {
194043
+ scheduler.cancelSchedule(schedule.id);
194044
+ }
192222
194045
  console.log('Schedule cancelled successfully!');
192223
194046
  console.log();
192224
194047
  console.log(` ID: ${schedule.id.substring(0, 8)}`);
@@ -193126,6 +194949,7 @@ exports.isScheduleTool = isScheduleTool;
193126
194949
  exports.buildScheduleToolContext = buildScheduleToolContext;
193127
194950
  const schedule_store_1 = __nccwpck_require__(27336);
193128
194951
  const schedule_parser_1 = __nccwpck_require__(48478);
194952
+ const scheduler_1 = __nccwpck_require__(28404);
193129
194953
  const logger_1 = __nccwpck_require__(86999);
193130
194954
  /**
193131
194955
  * Simple glob-style pattern matching for workflow names
@@ -193546,8 +195370,13 @@ async function handleCancel(args, context, store) {
193546
195370
  error: 'You can only cancel your own schedules.',
193547
195371
  };
193548
195372
  }
193549
- // Delete the schedule
195373
+ // Delete the schedule from DB
193550
195374
  await store.deleteAsync(schedule.id);
195375
+ // Also cancel the in-memory job (cron or timeout) so it doesn't fire
195376
+ const scheduler = (0, scheduler_1.getScheduler)();
195377
+ if (scheduler) {
195378
+ scheduler.cancelSchedule(schedule.id);
195379
+ }
193551
195380
  logger_1.logger.info(`[ScheduleTool] Cancelled schedule ${schedule.id} for user ${context.userId}`);
193552
195381
  return {
193553
195382
  success: true,
@@ -194005,6 +195834,27 @@ class Scheduler {
194005
195834
  getStore() {
194006
195835
  return this.store;
194007
195836
  }
195837
+ /**
195838
+ * Cancel a schedule's in-memory job (cron or timeout).
195839
+ * Called after deleting from DB to ensure the job doesn't fire again.
195840
+ */
195841
+ cancelSchedule(scheduleId) {
195842
+ // Stop cron job if it exists
195843
+ const cronJob = this.cronJobs.get(scheduleId);
195844
+ if (cronJob) {
195845
+ cronJob.stop();
195846
+ this.cronJobs.delete(scheduleId);
195847
+ logger_1.logger.debug(`[Scheduler] Cancelled cron job for schedule ${scheduleId}`);
195848
+ return;
195849
+ }
195850
+ // Clear timeout if it exists
195851
+ const timeout = this.oneTimeTimeouts.get(scheduleId);
195852
+ if (timeout) {
195853
+ clearTimeout(timeout);
195854
+ this.oneTimeTimeouts.delete(scheduleId);
195855
+ logger_1.logger.debug(`[Scheduler] Cancelled timeout for schedule ${scheduleId}`);
195856
+ }
195857
+ }
194008
195858
  /**
194009
195859
  * Start the scheduler
194010
195860
  */
@@ -194385,6 +196235,26 @@ class Scheduler {
194385
196235
  * Execute a scheduled workflow
194386
196236
  */
194387
196237
  async executeSchedule(schedule) {
196238
+ // DB freshness check: verify the schedule still exists and is active.
196239
+ // This prevents execution of cancelled or paused schedules when the
196240
+ // in-memory job fires after a DB-only cancellation.
196241
+ // Note: This is a single indexed primary-key lookup (<1ms for SQLite),
196242
+ // and only runs for user-created schedules (static cron jobs use
196243
+ // executeStaticCronJob instead), so the overhead is negligible
196244
+ // compared to the workflow execution that follows.
196245
+ try {
196246
+ const fresh = await this.store.getAsync(schedule.id);
196247
+ if (!fresh || fresh.status !== 'active') {
196248
+ logger_1.logger.info(`[Scheduler] Schedule ${schedule.id} is no longer active (${fresh ? fresh.status : 'deleted'}), skipping execution`);
196249
+ // Clean up the in-memory job since the schedule is gone/inactive
196250
+ this.cancelSchedule(schedule.id);
196251
+ return;
196252
+ }
196253
+ }
196254
+ catch {
196255
+ // If we can't check the DB, log and proceed (don't block execution on DB errors)
196256
+ logger_1.logger.warn(`[Scheduler] Could not verify schedule ${schedule.id} freshness, proceeding with execution`);
196257
+ }
194388
196258
  const description = schedule.workflow || 'reminder';
194389
196259
  logger_1.logger.info(`[Scheduler] Executing schedule ${schedule.id}: ${description}`);
194390
196260
  const startTime = Date.now();
@@ -198521,7 +200391,7 @@ class StateMachineExecutionEngine {
198521
200391
  try {
198522
200392
  logger_1.logger.debug(`[PolicyEngine] Loading enterprise policy engine (engine=${configWithTagFilter.policy.engine})`);
198523
200393
  // @ts-ignore — enterprise/ may not exist in OSS builds (caught at runtime)
198524
- const { loadEnterprisePolicyEngine } = await Promise.resolve().then(() => __importStar(__nccwpck_require__(7065)));
200394
+ const { loadEnterprisePolicyEngine } = await Promise.resolve().then(() => __importStar(__nccwpck_require__(87068)));
198525
200395
  context.policyEngine = await loadEnterprisePolicyEngine(configWithTagFilter.policy);
198526
200396
  logger_1.logger.debug(`[PolicyEngine] Initialized: ${context.policyEngine?.constructor?.name || 'unknown'}`);
198527
200397
  }
@@ -199617,6 +201487,13 @@ async function initializeWorkspace(context) {
199617
201487
  process.env.VISOR_WORKSPACE_MAIN_PROJECT = info.mainProjectPath;
199618
201488
  process.env.VISOR_WORKSPACE_MAIN_PROJECT_NAME = info.mainProjectName;
199619
201489
  process.env.VISOR_ORIGINAL_WORKDIR = originalPath;
201490
+ // Prevent git from walking above the workspace base path.
201491
+ // Without this, git commands in workspace subdirectories can discover
201492
+ // a rogue .git in a parent directory (e.g. /tmp/.git) and leak
201493
+ // operations across all workspaces.
201494
+ const basePath = workspaceConfig?.base_path || process.env.VISOR_WORKSPACE_PATH || '/tmp/visor-workspaces';
201495
+ const existing = process.env.GIT_CEILING_DIRECTORIES;
201496
+ process.env.GIT_CEILING_DIRECTORIES = existing ? `${existing}:${basePath}` : basePath;
199620
201497
  }
199621
201498
  catch { }
199622
201499
  logger_1.logger.info(`[Workspace] Initialized workspace: ${info.workspacePath}`);
@@ -208785,7 +210662,7 @@ async function initTelemetry(opts = {}) {
208785
210662
  const path = __nccwpck_require__(16928);
208786
210663
  const outDir = opts.file?.dir ||
208787
210664
  process.env.VISOR_TRACE_DIR ||
208788
- __nccwpck_require__.ab + "traces";
210665
+ path.join(process.cwd(), 'output', 'traces');
208789
210666
  fs.mkdirSync(outDir, { recursive: true });
208790
210667
  const ts = new Date().toISOString().replace(/[:.]/g, '-');
208791
210668
  process.env.VISOR_FALLBACK_TRACE_FILE = path.join(outDir, `run-${ts}.ndjson`);
@@ -208990,7 +210867,7 @@ async function shutdownTelemetry() {
208990
210867
  if (process.env.VISOR_TRACE_REPORT === 'true') {
208991
210868
  const fs = __nccwpck_require__(79896);
208992
210869
  const path = __nccwpck_require__(16928);
208993
- const outDir = process.env.VISOR_TRACE_DIR || __nccwpck_require__.ab + "traces";
210870
+ const outDir = process.env.VISOR_TRACE_DIR || path.join(process.cwd(), 'output', 'traces');
208994
210871
  if (!fs.existsSync(outDir))
208995
210872
  fs.mkdirSync(outDir, { recursive: true });
208996
210873
  const ts = new Date().toISOString().replace(/[:.]/g, '-');
@@ -209489,7 +211366,7 @@ function __getOrCreateNdjsonPath() {
209489
211366
  fs.mkdirSync(dir, { recursive: true });
209490
211367
  return __ndjsonPath;
209491
211368
  }
209492
- const outDir = process.env.VISOR_TRACE_DIR || __nccwpck_require__.ab + "traces";
211369
+ const outDir = process.env.VISOR_TRACE_DIR || path.join(process.cwd(), 'output', 'traces');
209493
211370
  if (!fs.existsSync(outDir))
209494
211371
  fs.mkdirSync(outDir, { recursive: true });
209495
211372
  if (!__ndjsonPath) {
@@ -220622,6 +222499,7 @@ function buildProviderTemplateContext(prInfo, dependencyResults, memoryStore, ou
220622
222499
  Object.defineProperty(exports, "__esModule", ({ value: true }));
220623
222500
  exports.resolveTools = resolveTools;
220624
222501
  const workflow_tool_executor_1 = __nccwpck_require__(30236);
222502
+ const workflow_registry_1 = __nccwpck_require__(82824);
220625
222503
  const logger_1 = __nccwpck_require__(86999);
220626
222504
  /**
220627
222505
  * Resolve tool items to CustomToolDefinition instances.
@@ -220632,6 +222510,13 @@ const logger_1 = __nccwpck_require__(86999);
220632
222510
  */
220633
222511
  function resolveTools(toolItems, globalTools, logPrefix = '[ToolResolver]') {
220634
222512
  const tools = new Map();
222513
+ // Log registry state once for debugging workflow resolution failures
222514
+ const registry = workflow_registry_1.WorkflowRegistry.getInstance();
222515
+ const registeredWorkflows = registry.list().map(w => w.id);
222516
+ if (toolItems.some(item => typeof item !== 'string' && (0, workflow_tool_executor_1.isWorkflowToolReference)(item))) {
222517
+ logger_1.logger.info(`${logPrefix} Resolving ${toolItems.length} tool items. ` +
222518
+ `WorkflowRegistry has ${registeredWorkflows.length} workflows: [${registeredWorkflows.join(', ')}]`);
222519
+ }
220635
222520
  for (const item of toolItems) {
220636
222521
  // First, try to resolve as a workflow tool
220637
222522
  const workflowTool = (0, workflow_tool_executor_1.resolveWorkflowToolFromItem)(item);
@@ -220651,7 +222536,8 @@ function resolveTools(toolItems, globalTools, logPrefix = '[ToolResolver]') {
220651
222536
  logger_1.logger.warn(`${logPrefix} Tool '${item}' not found in global tools or workflow registry`);
220652
222537
  }
220653
222538
  else if ((0, workflow_tool_executor_1.isWorkflowToolReference)(item)) {
220654
- logger_1.logger.warn(`${logPrefix} Workflow '${item.workflow}' referenced but not found in registry`);
222539
+ logger_1.logger.warn(`${logPrefix} Workflow '${item.workflow}' referenced but not found in registry. ` +
222540
+ `Available: [${registeredWorkflows.join(', ')}]`);
220655
222541
  }
220656
222542
  }
220657
222543
  if (tools.size === 0 && toolItems.length > 0 && !globalTools) {
@@ -221028,6 +222914,10 @@ class WorkspaceManager {
221028
222914
  catch { }
221029
222915
  await this.createMainProjectWorktree(mainProjectPath);
221030
222916
  }
222917
+ else {
222918
+ // Worktree exists and is valid — update to latest upstream and clean
222919
+ await this.refreshWorktreeToUpstream(mainProjectPath);
222920
+ }
221031
222921
  }
221032
222922
  else {
221033
222923
  await this.createMainProjectWorktree(mainProjectPath);
@@ -221243,32 +223133,129 @@ class WorkspaceManager {
221243
223133
  return cleaned;
221244
223134
  }
221245
223135
  /**
221246
- * Create worktree for the main project
221247
- *
221248
- * visor-disable: architecture - Not using WorktreeManager here because:
221249
- * 1. WorktreeManager expects remote URLs and clones to bare repos first
221250
- * 2. This operates on the LOCAL repo we're already in (no cloning needed)
221251
- * 3. Adding a "local mode" to WorktreeManager would add complexity for minimal benefit
221252
- * The git commands here are simpler (just rev-parse + worktree add) vs WorktreeManager's
221253
- * full clone/bare-repo/fetch/worktree pipeline.
223136
+ * visor-disable: architecture - The helpers below (resolveUpstreamRef,
223137
+ * fetchAndResolveUpstream, resetAndCleanWorktree, refreshWorktreeToUpstream)
223138
+ * are NOT duplicates of WorktreeManager's fetchRef/getCommitShaForRef/cleanWorktree.
223139
+ * WorktreeManager operates on BARE repo caches cloned from remote URLs, while
223140
+ * WorkspaceManager operates on the LOCAL working repo the user already has checked out.
223141
+ * The git commands differ (e.g. `fetch origin --prune` vs `fetch origin <ref>:<ref>`)
223142
+ * and sharing code would require adding a "local mode" to WorktreeManager for no benefit.
221254
223143
  */
221255
- async createMainProjectWorktree(targetPath) {
221256
- logger_1.logger.debug(`Creating main project worktree: ${targetPath}`);
221257
- // Get current HEAD
221258
- const headResult = await command_executor_1.commandExecutor.execute(`git -C ${shellEscape(this.originalPath)} rev-parse HEAD`, {
221259
- timeout: 10000,
221260
- });
223144
+ /**
223145
+ * Resolve the upstream default branch ref.
223146
+ * Tries origin/HEAD (symbolic), then origin/main, then origin/master.
223147
+ * Falls back to local HEAD if no remote is configured.
223148
+ */
223149
+ async resolveUpstreamRef() {
223150
+ const esc = shellEscape(this.originalPath);
223151
+ // First, try to resolve origin/HEAD (follows the remote's default branch)
223152
+ const symbolicResult = await command_executor_1.commandExecutor.execute(`git -C ${esc} symbolic-ref refs/remotes/origin/HEAD 2>/dev/null`, { timeout: 10000 });
223153
+ if (symbolicResult.exitCode === 0 && symbolicResult.stdout.trim()) {
223154
+ // Returns something like "refs/remotes/origin/main"
223155
+ const ref = symbolicResult.stdout.trim().replace('refs/remotes/', '');
223156
+ logger_1.logger.debug(`[Workspace] Resolved upstream default branch via origin/HEAD: ${ref}`);
223157
+ return ref;
223158
+ }
223159
+ // Try origin/main
223160
+ const mainResult = await command_executor_1.commandExecutor.execute(`git -C ${esc} rev-parse --verify origin/main 2>/dev/null`, { timeout: 10000 });
223161
+ if (mainResult.exitCode === 0) {
223162
+ logger_1.logger.debug(`[Workspace] Using origin/main as upstream ref`);
223163
+ return 'origin/main';
223164
+ }
223165
+ // Try origin/master
223166
+ const masterResult = await command_executor_1.commandExecutor.execute(`git -C ${esc} rev-parse --verify origin/master 2>/dev/null`, { timeout: 10000 });
223167
+ if (masterResult.exitCode === 0) {
223168
+ logger_1.logger.debug(`[Workspace] Using origin/master as upstream ref`);
223169
+ return 'origin/master';
223170
+ }
223171
+ // Fallback: no remote configured, use local HEAD
223172
+ logger_1.logger.warn(`[Workspace] No upstream remote found, falling back to local HEAD`);
223173
+ return 'HEAD';
223174
+ }
223175
+ /**
223176
+ * Fetch latest from origin, resolve the upstream default branch, and return
223177
+ * both the ref name and the resolved commit SHA.
223178
+ */
223179
+ async fetchAndResolveUpstream() {
223180
+ // Fetch latest from origin
223181
+ logger_1.logger.debug(`[Workspace] Fetching latest from origin`);
223182
+ const fetchResult = await command_executor_1.commandExecutor.execute(`git -C ${shellEscape(this.originalPath)} fetch origin --prune 2>&1`, { timeout: 120000 });
223183
+ if (fetchResult.exitCode !== 0) {
223184
+ logger_1.logger.warn(`[Workspace] fetch origin failed (will use cached refs): ${fetchResult.stderr}`);
223185
+ }
223186
+ // Resolve the upstream ref
223187
+ const upstreamRef = await this.resolveUpstreamRef();
223188
+ // Get the commit SHA for the upstream ref
223189
+ const shaResult = await command_executor_1.commandExecutor.execute(`git -C ${shellEscape(this.originalPath)} rev-parse ${shellEscape(upstreamRef)}`, { timeout: 10000 });
223190
+ if (shaResult.exitCode === 0) {
223191
+ return { upstreamRef, targetSha: shaResult.stdout.trim() };
223192
+ }
223193
+ // Upstream ref unresolvable — fall back to local HEAD
223194
+ logger_1.logger.warn(`[Workspace] Could not resolve ${upstreamRef} (${shaResult.stderr.trim()}), falling back to HEAD`);
223195
+ const headResult = await command_executor_1.commandExecutor.execute(`git -C ${shellEscape(this.originalPath)} rev-parse HEAD`, { timeout: 10000 });
221261
223196
  if (headResult.exitCode !== 0) {
221262
- throw new Error(`Failed to get HEAD: ${headResult.stderr}`);
223197
+ throw new Error(`Repository has no commits — cannot create worktree: ${headResult.stderr}`);
223198
+ }
223199
+ return { upstreamRef: 'HEAD', targetSha: headResult.stdout.trim() };
223200
+ }
223201
+ /**
223202
+ * Reset a worktree to a specific commit and clean all modifications.
223203
+ */
223204
+ async resetAndCleanWorktree(worktreePath, targetSha) {
223205
+ const escapedPath = shellEscape(worktreePath);
223206
+ const escapedSha = shellEscape(targetSha);
223207
+ const resetResult = await command_executor_1.commandExecutor.execute(`git -C ${escapedPath} reset --hard ${escapedSha}`, { timeout: 10000 });
223208
+ if (resetResult.exitCode !== 0) {
223209
+ logger_1.logger.warn(`[Workspace] reset --hard failed: ${resetResult.stderr}`);
223210
+ }
223211
+ const cleanResult = await command_executor_1.commandExecutor.execute(`git -C ${escapedPath} clean -fdx`, {
223212
+ timeout: 30000,
223213
+ });
223214
+ if (cleanResult.exitCode !== 0) {
223215
+ logger_1.logger.warn(`[Workspace] clean -fdx failed: ${cleanResult.stderr}`);
223216
+ }
223217
+ }
223218
+ /**
223219
+ * Refresh an existing worktree to the latest upstream default branch
223220
+ * and ensure it has no modified or untracked files.
223221
+ */
223222
+ async refreshWorktreeToUpstream(worktreePath) {
223223
+ logger_1.logger.info(`[Workspace] Refreshing worktree to latest upstream: ${worktreePath}`);
223224
+ try {
223225
+ const { upstreamRef, targetSha } = await this.fetchAndResolveUpstream();
223226
+ // Point worktree to the upstream commit
223227
+ const checkoutResult = await command_executor_1.commandExecutor.execute(`git -C ${shellEscape(worktreePath)} checkout --detach ${shellEscape(targetSha)}`, { timeout: 30000 });
223228
+ if (checkoutResult.exitCode !== 0) {
223229
+ logger_1.logger.warn(`[Workspace] checkout --detach failed (worktree stays at current commit): ${checkoutResult.stderr}`);
223230
+ // Still clean even if checkout failed — the worktree is valid, just at old commit
223231
+ await this.resetAndCleanWorktree(worktreePath, 'HEAD');
223232
+ return;
223233
+ }
223234
+ // Reset and clean
223235
+ await this.resetAndCleanWorktree(worktreePath, targetSha);
223236
+ logger_1.logger.info(`[Workspace] Worktree updated to ${upstreamRef} (${targetSha.slice(0, 8)})`);
223237
+ }
223238
+ catch (error) {
223239
+ // Best-effort: a stale worktree is better than failing initialization entirely
223240
+ logger_1.logger.warn(`[Workspace] Failed to refresh worktree (continuing with stale state): ${error}`);
221263
223241
  }
221264
- const headRef = headResult.stdout.trim();
221265
- // Create worktree using detached HEAD to avoid branch conflicts
221266
- const createCmd = `git -C ${shellEscape(this.originalPath)} worktree add --detach ${shellEscape(targetPath)} ${shellEscape(headRef)}`;
223242
+ }
223243
+ /**
223244
+ * Create worktree for the main project.
223245
+ * See visor-disable comment above resolveUpstreamRef for why this doesn't use WorktreeManager.
223246
+ */
223247
+ async createMainProjectWorktree(targetPath) {
223248
+ logger_1.logger.debug(`Creating main project worktree: ${targetPath}`);
223249
+ const { upstreamRef, targetSha } = await this.fetchAndResolveUpstream();
223250
+ // Create worktree using detached HEAD at the upstream commit
223251
+ const createCmd = `git -C ${shellEscape(this.originalPath)} worktree add --detach ${shellEscape(targetPath)} ${shellEscape(targetSha)}`;
221267
223252
  const result = await command_executor_1.commandExecutor.execute(createCmd, { timeout: 60000 });
221268
223253
  if (result.exitCode !== 0) {
221269
223254
  throw new Error(`Failed to create main project worktree: ${result.stderr}`);
221270
223255
  }
221271
- logger_1.logger.debug(`Created main project worktree at ${targetPath}`);
223256
+ // Clean (shouldn't be needed in a fresh worktree, but defense in depth)
223257
+ await this.resetAndCleanWorktree(targetPath, targetSha);
223258
+ logger_1.logger.info(`Created main project worktree at ${targetPath} (${upstreamRef} -> ${targetSha.slice(0, 8)})`);
221272
223259
  }
221273
223260
  /**
221274
223261
  * Remove main project worktree
@@ -221950,27 +223937,52 @@ class WorktreeManager {
221950
223937
  fs.rmSync(worktree_path, { recursive: true, force: true });
221951
223938
  }
221952
223939
  }
223940
+ // Clean up sibling metadata file
223941
+ const metadataPath = this.getMetadataPath(worktree_path);
223942
+ try {
223943
+ if (fs.existsSync(metadataPath)) {
223944
+ fs.unlinkSync(metadataPath);
223945
+ }
223946
+ }
223947
+ catch {
223948
+ // best-effort cleanup
223949
+ }
221953
223950
  // Remove from active list
221954
223951
  this.activeWorktrees.delete(worktreeId);
221955
223952
  logger_1.logger.info(`Successfully removed worktree: ${worktreeId}`);
221956
223953
  }
223954
+ /**
223955
+ * Get the metadata file path for a worktree.
223956
+ * Stored as a sibling file OUTSIDE the worktree to avoid being committed
223957
+ * when agents run `git add .` inside the checked-out repo.
223958
+ */
223959
+ getMetadataPath(worktreePath) {
223960
+ return worktreePath.replace(/\/?$/, '') + '.metadata.json';
223961
+ }
221957
223962
  /**
221958
223963
  * Save worktree metadata
221959
223964
  */
221960
223965
  async saveMetadata(worktreePath, metadata) {
221961
- const metadataPath = path.join(worktreePath, '.visor-metadata.json');
223966
+ const metadataPath = this.getMetadataPath(worktreePath);
221962
223967
  fs.writeFileSync(metadataPath, JSON.stringify(metadata, null, 2), 'utf8');
221963
223968
  }
221964
223969
  /**
221965
223970
  * Load worktree metadata
221966
223971
  */
221967
223972
  async loadMetadata(worktreePath) {
221968
- const metadataPath = path.join(worktreePath, '.visor-metadata.json');
221969
- if (!fs.existsSync(metadataPath)) {
223973
+ const metadataPath = this.getMetadataPath(worktreePath);
223974
+ // Also check legacy location (inside worktree) for backwards compatibility
223975
+ const legacyPath = path.join(worktreePath, '.visor-metadata.json');
223976
+ const pathToRead = fs.existsSync(metadataPath)
223977
+ ? metadataPath
223978
+ : fs.existsSync(legacyPath)
223979
+ ? legacyPath
223980
+ : null;
223981
+ if (!pathToRead) {
221970
223982
  return null;
221971
223983
  }
221972
223984
  try {
221973
- const content = fs.readFileSync(metadataPath, 'utf8');
223985
+ const content = fs.readFileSync(pathToRead, 'utf8');
221974
223986
  return JSON.parse(content);
221975
223987
  }
221976
223988
  catch (error) {
@@ -223042,22 +225054,6 @@ class WorkflowRegistry {
223042
225054
  exports.WorkflowRegistry = WorkflowRegistry;
223043
225055
 
223044
225056
 
223045
- /***/ }),
223046
-
223047
- /***/ 7065:
223048
- /***/ ((module) => {
223049
-
223050
- module.exports = eval("require")("./enterprise/loader");
223051
-
223052
-
223053
- /***/ }),
223054
-
223055
- /***/ 71370:
223056
- /***/ ((module) => {
223057
-
223058
- module.exports = eval("require")("./enterprise/policy/policy-input-builder");
223059
-
223060
-
223061
225057
  /***/ }),
223062
225058
 
223063
225059
  /***/ 18327:
@@ -264729,7 +266725,7 @@ var require_package2 = __commonJS({
264729
266725
  module2.exports = {
264730
266726
  name: "@aws-sdk/client-bedrock-runtime",
264731
266727
  description: "AWS SDK for JavaScript Bedrock Runtime Client for Node.js, Browser and React Native",
264732
- version: "3.999.0",
266728
+ version: "3.1000.0",
264733
266729
  scripts: {
264734
266730
  build: "concurrently 'yarn:build:types' 'yarn:build:es' && yarn build:cjs",
264735
266731
  "build:cjs": "node ../../scripts/compilation/inline client-bedrock-runtime",
@@ -264740,7 +266736,11 @@ var require_package2 = __commonJS({
264740
266736
  clean: "premove dist-cjs dist-es dist-types tsconfig.cjs.tsbuildinfo tsconfig.es.tsbuildinfo tsconfig.types.tsbuildinfo",
264741
266737
  "extract:docs": "api-extractor run --local",
264742
266738
  "generate:client": "node ../../scripts/generate-clients/single-service --solo bedrock-runtime",
264743
- "test:index": "tsc --noEmit ./test/index-types.ts && node ./test/index-objects.spec.mjs"
266739
+ test: "yarn g:vitest run --passWithNoTests",
266740
+ "test:index": "tsc --noEmit ./test/index-types.ts && node ./test/index-objects.spec.mjs",
266741
+ "test:integration": "yarn g:vitest run --passWithNoTests -c vitest.config.integ.mts",
266742
+ "test:integration:watch": "yarn g:vitest run --passWithNoTests -c vitest.config.integ.mts",
266743
+ "test:watch": "yarn g:vitest watch --passWithNoTests"
264744
266744
  },
264745
266745
  main: "./dist-cjs/index.js",
264746
266746
  types: "./dist-types/index.d.ts",
@@ -264759,7 +266759,7 @@ var require_package2 = __commonJS({
264759
266759
  "@aws-sdk/middleware-user-agent": "^3.972.15",
264760
266760
  "@aws-sdk/middleware-websocket": "^3.972.10",
264761
266761
  "@aws-sdk/region-config-resolver": "^3.972.6",
264762
- "@aws-sdk/token-providers": "3.999.0",
266762
+ "@aws-sdk/token-providers": "3.1000.0",
264763
266763
  "@aws-sdk/types": "^3.973.4",
264764
266764
  "@aws-sdk/util-endpoints": "^3.996.3",
264765
266765
  "@aws-sdk/util-user-agent-browser": "^3.972.6",
@@ -264796,12 +266796,14 @@ var require_package2 = __commonJS({
264796
266796
  tslib: "^2.6.2"
264797
266797
  },
264798
266798
  devDependencies: {
266799
+ "@smithy/snapshot-testing": "^1.0.7",
264799
266800
  "@tsconfig/node20": "20.1.8",
264800
266801
  "@types/node": "^20.14.8",
264801
266802
  concurrently: "7.0.0",
264802
266803
  "downlevel-dts": "0.10.1",
264803
266804
  premove: "4.0.0",
264804
- typescript: "~5.8.3"
266805
+ typescript: "~5.8.3",
266806
+ vitest: "^4.0.17"
264805
266807
  },
264806
266808
  engines: {
264807
266809
  node: ">=20.0.0"
@@ -266803,9 +268805,9 @@ var init_sso_oidc = __esm({
266803
268805
  }
266804
268806
  });
266805
268807
 
266806
- // node_modules/@aws-sdk/token-providers/dist-cjs/index.js
268808
+ // node_modules/@aws-sdk/credential-provider-sso/node_modules/@aws-sdk/token-providers/dist-cjs/index.js
266807
268809
  var require_dist_cjs56 = __commonJS({
266808
- "node_modules/@aws-sdk/token-providers/dist-cjs/index.js"(exports2) {
268810
+ "node_modules/@aws-sdk/credential-provider-sso/node_modules/@aws-sdk/token-providers/dist-cjs/index.js"(exports2) {
266809
268811
  "use strict";
266810
268812
  var client = (init_client(), __toCommonJS(client_exports));
266811
268813
  var httpAuthSchemes = (init_httpAuthSchemes2(), __toCommonJS(httpAuthSchemes_exports));
@@ -270638,8 +272640,155 @@ var require_dist_cjs63 = __commonJS({
270638
272640
  }
270639
272641
  });
270640
272642
 
270641
- // node_modules/@smithy/eventstream-serde-node/dist-cjs/index.js
272643
+ // node_modules/@aws-sdk/token-providers/dist-cjs/index.js
270642
272644
  var require_dist_cjs64 = __commonJS({
272645
+ "node_modules/@aws-sdk/token-providers/dist-cjs/index.js"(exports2) {
272646
+ "use strict";
272647
+ var client = (init_client(), __toCommonJS(client_exports));
272648
+ var httpAuthSchemes = (init_httpAuthSchemes2(), __toCommonJS(httpAuthSchemes_exports));
272649
+ var propertyProvider = require_dist_cjs24();
272650
+ var sharedIniFileLoader = require_dist_cjs42();
272651
+ var node_fs = __nccwpck_require__(73024);
272652
+ var fromEnvSigningName = ({ logger: logger2, signingName } = {}) => async () => {
272653
+ logger2?.debug?.("@aws-sdk/token-providers - fromEnvSigningName");
272654
+ if (!signingName) {
272655
+ throw new propertyProvider.TokenProviderError("Please pass 'signingName' to compute environment variable key", { logger: logger2 });
272656
+ }
272657
+ const bearerTokenKey = httpAuthSchemes.getBearerTokenEnvKey(signingName);
272658
+ if (!(bearerTokenKey in process.env)) {
272659
+ throw new propertyProvider.TokenProviderError(`Token not present in '${bearerTokenKey}' environment variable`, { logger: logger2 });
272660
+ }
272661
+ const token = { token: process.env[bearerTokenKey] };
272662
+ client.setTokenFeature(token, "BEARER_SERVICE_ENV_VARS", "3");
272663
+ return token;
272664
+ };
272665
+ var EXPIRE_WINDOW_MS = 5 * 60 * 1e3;
272666
+ var REFRESH_MESSAGE = `To refresh this SSO session run 'aws sso login' with the corresponding profile.`;
272667
+ var getSsoOidcClient = async (ssoRegion, init = {}, callerClientConfig) => {
272668
+ const { SSOOIDCClient: SSOOIDCClient2 } = await Promise.resolve().then(() => (init_sso_oidc(), sso_oidc_exports));
272669
+ const coalesce = (prop) => init.clientConfig?.[prop] ?? init.parentClientConfig?.[prop] ?? callerClientConfig?.[prop];
272670
+ const ssoOidcClient = new SSOOIDCClient2(Object.assign({}, init.clientConfig ?? {}, {
272671
+ region: ssoRegion ?? init.clientConfig?.region,
272672
+ logger: coalesce("logger"),
272673
+ userAgentAppId: coalesce("userAgentAppId")
272674
+ }));
272675
+ return ssoOidcClient;
272676
+ };
272677
+ var getNewSsoOidcToken = async (ssoToken, ssoRegion, init = {}, callerClientConfig) => {
272678
+ const { CreateTokenCommand: CreateTokenCommand2 } = await Promise.resolve().then(() => (init_sso_oidc(), sso_oidc_exports));
272679
+ const ssoOidcClient = await getSsoOidcClient(ssoRegion, init, callerClientConfig);
272680
+ return ssoOidcClient.send(new CreateTokenCommand2({
272681
+ clientId: ssoToken.clientId,
272682
+ clientSecret: ssoToken.clientSecret,
272683
+ refreshToken: ssoToken.refreshToken,
272684
+ grantType: "refresh_token"
272685
+ }));
272686
+ };
272687
+ var validateTokenExpiry = (token) => {
272688
+ if (token.expiration && token.expiration.getTime() < Date.now()) {
272689
+ throw new propertyProvider.TokenProviderError(`Token is expired. ${REFRESH_MESSAGE}`, false);
272690
+ }
272691
+ };
272692
+ var validateTokenKey = (key, value, forRefresh = false) => {
272693
+ if (typeof value === "undefined") {
272694
+ throw new propertyProvider.TokenProviderError(`Value not present for '${key}' in SSO Token${forRefresh ? ". Cannot refresh" : ""}. ${REFRESH_MESSAGE}`, false);
272695
+ }
272696
+ };
272697
+ var { writeFile: writeFile2 } = node_fs.promises;
272698
+ var writeSSOTokenToFile = (id, ssoToken) => {
272699
+ const tokenFilepath = sharedIniFileLoader.getSSOTokenFilepath(id);
272700
+ const tokenString = JSON.stringify(ssoToken, null, 2);
272701
+ return writeFile2(tokenFilepath, tokenString);
272702
+ };
272703
+ var lastRefreshAttemptTime = /* @__PURE__ */ new Date(0);
272704
+ var fromSso = (init = {}) => async ({ callerClientConfig } = {}) => {
272705
+ init.logger?.debug("@aws-sdk/token-providers - fromSso");
272706
+ const profiles = await sharedIniFileLoader.parseKnownFiles(init);
272707
+ const profileName = sharedIniFileLoader.getProfileName({
272708
+ profile: init.profile ?? callerClientConfig?.profile
272709
+ });
272710
+ const profile = profiles[profileName];
272711
+ if (!profile) {
272712
+ throw new propertyProvider.TokenProviderError(`Profile '${profileName}' could not be found in shared credentials file.`, false);
272713
+ } else if (!profile["sso_session"]) {
272714
+ throw new propertyProvider.TokenProviderError(`Profile '${profileName}' is missing required property 'sso_session'.`);
272715
+ }
272716
+ const ssoSessionName = profile["sso_session"];
272717
+ const ssoSessions = await sharedIniFileLoader.loadSsoSessionData(init);
272718
+ const ssoSession = ssoSessions[ssoSessionName];
272719
+ if (!ssoSession) {
272720
+ throw new propertyProvider.TokenProviderError(`Sso session '${ssoSessionName}' could not be found in shared credentials file.`, false);
272721
+ }
272722
+ for (const ssoSessionRequiredKey of ["sso_start_url", "sso_region"]) {
272723
+ if (!ssoSession[ssoSessionRequiredKey]) {
272724
+ throw new propertyProvider.TokenProviderError(`Sso session '${ssoSessionName}' is missing required property '${ssoSessionRequiredKey}'.`, false);
272725
+ }
272726
+ }
272727
+ ssoSession["sso_start_url"];
272728
+ const ssoRegion = ssoSession["sso_region"];
272729
+ let ssoToken;
272730
+ try {
272731
+ ssoToken = await sharedIniFileLoader.getSSOTokenFromFile(ssoSessionName);
272732
+ } catch (e5) {
272733
+ throw new propertyProvider.TokenProviderError(`The SSO session token associated with profile=${profileName} was not found or is invalid. ${REFRESH_MESSAGE}`, false);
272734
+ }
272735
+ validateTokenKey("accessToken", ssoToken.accessToken);
272736
+ validateTokenKey("expiresAt", ssoToken.expiresAt);
272737
+ const { accessToken, expiresAt } = ssoToken;
272738
+ const existingToken = { token: accessToken, expiration: new Date(expiresAt) };
272739
+ if (existingToken.expiration.getTime() - Date.now() > EXPIRE_WINDOW_MS) {
272740
+ return existingToken;
272741
+ }
272742
+ if (Date.now() - lastRefreshAttemptTime.getTime() < 30 * 1e3) {
272743
+ validateTokenExpiry(existingToken);
272744
+ return existingToken;
272745
+ }
272746
+ validateTokenKey("clientId", ssoToken.clientId, true);
272747
+ validateTokenKey("clientSecret", ssoToken.clientSecret, true);
272748
+ validateTokenKey("refreshToken", ssoToken.refreshToken, true);
272749
+ try {
272750
+ lastRefreshAttemptTime.setTime(Date.now());
272751
+ const newSsoOidcToken = await getNewSsoOidcToken(ssoToken, ssoRegion, init, callerClientConfig);
272752
+ validateTokenKey("accessToken", newSsoOidcToken.accessToken);
272753
+ validateTokenKey("expiresIn", newSsoOidcToken.expiresIn);
272754
+ const newTokenExpiration = new Date(Date.now() + newSsoOidcToken.expiresIn * 1e3);
272755
+ try {
272756
+ await writeSSOTokenToFile(ssoSessionName, {
272757
+ ...ssoToken,
272758
+ accessToken: newSsoOidcToken.accessToken,
272759
+ expiresAt: newTokenExpiration.toISOString(),
272760
+ refreshToken: newSsoOidcToken.refreshToken
272761
+ });
272762
+ } catch (error2) {
272763
+ }
272764
+ return {
272765
+ token: newSsoOidcToken.accessToken,
272766
+ expiration: newTokenExpiration
272767
+ };
272768
+ } catch (error2) {
272769
+ validateTokenExpiry(existingToken);
272770
+ return existingToken;
272771
+ }
272772
+ };
272773
+ var fromStatic = ({ token, logger: logger2 }) => async () => {
272774
+ logger2?.debug("@aws-sdk/token-providers - fromStatic");
272775
+ if (!token || !token.token) {
272776
+ throw new propertyProvider.TokenProviderError(`Please pass a valid token to fromStatic`, false);
272777
+ }
272778
+ return token;
272779
+ };
272780
+ var nodeProvider = (init = {}) => propertyProvider.memoize(propertyProvider.chain(fromSso(init), async () => {
272781
+ throw new propertyProvider.TokenProviderError("Could not load token from any providers", false);
272782
+ }), (token) => token.expiration !== void 0 && token.expiration.getTime() - Date.now() < 3e5, (token) => token.expiration !== void 0);
272783
+ exports2.fromEnvSigningName = fromEnvSigningName;
272784
+ exports2.fromSso = fromSso;
272785
+ exports2.fromStatic = fromStatic;
272786
+ exports2.nodeProvider = nodeProvider;
272787
+ }
272788
+ });
272789
+
272790
+ // node_modules/@smithy/eventstream-serde-node/dist-cjs/index.js
272791
+ var require_dist_cjs65 = __commonJS({
270643
272792
  "node_modules/@smithy/eventstream-serde-node/dist-cjs/index.js"(exports2) {
270644
272793
  "use strict";
270645
272794
  var eventstreamSerdeUniversal = require_dist_cjs35();
@@ -273321,11 +275470,11 @@ var require_runtimeConfig = __commonJS({
273321
275470
  var core_1 = (init_dist_es2(), __toCommonJS(dist_es_exports2));
273322
275471
  var credential_provider_node_1 = require_dist_cjs62();
273323
275472
  var eventstream_handler_node_1 = require_dist_cjs63();
273324
- var token_providers_1 = require_dist_cjs56();
275473
+ var token_providers_1 = require_dist_cjs64();
273325
275474
  var util_user_agent_node_1 = require_dist_cjs51();
273326
275475
  var config_resolver_1 = require_dist_cjs39();
273327
275476
  var core_2 = (init_dist_es(), __toCommonJS(dist_es_exports));
273328
- var eventstream_serde_node_1 = require_dist_cjs64();
275477
+ var eventstream_serde_node_1 = require_dist_cjs65();
273329
275478
  var hash_node_1 = require_dist_cjs52();
273330
275479
  var middleware_retry_1 = require_dist_cjs47();
273331
275480
  var node_config_provider_1 = require_dist_cjs43();
@@ -273397,7 +275546,7 @@ var require_runtimeConfig = __commonJS({
273397
275546
  });
273398
275547
 
273399
275548
  // node_modules/@aws-sdk/client-bedrock-runtime/dist-cjs/index.js
273400
- var require_dist_cjs65 = __commonJS({
275549
+ var require_dist_cjs66 = __commonJS({
273401
275550
  "node_modules/@aws-sdk/client-bedrock-runtime/dist-cjs/index.js"(exports2) {
273402
275551
  "use strict";
273403
275552
  var middlewareEventstream = require_dist_cjs3();
@@ -274242,13 +276391,13 @@ var import_client_bedrock_runtime, import_client_bedrock_runtime2, import_client
274242
276391
  var init_dist3 = __esm({
274243
276392
  "node_modules/@ai-sdk/amazon-bedrock/dist/index.mjs"() {
274244
276393
  init_dist2();
274245
- import_client_bedrock_runtime = __toESM(require_dist_cjs65(), 1);
276394
+ import_client_bedrock_runtime = __toESM(require_dist_cjs66(), 1);
274246
276395
  init_dist();
274247
- import_client_bedrock_runtime2 = __toESM(require_dist_cjs65(), 1);
276396
+ import_client_bedrock_runtime2 = __toESM(require_dist_cjs66(), 1);
274248
276397
  init_dist();
274249
276398
  init_dist();
274250
276399
  init_dist2();
274251
- import_client_bedrock_runtime3 = __toESM(require_dist_cjs65(), 1);
276400
+ import_client_bedrock_runtime3 = __toESM(require_dist_cjs66(), 1);
274252
276401
  generateFileId = createIdGenerator({ prefix: "file", size: 16 });
274253
276402
  BedrockChatLanguageModel = class {
274254
276403
  constructor(modelId, settings, config) {
@@ -297416,7 +299565,6 @@ var init_reg_exp = __esm({
297416
299565
  // node_modules/chevrotain/lib/src/scan/lexer.js
297417
299566
  function analyzeTokenTypes(tokenTypes, options) {
297418
299567
  options = defaults_default(options, {
297419
- useSticky: SUPPORT_STICKY,
297420
299568
  debug: false,
297421
299569
  safeMode: false,
297422
299570
  positionTracking: "full",
@@ -297465,7 +299613,7 @@ function analyzeTokenTypes(tokenTypes, options) {
297465
299613
  ], regExpSource[1])) {
297466
299614
  return regExpSource[1];
297467
299615
  } else {
297468
- return options.useSticky ? addStickyFlag(currPattern) : addStartOfInput(currPattern);
299616
+ return addStickyFlag(currPattern);
297469
299617
  }
297470
299618
  } else if (isFunction_default(currPattern)) {
297471
299619
  hasCustom = true;
@@ -297479,7 +299627,7 @@ function analyzeTokenTypes(tokenTypes, options) {
297479
299627
  } else {
297480
299628
  const escapedRegExpString = currPattern.replace(/[\\^$.*+?()[\]{}|]/g, "\\$&");
297481
299629
  const wrappedRegExp = new RegExp(escapedRegExpString);
297482
- return options.useSticky ? addStickyFlag(wrappedRegExp) : addStartOfInput(wrappedRegExp);
299630
+ return addStickyFlag(wrappedRegExp);
297483
299631
  }
297484
299632
  } else {
297485
299633
  throw Error("non exhaustive match");
@@ -297883,10 +300031,6 @@ function noMetaChar(regExp) {
297883
300031
  function usesLookAheadOrBehind(regExp) {
297884
300032
  return /(\(\?=)|(\(\?!)|(\(\?<=)|(\(\?<!)/.test(regExp.source);
297885
300033
  }
297886
- function addStartOfInput(pattern) {
297887
- const flags = pattern.ignoreCase ? "i" : "";
297888
- return new RegExp(`^(?:${pattern.source})`, flags);
297889
- }
297890
300034
  function addStickyFlag(pattern) {
297891
300035
  const flags = pattern.ignoreCase ? "iy" : "y";
297892
300036
  return new RegExp(`${pattern.source}`, flags);
@@ -298075,7 +300219,7 @@ function initCharCodeToOptimizedIndexMap() {
298075
300219
  }
298076
300220
  }
298077
300221
  }
298078
- var PATTERN, DEFAULT_MODE, MODES, SUPPORT_STICKY, end_of_input, start_of_input, LineTerminatorOptimizedTester, minOptimizationVal, charCodeToOptimizedIdxMap;
300222
+ var PATTERN, DEFAULT_MODE, MODES, end_of_input, start_of_input, LineTerminatorOptimizedTester, minOptimizationVal, charCodeToOptimizedIdxMap;
298079
300223
  var init_lexer = __esm({
298080
300224
  "node_modules/chevrotain/lib/src/scan/lexer.js"() {
298081
300225
  init_api3();
@@ -298087,7 +300231,6 @@ var init_lexer = __esm({
298087
300231
  PATTERN = "PATTERN";
298088
300232
  DEFAULT_MODE = "defaultMode";
298089
300233
  MODES = "modes";
298090
- SUPPORT_STICKY = typeof new RegExp("(?:)").sticky === "boolean";
298091
300234
  end_of_input = /[^\\][$]/;
298092
300235
  start_of_input = /[^\\[][\^]|^\^/;
298093
300236
  LineTerminatorOptimizedTester = {
@@ -298403,13 +300546,6 @@ var init_lexer_public = __esm({
298403
300546
  PRINT_WARNING(warningDescriptor.message);
298404
300547
  });
298405
300548
  this.TRACE_INIT("Choosing sub-methods implementations", () => {
298406
- if (SUPPORT_STICKY) {
298407
- this.chopInput = identity_default;
298408
- this.match = this.matchWithTest;
298409
- } else {
298410
- this.updateLastIndex = noop_default;
298411
- this.match = this.matchWithExec;
298412
- }
298413
300549
  if (hasOnlySingleMode) {
298414
300550
  this.handleModes = noop_default;
298415
300551
  }
@@ -298472,7 +300608,7 @@ var init_lexer_public = __esm({
298472
300608
  // this method also used quite a bit of `!` none null assertions because it is too optimized
298473
300609
  // for `tsc` to always understand it is "safe"
298474
300610
  tokenizeInternal(text, initialMode) {
298475
- let i5, j5, k5, matchAltImage, longerAlt, matchedImage, payload2, altPayload, imageLength, group, tokType, newToken, errLength, droppedChar, msg, match2;
300611
+ let i5, j5, k5, matchAltImage, longerAlt, matchedImage, payload2, altPayload, imageLength, group, tokType, newToken, errLength, msg, match2;
298476
300612
  const orgText = text;
298477
300613
  const orgLength = orgText.length;
298478
300614
  let offset2 = 0;
@@ -298491,19 +300627,7 @@ var init_lexer_public = __esm({
298491
300627
  const modeStack = [];
298492
300628
  const emptyArray = [];
298493
300629
  Object.freeze(emptyArray);
298494
- let getPossiblePatterns;
298495
- function getPossiblePatternsSlow() {
298496
- return patternIdxToConfig;
298497
- }
298498
- function getPossiblePatternsOptimized(charCode) {
298499
- const optimizedCharIdx = charCodeToOptimizedIndex(charCode);
298500
- const possiblePatterns = currCharCodeToPatternIdxToConfig[optimizedCharIdx];
298501
- if (possiblePatterns === void 0) {
298502
- return emptyArray;
298503
- } else {
298504
- return possiblePatterns;
298505
- }
298506
- }
300630
+ let isOptimizedMode = false;
298507
300631
  const pop_mode = (popToken) => {
298508
300632
  if (modeStack.length === 1 && // if we have both a POP_MODE and a PUSH_MODE this is in-fact a "transition"
298509
300633
  // So no error should occur.
@@ -298524,9 +300648,9 @@ var init_lexer_public = __esm({
298524
300648
  currModePatternsLength = patternIdxToConfig.length;
298525
300649
  const modeCanBeOptimized = this.canModeBeOptimized[newMode] && this.config.safeMode === false;
298526
300650
  if (currCharCodeToPatternIdxToConfig && modeCanBeOptimized) {
298527
- getPossiblePatterns = getPossiblePatternsOptimized;
300651
+ isOptimizedMode = true;
298528
300652
  } else {
298529
- getPossiblePatterns = getPossiblePatternsSlow;
300653
+ isOptimizedMode = false;
298530
300654
  }
298531
300655
  }
298532
300656
  };
@@ -298538,9 +300662,9 @@ var init_lexer_public = __esm({
298538
300662
  currModePatternsLength = patternIdxToConfig.length;
298539
300663
  const modeCanBeOptimized = this.canModeBeOptimized[newMode] && this.config.safeMode === false;
298540
300664
  if (currCharCodeToPatternIdxToConfig && modeCanBeOptimized) {
298541
- getPossiblePatterns = getPossiblePatternsOptimized;
300665
+ isOptimizedMode = true;
298542
300666
  } else {
298543
- getPossiblePatterns = getPossiblePatternsSlow;
300667
+ isOptimizedMode = false;
298544
300668
  }
298545
300669
  }
298546
300670
  push_mode.call(this, initialMode);
@@ -298548,8 +300672,16 @@ var init_lexer_public = __esm({
298548
300672
  const recoveryEnabled = this.config.recoveryEnabled;
298549
300673
  while (offset2 < orgLength) {
298550
300674
  matchedImage = null;
300675
+ imageLength = -1;
298551
300676
  const nextCharCode = orgText.charCodeAt(offset2);
298552
- const chosenPatternIdxToConfig = getPossiblePatterns(nextCharCode);
300677
+ let chosenPatternIdxToConfig;
300678
+ if (isOptimizedMode) {
300679
+ const optimizedCharIdx = charCodeToOptimizedIndex(nextCharCode);
300680
+ const possiblePatterns = currCharCodeToPatternIdxToConfig[optimizedCharIdx];
300681
+ chosenPatternIdxToConfig = possiblePatterns !== void 0 ? possiblePatterns : emptyArray;
300682
+ } else {
300683
+ chosenPatternIdxToConfig = patternIdxToConfig;
300684
+ }
298553
300685
  const chosenPatternsLength = chosenPatternIdxToConfig.length;
298554
300686
  for (i5 = 0; i5 < chosenPatternsLength; i5++) {
298555
300687
  currConfig = chosenPatternIdxToConfig[i5];
@@ -298558,12 +300690,14 @@ var init_lexer_public = __esm({
298558
300690
  const singleCharCode = currConfig.short;
298559
300691
  if (singleCharCode !== false) {
298560
300692
  if (nextCharCode === singleCharCode) {
300693
+ imageLength = 1;
298561
300694
  matchedImage = currPattern;
298562
300695
  }
298563
300696
  } else if (currConfig.isCustom === true) {
298564
300697
  match2 = currPattern.exec(orgText, offset2, matchedTokens, groups);
298565
300698
  if (match2 !== null) {
298566
300699
  matchedImage = match2[0];
300700
+ imageLength = matchedImage.length;
298567
300701
  if (match2.payload !== void 0) {
298568
300702
  payload2 = match2.payload;
298569
300703
  }
@@ -298571,12 +300705,13 @@ var init_lexer_public = __esm({
298571
300705
  matchedImage = null;
298572
300706
  }
298573
300707
  } else {
298574
- this.updateLastIndex(currPattern, offset2);
298575
- matchedImage = this.match(currPattern, text, offset2);
300708
+ currPattern.lastIndex = offset2;
300709
+ imageLength = this.matchLength(currPattern, text, offset2);
298576
300710
  }
298577
- if (matchedImage !== null) {
300711
+ if (imageLength !== -1) {
298578
300712
  longerAlt = currConfig.longerAlt;
298579
300713
  if (longerAlt !== void 0) {
300714
+ matchedImage = text.substring(offset2, offset2 + imageLength);
298580
300715
  const longerAltLength = longerAlt.length;
298581
300716
  for (k5 = 0; k5 < longerAltLength; k5++) {
298582
300717
  const longerAltConfig = patternIdxToConfig[longerAlt[k5]];
@@ -298593,11 +300728,12 @@ var init_lexer_public = __esm({
298593
300728
  matchAltImage = null;
298594
300729
  }
298595
300730
  } else {
298596
- this.updateLastIndex(longerAltPattern, offset2);
300731
+ longerAltPattern.lastIndex = offset2;
298597
300732
  matchAltImage = this.match(longerAltPattern, text, offset2);
298598
300733
  }
298599
300734
  if (matchAltImage && matchAltImage.length > matchedImage.length) {
298600
300735
  matchedImage = matchAltImage;
300736
+ imageLength = matchAltImage.length;
298601
300737
  payload2 = altPayload;
298602
300738
  currConfig = longerAltConfig;
298603
300739
  break;
@@ -298607,10 +300743,10 @@ var init_lexer_public = __esm({
298607
300743
  break;
298608
300744
  }
298609
300745
  }
298610
- if (matchedImage !== null) {
298611
- imageLength = matchedImage.length;
300746
+ if (imageLength !== -1) {
298612
300747
  group = currConfig.group;
298613
300748
  if (group !== void 0) {
300749
+ matchedImage = matchedImage !== null ? matchedImage : text.substring(offset2, offset2 + imageLength);
298614
300750
  tokType = currConfig.tokenTypeIdx;
298615
300751
  newToken = this.createTokenInstance(matchedImage, offset2, tokType, currConfig.tokenType, line, column, imageLength);
298616
300752
  this.handlePayload(newToken, payload2);
@@ -298620,15 +300756,13 @@ var init_lexer_public = __esm({
298620
300756
  groups[group].push(newToken);
298621
300757
  }
298622
300758
  }
298623
- text = this.chopInput(text, imageLength);
298624
- offset2 = offset2 + imageLength;
298625
- column = this.computeNewColumn(column, imageLength);
298626
300759
  if (trackLines === true && currConfig.canLineTerminator === true) {
298627
300760
  let numOfLTsInMatch = 0;
298628
300761
  let foundTerminator;
298629
300762
  let lastLTEndOffset;
298630
300763
  lineTerminatorPattern.lastIndex = 0;
298631
300764
  do {
300765
+ matchedImage = matchedImage !== null ? matchedImage : text.substring(offset2, offset2 + imageLength);
298632
300766
  foundTerminator = lineTerminatorPattern.test(matchedImage);
298633
300767
  if (foundTerminator === true) {
298634
300768
  lastLTEndOffset = lineTerminatorPattern.lastIndex - 1;
@@ -298639,8 +300773,13 @@ var init_lexer_public = __esm({
298639
300773
  line = line + numOfLTsInMatch;
298640
300774
  column = imageLength - lastLTEndOffset;
298641
300775
  this.updateTokenEndLineColumnLocation(newToken, group, lastLTEndOffset, numOfLTsInMatch, line, column, imageLength);
300776
+ } else {
300777
+ column = this.computeNewColumn(column, imageLength);
298642
300778
  }
300779
+ } else {
300780
+ column = this.computeNewColumn(column, imageLength);
298643
300781
  }
300782
+ offset2 = offset2 + imageLength;
298644
300783
  this.handleModes(currConfig, pop_mode, push_mode, newToken);
298645
300784
  } else {
298646
300785
  const errorStartOffset = offset2;
@@ -298648,7 +300787,6 @@ var init_lexer_public = __esm({
298648
300787
  const errorColumn = column;
298649
300788
  let foundResyncPoint = recoveryEnabled === false;
298650
300789
  while (foundResyncPoint === false && offset2 < orgLength) {
298651
- text = this.chopInput(text, 1);
298652
300790
  offset2++;
298653
300791
  for (j5 = 0; j5 < currModePatternsLength; j5++) {
298654
300792
  const currConfig2 = patternIdxToConfig[j5];
@@ -298661,7 +300799,7 @@ var init_lexer_public = __esm({
298661
300799
  } else if (currConfig2.isCustom === true) {
298662
300800
  foundResyncPoint = currPattern.exec(orgText, offset2, matchedTokens, groups) !== null;
298663
300801
  } else {
298664
- this.updateLastIndex(currPattern, offset2);
300802
+ currPattern.lastIndex = offset2;
298665
300803
  foundResyncPoint = currPattern.exec(text) !== null;
298666
300804
  }
298667
300805
  if (foundResyncPoint === true) {
@@ -298704,12 +300842,6 @@ var init_lexer_public = __esm({
298704
300842
  push_mode.call(this, config.push);
298705
300843
  }
298706
300844
  }
298707
- chopInput(text, length) {
298708
- return text.substring(length);
298709
- }
298710
- updateLastIndex(regExp, newLastIndex) {
298711
- regExp.lastIndex = newLastIndex;
298712
- }
298713
300845
  // TODO: decrease this under 600 characters? inspect stripping comments option in TSC compiler
298714
300846
  updateTokenEndLineColumnLocation(newToken, group, lastLTIdx, numOfLTsInMatch, line, column, imageLength) {
298715
300847
  let lastCharIsLT, fixForEndingInLT;
@@ -298772,16 +300904,19 @@ var init_lexer_public = __esm({
298772
300904
  token.payload = payload2;
298773
300905
  }
298774
300906
  }
298775
- matchWithTest(pattern, text, offset2) {
300907
+ match(pattern, text, offset2) {
298776
300908
  const found = pattern.test(text);
298777
300909
  if (found === true) {
298778
300910
  return text.substring(offset2, pattern.lastIndex);
298779
300911
  }
298780
300912
  return null;
298781
300913
  }
298782
- matchWithExec(pattern, text) {
298783
- const regExpArray = pattern.exec(text);
298784
- return regExpArray !== null ? regExpArray[0] : null;
300914
+ matchLength(pattern, text, offset2) {
300915
+ const found = pattern.test(text);
300916
+ if (found === true) {
300917
+ return pattern.lastIndex - offset2;
300918
+ }
300919
+ return -1;
298785
300920
  }
298786
300921
  };
298787
300922
  Lexer.SKIPPED = "This marks a skipped Token pattern, this means each token identified by it will be consumed and then thrown into oblivion, this can be used to for example to completely ignore whitespace.";
@@ -298973,12 +301108,20 @@ For Further details.`;
298973
301108
  return errMsg;
298974
301109
  },
298975
301110
  buildAlternationAmbiguityError(options) {
298976
- const pathMsg = map_default(options.prefixPath, (currtok) => tokenLabel2(currtok)).join(", ");
298977
301111
  const occurrence = options.alternation.idx === 0 ? "" : options.alternation.idx;
301112
+ const isEmptyPath = options.prefixPath.length === 0;
298978
301113
  let currMessage = `Ambiguous Alternatives Detected: <${options.ambiguityIndices.join(" ,")}> in <OR${occurrence}> inside <${options.topLevelRule.name}> Rule,
298979
- <${pathMsg}> may appears as a prefix path in all these alternatives.
298980
301114
  `;
298981
- currMessage = currMessage + `See: https://chevrotain.io/docs/guide/resolving_grammar_errors.html#AMBIGUOUS_ALTERNATIVES
301115
+ if (isEmptyPath) {
301116
+ currMessage += `These alternatives are all empty (match no tokens), making them indistinguishable.
301117
+ Only the last alternative may be empty.
301118
+ `;
301119
+ } else {
301120
+ const pathMsg = map_default(options.prefixPath, (currtok) => tokenLabel2(currtok)).join(", ");
301121
+ currMessage += `<${pathMsg}> may appears as a prefix path in all these alternatives.
301122
+ `;
301123
+ }
301124
+ currMessage += `See: https://chevrotain.io/docs/guide/resolving_grammar_errors.html#AMBIGUOUS_ALTERNATIVES
298982
301125
  For Further details.`;
298983
301126
  return currMessage;
298984
301127
  },
@@ -328217,12 +330360,31 @@ function cleanSchemaResponse(response) {
328217
330360
  const closeChar = openChar === "{" ? "}" : "]";
328218
330361
  let bracketCount = 1;
328219
330362
  let endIndex = startIndex + 1;
330363
+ let inString = false;
330364
+ let escapeNext = false;
328220
330365
  while (endIndex < trimmed.length && bracketCount > 0) {
328221
330366
  const char = trimmed[endIndex];
328222
- if (char === openChar) {
328223
- bracketCount++;
328224
- } else if (char === closeChar) {
328225
- bracketCount--;
330367
+ if (escapeNext) {
330368
+ escapeNext = false;
330369
+ endIndex++;
330370
+ continue;
330371
+ }
330372
+ if (char === "\\" && inString) {
330373
+ escapeNext = true;
330374
+ endIndex++;
330375
+ continue;
330376
+ }
330377
+ if (char === '"') {
330378
+ inString = !inString;
330379
+ endIndex++;
330380
+ continue;
330381
+ }
330382
+ if (!inString) {
330383
+ if (char === openChar) {
330384
+ bracketCount++;
330385
+ } else if (char === closeChar) {
330386
+ bracketCount--;
330387
+ }
328226
330388
  }
328227
330389
  endIndex++;
328228
330390
  }
@@ -350603,6 +352765,122 @@ var init_bashPermissions = __esm({
350603
352765
  });
350604
352766
 
350605
352767
  // src/agent/bashExecutor.js
352768
+ function splitCommandComponents(command) {
352769
+ const parts = [];
352770
+ let current2 = "";
352771
+ let inQuote = false;
352772
+ let quoteChar = "";
352773
+ for (let i5 = 0; i5 < command.length; i5++) {
352774
+ const c5 = command[i5];
352775
+ const next = command[i5 + 1] || "";
352776
+ if (c5 === "\\" && !inQuote) {
352777
+ current2 += c5 + next;
352778
+ i5++;
352779
+ continue;
352780
+ }
352781
+ if (inQuote && quoteChar === '"' && c5 === "\\" && next) {
352782
+ current2 += c5 + next;
352783
+ i5++;
352784
+ continue;
352785
+ }
352786
+ if (!inQuote && (c5 === '"' || c5 === "'")) {
352787
+ inQuote = true;
352788
+ quoteChar = c5;
352789
+ current2 += c5;
352790
+ continue;
352791
+ }
352792
+ if (inQuote && c5 === quoteChar) {
352793
+ inQuote = false;
352794
+ current2 += c5;
352795
+ continue;
352796
+ }
352797
+ if (!inQuote) {
352798
+ if (c5 === "&" && next === "&" || c5 === "|" && next === "|") {
352799
+ if (current2.trim()) parts.push(current2.trim());
352800
+ current2 = "";
352801
+ i5++;
352802
+ continue;
352803
+ }
352804
+ if (c5 === "|" || c5 === ";") {
352805
+ if (current2.trim()) parts.push(current2.trim());
352806
+ current2 = "";
352807
+ continue;
352808
+ }
352809
+ }
352810
+ current2 += c5;
352811
+ }
352812
+ if (current2.trim()) parts.push(current2.trim());
352813
+ return parts;
352814
+ }
352815
+ function checkSingleCommandInteractive(command) {
352816
+ let effective = command.trim();
352817
+ while (/^\w+=\S*\s/.test(effective)) {
352818
+ effective = effective.replace(/^\w+=\S*\s+/, "");
352819
+ }
352820
+ const parts = effective.split(/\s+/);
352821
+ const base2 = parts[0];
352822
+ const args = parts.slice(1);
352823
+ if (["vi", "vim", "nvim", "nano", "emacs", "pico", "joe", "mcedit"].includes(base2)) {
352824
+ return `'${base2}' is an interactive editor and cannot run without a terminal. Use non-interactive file manipulation commands instead.`;
352825
+ }
352826
+ if (["less", "more"].includes(base2)) {
352827
+ return `'${base2}' is an interactive pager. Use 'cat', 'head', or 'tail' instead.`;
352828
+ }
352829
+ if (base2 === "git") {
352830
+ const sub = args[0];
352831
+ if (sub === "commit") {
352832
+ const hasNonInteractiveFlag = args.some(
352833
+ (a5) => a5 === "-m" || a5.startsWith("--message") || a5 === "-C" || a5 === "-c" || a5.startsWith("--fixup") || a5.startsWith("--squash") || a5 === "--allow-empty-message" || a5 === "--no-edit"
352834
+ );
352835
+ if (!hasNonInteractiveFlag) {
352836
+ return `Interactive command: 'git commit' opens an editor for the commit message. Use 'git commit -m "your message"' instead.`;
352837
+ }
352838
+ }
352839
+ if (sub === "rebase" && (args.includes("--continue") || args.includes("--skip"))) {
352840
+ return "Interactive command: 'git rebase --continue' opens an editor. Set environment variable GIT_EDITOR=true to accept default messages, e.g. pass env: {GIT_EDITOR: 'true'} or prepend GIT_EDITOR=true to the command.";
352841
+ }
352842
+ if (sub === "rebase" && (args.includes("-i") || args.includes("--interactive"))) {
352843
+ return "Interactive command: 'git rebase -i' requires an interactive editor. Interactive rebase cannot run without a terminal.";
352844
+ }
352845
+ if (sub === "merge" && !args.includes("--no-edit") && !args.includes("--no-commit") && !args.includes("--ff-only")) {
352846
+ return "Interactive command: 'git merge' may open an editor for the merge commit message. Add '--no-edit' to accept the default message.";
352847
+ }
352848
+ if (sub === "cherry-pick" && !args.includes("--no-edit")) {
352849
+ return "Interactive command: 'git cherry-pick' may open an editor. Add '--no-edit' to accept the default message.";
352850
+ }
352851
+ if (sub === "revert" && !args.includes("--no-edit")) {
352852
+ return "Interactive command: 'git revert' opens an editor. Add '--no-edit' to accept the default message.";
352853
+ }
352854
+ if (sub === "tag" && args.includes("-a") && !args.some((a5) => a5 === "-m" || a5.startsWith("--message"))) {
352855
+ return `Interactive command: 'git tag -a' opens an editor for the tag message. Use 'git tag -a <name> -m "message"' instead.`;
352856
+ }
352857
+ if (sub === "add" && (args.includes("-i") || args.includes("--interactive") || args.includes("-p") || args.includes("--patch"))) {
352858
+ return "Interactive command: 'git add -i/-p' requires interactive input. Use 'git add <files>' to stage specific files instead.";
352859
+ }
352860
+ }
352861
+ if (["python", "python3", "node", "irb", "ghci", "lua", "R", "ruby"].includes(base2) && args.length === 0) {
352862
+ return `Interactive command: '${base2}' without arguments starts an interactive REPL. Provide a script file or use '-c'/'--eval' for inline code.`;
352863
+ }
352864
+ if (base2 === "mysql" && !args.some((a5) => a5 === "-e" || a5.startsWith("--execute"))) {
352865
+ return `Interactive command: 'mysql' without -e flag starts an interactive session. Use 'mysql -e "SQL QUERY"' instead.`;
352866
+ }
352867
+ if (base2 === "psql" && !args.some((a5) => a5 === "-c" || a5.startsWith("--command") || a5 === "-f" || a5.startsWith("--file"))) {
352868
+ return `Interactive command: 'psql' without -c flag starts an interactive session. Use 'psql -c "SQL QUERY"' instead.`;
352869
+ }
352870
+ if (["top", "htop", "btop", "nmon"].includes(base2)) {
352871
+ return `Interactive command: '${base2}' is an interactive TUI tool. Use 'ps aux' or 'top -b -n 1' for non-interactive process listing.`;
352872
+ }
352873
+ return null;
352874
+ }
352875
+ function checkInteractiveCommand(command) {
352876
+ if (!command || typeof command !== "string") return null;
352877
+ const components = splitCommandComponents(command.trim());
352878
+ for (const component of components) {
352879
+ const result = checkSingleCommandInteractive(component);
352880
+ if (result) return result;
352881
+ }
352882
+ return null;
352883
+ }
350606
352884
  async function executeBashCommand(command, options = {}) {
350607
352885
  const {
350608
352886
  workingDirectory = process.cwd(),
@@ -350632,6 +352910,24 @@ async function executeBashCommand(command, options = {}) {
350632
352910
  };
350633
352911
  }
350634
352912
  const startTime = Date.now();
352913
+ const interactiveError = checkInteractiveCommand(command);
352914
+ if (interactiveError) {
352915
+ if (debug) {
352916
+ console.log(`[BashExecutor] Blocked interactive command: "${command}"`);
352917
+ console.log(`[BashExecutor] Reason: ${interactiveError}`);
352918
+ }
352919
+ return {
352920
+ success: false,
352921
+ error: interactiveError,
352922
+ stdout: "",
352923
+ stderr: interactiveError,
352924
+ exitCode: 1,
352925
+ command,
352926
+ workingDirectory: cwd,
352927
+ duration: 0,
352928
+ interactive: true
352929
+ };
352930
+ }
350635
352931
  if (debug) {
350636
352932
  console.log(`[BashExecutor] Executing command: "${command}"`);
350637
352933
  console.log(`[BashExecutor] Working directory: "${cwd}"`);
@@ -350642,6 +352938,8 @@ async function executeBashCommand(command, options = {}) {
350642
352938
  ...process.env,
350643
352939
  ...env
350644
352940
  };
352941
+ if (!processEnv.GIT_EDITOR) processEnv.GIT_EDITOR = "true";
352942
+ if (!processEnv.GIT_TERMINAL_PROMPT) processEnv.GIT_TERMINAL_PROMPT = "0";
350645
352943
  const isComplex = isComplexCommand(command);
350646
352944
  let cmd, cmdArgs, useShell;
350647
352945
  if (isComplex) {
@@ -350676,20 +352974,32 @@ async function executeBashCommand(command, options = {}) {
350676
352974
  // stdin ignored, capture stdout/stderr
350677
352975
  shell: useShell,
350678
352976
  // false for security
352977
+ detached: true,
352978
+ // new session — no controlling terminal
350679
352979
  windowsHide: true
350680
352980
  });
350681
352981
  let stdout = "";
350682
352982
  let stderr = "";
350683
352983
  let killed = false;
350684
352984
  let timeoutHandle;
352985
+ const killProcessGroup = (signal) => {
352986
+ try {
352987
+ if (child.pid) process.kill(-child.pid, signal);
352988
+ } catch {
352989
+ try {
352990
+ child.kill(signal);
352991
+ } catch {
352992
+ }
352993
+ }
352994
+ };
350685
352995
  if (timeout > 0) {
350686
352996
  timeoutHandle = setTimeout(() => {
350687
352997
  if (!killed) {
350688
352998
  killed = true;
350689
- child.kill("SIGTERM");
352999
+ killProcessGroup("SIGTERM");
350690
353000
  setTimeout(() => {
350691
353001
  if (child.exitCode === null) {
350692
- child.kill("SIGKILL");
353002
+ killProcessGroup("SIGKILL");
350693
353003
  }
350694
353004
  }, 5e3);
350695
353005
  }
@@ -350702,7 +353012,7 @@ async function executeBashCommand(command, options = {}) {
350702
353012
  } else {
350703
353013
  if (!killed) {
350704
353014
  killed = true;
350705
- child.kill("SIGTERM");
353015
+ killProcessGroup("SIGTERM");
350706
353016
  }
350707
353017
  }
350708
353018
  });
@@ -350713,7 +353023,7 @@ async function executeBashCommand(command, options = {}) {
350713
353023
  } else {
350714
353024
  if (!killed) {
350715
353025
  killed = true;
350716
- child.kill("SIGTERM");
353026
+ killProcessGroup("SIGTERM");
350717
353027
  }
350718
353028
  }
350719
353029
  });
@@ -395253,7 +397563,7 @@ module.exports = /*#__PURE__*/JSON.parse('{"100":"Continue","101":"Switching Pro
395253
397563
  /***/ ((module) => {
395254
397564
 
395255
397565
  "use strict";
395256
- module.exports = /*#__PURE__*/JSON.parse('{"name":"@probelabs/visor","version":"0.1.147","main":"dist/index.js","bin":{"visor":"./dist/index.js"},"exports":{".":{"require":"./dist/index.js","import":"./dist/index.js"},"./sdk":{"types":"./dist/sdk/sdk.d.ts","import":"./dist/sdk/sdk.mjs","require":"./dist/sdk/sdk.js"},"./cli":{"require":"./dist/index.js"}},"files":["dist/","defaults/","action.yml","README.md","LICENSE"],"publishConfig":{"access":"public","registry":"https://registry.npmjs.org/"},"scripts":{"build:cli":"ncc build src/index.ts -o dist && cp -r defaults dist/ && cp -r output dist/ && cp -r docs dist/ && cp -r examples dist/ && cp -r src/debug-visualizer/ui dist/debug-visualizer/ && node scripts/inject-version.js && echo \'#!/usr/bin/env node\' | cat - dist/index.js > temp && mv temp dist/index.js && chmod +x dist/index.js","build:sdk":"tsup src/sdk.ts --dts --sourcemap --format esm,cjs --out-dir dist/sdk","build":"./scripts/build-oss.sh","build:ee":"npm run build:cli && npm run build:sdk","test":"jest && npm run test:yaml","test:unit":"jest","prepublishOnly":"npm run build","test:watch":"jest --watch","test:coverage":"jest --coverage","test:ee":"jest --testPathPatterns=\'tests/ee\' --testPathIgnorePatterns=\'/node_modules/\' --no-coverage","test:manual:bash":"RUN_MANUAL_TESTS=true jest tests/manual/bash-config-manual.test.ts","lint":"eslint src tests --ext .ts","lint:fix":"eslint src tests --ext .ts --fix","format":"prettier --write src tests","format:check":"prettier --check src tests","clean":"","clean:traces":"node scripts/clean-traces.js","prebuild":"npm run clean && node scripts/generate-config-schema.js","pretest":"npm run clean:traces && node scripts/generate-config-schema.js && npm run build:cli","pretest:unit":"npm run clean:traces && node scripts/generate-config-schema.js && npm run build:cli","test:with-build":"npm run build:cli && jest","test:yaml":"node dist/index.js test --progress compact","test:yaml:parallel":"node dist/index.js test 
--progress compact --max-parallel 4","prepare":"husky","pre-commit":"lint-staged","deploy:site":"cd site && npx wrangler pages deploy . --project-name=visor-site --commit-dirty=true","deploy:worker":"npx wrangler deploy","deploy":"npm run deploy:site && npm run deploy:worker","publish:ee":"./scripts/publish-ee.sh","release":"./scripts/release.sh","release:patch":"./scripts/release.sh patch","release:minor":"./scripts/release.sh minor","release:major":"./scripts/release.sh major","release:prerelease":"./scripts/release.sh prerelease","docs:validate":"node scripts/validate-readme-links.js","workshop:setup":"npm install -D reveal-md@6.1.2","workshop:serve":"cd workshop && reveal-md slides.md -w","workshop:export":"reveal-md workshop/slides.md --static workshop/build","workshop:pdf":"reveal-md workshop/slides.md --print workshop/Visor-Workshop.pdf --print-size letter","workshop:pdf:ci":"reveal-md workshop/slides.md --print workshop/Visor-Workshop.pdf --print-size letter --puppeteer-launch-args=\\"--no-sandbox --disable-dev-shm-usage\\"","workshop:pdf:a4":"reveal-md workshop/slides.md --print workshop/Visor-Workshop-A4.pdf --print-size A4","workshop:build":"npm run workshop:export && npm run workshop:pdf","simulate:issue":"TS_NODE_TRANSPILE_ONLY=1 ts-node scripts/simulate-gh-run.ts --event issues --action opened --debug","simulate:comment":"TS_NODE_TRANSPILE_ONLY=1 ts-node scripts/simulate-gh-run.ts --event issue_comment --action created --debug"},"keywords":["code-review","ai","github-action","cli","pr-review","visor"],"author":"Probe Labs","license":"MIT","description":"AI workflow engine for code review, assistants, and automation — orchestrate checks, MCP tools, and AI providers with YAML-driven 
pipelines","repository":{"type":"git","url":"git+https://github.com/probelabs/visor.git"},"bugs":{"url":"https://github.com/probelabs/visor/issues"},"homepage":"https://github.com/probelabs/visor#readme","dependencies":{"@actions/core":"^1.11.1","@apidevtools/swagger-parser":"^12.1.0","@modelcontextprotocol/sdk":"^1.25.3","@nyariv/sandboxjs":"github:probelabs/SandboxJS#f1c13b8eee98734a8ea024061eada4aa9a9ff2e9","@octokit/action":"^8.0.2","@octokit/auth-app":"^8.1.0","@octokit/core":"^7.0.3","@octokit/rest":"^22.0.0","@opentelemetry/api":"^1.9.0","@opentelemetry/core":"^1.30.1","@opentelemetry/exporter-trace-otlp-grpc":"^0.203.0","@opentelemetry/exporter-trace-otlp-http":"^0.203.0","@opentelemetry/instrumentation":"^0.203.0","@opentelemetry/resources":"^1.30.1","@opentelemetry/sdk-metrics":"^1.30.1","@opentelemetry/sdk-node":"^0.203.0","@opentelemetry/sdk-trace-base":"^1.30.1","@opentelemetry/semantic-conventions":"^1.30.1","@probelabs/probe":"^0.6.0-rc262","@types/commander":"^2.12.0","@types/uuid":"^10.0.0","acorn":"^8.16.0","acorn-walk":"^8.3.5","ajv":"^8.17.1","ajv-formats":"^3.0.1","better-sqlite3":"^11.0.0","blessed":"^0.1.81","cli-table3":"^0.6.5","commander":"^14.0.0","deepmerge":"^4.3.1","dotenv":"^17.2.3","ignore":"^7.0.5","js-yaml":"^4.1.0","jsonpath-plus":"^10.4.0","liquidjs":"^10.21.1","minimatch":"^10.2.2","node-cron":"^3.0.3","open":"^9.1.0","simple-git":"^3.28.0","uuid":"^11.1.0","ws":"^8.18.3"},"optionalDependencies":{"@anthropic/claude-code-sdk":"npm:null@*","@open-policy-agent/opa-wasm":"^1.10.0","knex":"^3.1.0","mysql2":"^3.11.0","pg":"^8.13.0","tedious":"^19.0.0"},"devDependencies":{"@eslint/js":"^9.34.0","@kie/act-js":"^2.6.2","@kie/mock-github":"^2.0.1","@swc/core":"^1.13.2","@swc/jest":"^0.2.37","@types/better-sqlite3":"^7.6.0","@types/blessed":"^0.1.27","@types/jest":"^30.0.0","@types/js-yaml":"^4.0.9","@types/node":"^24.3.0","@types/node-cron":"^3.0.11","@types/ws":"^8.18.1","@typescript-eslint/eslint-plugin":"^8.42.0","@typescript-eslint/par
ser":"^8.42.0","@vercel/ncc":"^0.38.4","eslint":"^9.34.0","eslint-config-prettier":"^10.1.8","eslint-plugin-prettier":"^5.5.4","husky":"^9.1.7","jest":"^30.1.3","lint-staged":"^16.1.6","prettier":"^3.6.2","reveal-md":"^6.1.2","ts-json-schema-generator":"^1.5.1","ts-node":"^10.9.2","tsup":"^8.5.0","typescript":"^5.9.2","wrangler":"^3.0.0"},"peerDependenciesMeta":{"@anthropic/claude-code-sdk":{"optional":true}},"directories":{"test":"tests"},"lint-staged":{"src/**/*.{ts,js}":["eslint --fix","prettier --write"],"tests/**/*.{ts,js}":["eslint --fix","prettier --write"],"*.{json,md,yml,yaml}":["prettier --write"]}}');
397566
+ module.exports = /*#__PURE__*/JSON.parse('{"name":"@probelabs/visor","version":"0.1.42","main":"dist/index.js","bin":{"visor":"./dist/index.js"},"exports":{".":{"require":"./dist/index.js","import":"./dist/index.js"},"./sdk":{"types":"./dist/sdk/sdk.d.ts","import":"./dist/sdk/sdk.mjs","require":"./dist/sdk/sdk.js"},"./cli":{"require":"./dist/index.js"}},"files":["dist/","defaults/","action.yml","README.md","LICENSE"],"publishConfig":{"access":"public","registry":"https://registry.npmjs.org/"},"scripts":{"build:cli":"ncc build src/index.ts -o dist && cp -r defaults dist/ && cp -r output dist/ && cp -r docs dist/ && cp -r examples dist/ && cp -r src/debug-visualizer/ui dist/debug-visualizer/ && node scripts/inject-version.js && echo \'#!/usr/bin/env node\' | cat - dist/index.js > temp && mv temp dist/index.js && chmod +x dist/index.js","build:sdk":"tsup src/sdk.ts --dts --sourcemap --format esm,cjs --out-dir dist/sdk","build":"./scripts/build-oss.sh","build:ee":"npm run build:cli && npm run build:sdk","test":"jest && npm run test:yaml","test:unit":"jest","prepublishOnly":"npm run build","test:watch":"jest --watch","test:coverage":"jest --coverage","test:ee":"jest --testPathPatterns=\'tests/ee\' --testPathIgnorePatterns=\'/node_modules/\' --no-coverage","test:manual:bash":"RUN_MANUAL_TESTS=true jest tests/manual/bash-config-manual.test.ts","lint":"eslint src tests --ext .ts","lint:fix":"eslint src tests --ext .ts --fix","format":"prettier --write src tests","format:check":"prettier --check src tests","clean":"","clean:traces":"node scripts/clean-traces.js","prebuild":"npm run clean && node scripts/generate-config-schema.js","pretest":"npm run clean:traces && node scripts/generate-config-schema.js && npm run build:cli","pretest:unit":"npm run clean:traces && node scripts/generate-config-schema.js && npm run build:cli","test:with-build":"npm run build:cli && jest","test:yaml":"node dist/index.js test --progress compact","test:yaml:parallel":"node dist/index.js test 
--progress compact --max-parallel 4","prepare":"husky","pre-commit":"lint-staged","deploy:site":"cd site && npx wrangler pages deploy . --project-name=visor-site --commit-dirty=true","deploy:worker":"npx wrangler deploy","deploy":"npm run deploy:site && npm run deploy:worker","publish:ee":"./scripts/publish-ee.sh","release":"./scripts/release.sh","release:patch":"./scripts/release.sh patch","release:minor":"./scripts/release.sh minor","release:major":"./scripts/release.sh major","release:prerelease":"./scripts/release.sh prerelease","docs:validate":"node scripts/validate-readme-links.js","workshop:setup":"npm install -D reveal-md@6.1.2","workshop:serve":"cd workshop && reveal-md slides.md -w","workshop:export":"reveal-md workshop/slides.md --static workshop/build","workshop:pdf":"reveal-md workshop/slides.md --print workshop/Visor-Workshop.pdf --print-size letter","workshop:pdf:ci":"reveal-md workshop/slides.md --print workshop/Visor-Workshop.pdf --print-size letter --puppeteer-launch-args=\\"--no-sandbox --disable-dev-shm-usage\\"","workshop:pdf:a4":"reveal-md workshop/slides.md --print workshop/Visor-Workshop-A4.pdf --print-size A4","workshop:build":"npm run workshop:export && npm run workshop:pdf","simulate:issue":"TS_NODE_TRANSPILE_ONLY=1 ts-node scripts/simulate-gh-run.ts --event issues --action opened --debug","simulate:comment":"TS_NODE_TRANSPILE_ONLY=1 ts-node scripts/simulate-gh-run.ts --event issue_comment --action created --debug"},"keywords":["code-review","ai","github-action","cli","pr-review","visor"],"author":"Probe Labs","license":"MIT","description":"AI workflow engine for code review, assistants, and automation — orchestrate checks, MCP tools, and AI providers with YAML-driven 
pipelines","repository":{"type":"git","url":"git+https://github.com/probelabs/visor.git"},"bugs":{"url":"https://github.com/probelabs/visor/issues"},"homepage":"https://github.com/probelabs/visor#readme","dependencies":{"@actions/core":"^1.11.1","@apidevtools/swagger-parser":"^12.1.0","@modelcontextprotocol/sdk":"^1.25.3","@nyariv/sandboxjs":"github:probelabs/SandboxJS#f1c13b8eee98734a8ea024061eada4aa9a9ff2e9","@octokit/action":"^8.0.2","@octokit/auth-app":"^8.1.0","@octokit/core":"^7.0.3","@octokit/rest":"^22.0.0","@opentelemetry/api":"^1.9.0","@opentelemetry/core":"^1.30.1","@opentelemetry/exporter-trace-otlp-grpc":"^0.203.0","@opentelemetry/exporter-trace-otlp-http":"^0.203.0","@opentelemetry/instrumentation":"^0.203.0","@opentelemetry/resources":"^1.30.1","@opentelemetry/sdk-metrics":"^1.30.1","@opentelemetry/sdk-node":"^0.203.0","@opentelemetry/sdk-trace-base":"^1.30.1","@opentelemetry/semantic-conventions":"^1.30.1","@probelabs/probe":"^0.6.0-rc264","@types/commander":"^2.12.0","@types/uuid":"^10.0.0","acorn":"^8.16.0","acorn-walk":"^8.3.5","ajv":"^8.17.1","ajv-formats":"^3.0.1","better-sqlite3":"^11.0.0","blessed":"^0.1.81","cli-table3":"^0.6.5","commander":"^14.0.0","deepmerge":"^4.3.1","dotenv":"^17.2.3","ignore":"^7.0.5","js-yaml":"^4.1.0","jsonpath-plus":"^10.4.0","liquidjs":"^10.21.1","minimatch":"^10.2.2","node-cron":"^3.0.3","open":"^9.1.0","simple-git":"^3.28.0","uuid":"^11.1.0","ws":"^8.18.3"},"optionalDependencies":{"@anthropic/claude-code-sdk":"npm:null@*","@open-policy-agent/opa-wasm":"^1.10.0","knex":"^3.1.0","mysql2":"^3.11.0","pg":"^8.13.0","tedious":"^19.0.0"},"devDependencies":{"@eslint/js":"^9.34.0","@kie/act-js":"^2.6.2","@kie/mock-github":"^2.0.1","@swc/core":"^1.13.2","@swc/jest":"^0.2.37","@types/better-sqlite3":"^7.6.0","@types/blessed":"^0.1.27","@types/jest":"^30.0.0","@types/js-yaml":"^4.0.9","@types/node":"^24.3.0","@types/node-cron":"^3.0.11","@types/ws":"^8.18.1","@typescript-eslint/eslint-plugin":"^8.42.0","@typescript-eslint/par
ser":"^8.42.0","@vercel/ncc":"^0.38.4","eslint":"^9.34.0","eslint-config-prettier":"^10.1.8","eslint-plugin-prettier":"^5.5.4","husky":"^9.1.7","jest":"^30.1.3","lint-staged":"^16.1.6","prettier":"^3.6.2","reveal-md":"^6.1.2","ts-json-schema-generator":"^1.5.1","ts-node":"^10.9.2","tsup":"^8.5.0","typescript":"^5.9.2","wrangler":"^3.0.0"},"peerDependenciesMeta":{"@anthropic/claude-code-sdk":{"optional":true}},"directories":{"test":"tests"},"lint-staged":{"src/**/*.{ts,js}":["eslint --fix","prettier --write"],"tests/**/*.{ts,js}":["eslint --fix","prettier --write"],"*.{json,md,yml,yaml}":["prettier --write"]}}');
395257
397567
 
395258
397568
  /***/ })
395259
397569