@probelabs/visor 0.1.177 → 0.1.178-ee
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/defaults/code-talk.yaml +10 -5
- package/dist/defaults/code-talk.yaml +10 -5
- package/dist/docs/ai-custom-tools.md +49 -0
- package/dist/docs/http.md +23 -0
- package/dist/docs/testing/cookbook.md +48 -0
- package/dist/docs/testing/dsl-reference.md +4 -2
- package/dist/docs/testing/flows.md +33 -1
- package/dist/examples/http-integration-config.yaml +16 -0
- package/dist/generated/config-schema.d.ts +51 -6
- package/dist/generated/config-schema.d.ts.map +1 -1
- package/dist/generated/config-schema.json +61 -6
- package/dist/github-comments.d.ts +5 -1
- package/dist/github-comments.d.ts.map +1 -1
- package/dist/index.js +2224 -93
- package/dist/providers/api-tool-executor.d.ts +2 -0
- package/dist/providers/api-tool-executor.d.ts.map +1 -1
- package/dist/providers/http-client-provider.d.ts.map +1 -1
- package/dist/providers/mcp-custom-sse-server.d.ts.map +1 -1
- package/dist/providers/workflow-check-provider.d.ts.map +1 -1
- package/dist/sdk/{a2a-frontend-FUJRKHJB.mjs → a2a-frontend-U3PTNCLR.mjs} +2 -2
- package/dist/sdk/{check-provider-registry-HW4QPPSA.mjs → check-provider-registry-SRASECAR.mjs} +6 -6
- package/dist/sdk/{check-provider-registry-OY2EESIO.mjs → check-provider-registry-ZX76MY2L.mjs} +6 -6
- package/dist/sdk/{chunk-OPI632LK.mjs → chunk-4ECMTCOM.mjs} +2 -2
- package/dist/sdk/{chunk-GVTWESYN.mjs → chunk-6YGCACBF.mjs} +2 -2
- package/dist/sdk/{chunk-65SHRIQF.mjs.map → chunk-6YGCACBF.mjs.map} +1 -1
- package/dist/sdk/{chunk-Y6PVSFCS.mjs → chunk-B7XHSG3L.mjs} +237 -47
- package/dist/sdk/chunk-B7XHSG3L.mjs.map +1 -0
- package/dist/sdk/{chunk-MM3TGVQ4.mjs → chunk-BMXVAJ2M.mjs} +52 -7
- package/dist/sdk/chunk-BMXVAJ2M.mjs.map +1 -0
- package/dist/sdk/{chunk-OHOBWVPP.mjs → chunk-ENSZDV3O.mjs} +3 -3
- package/dist/sdk/{chunk-2LCF5H5K.mjs → chunk-MGY5JAN2.mjs} +222 -37
- package/dist/sdk/chunk-MGY5JAN2.mjs.map +1 -0
- package/dist/sdk/{config-OOUMTCEA.mjs → config-DFOF7LP4.mjs} +2 -2
- package/dist/sdk/{failure-condition-evaluator-DL6H57NX.mjs → failure-condition-evaluator-P3MS5DRL.mjs} +3 -3
- package/dist/sdk/{github-frontend-FP6WKNZR.mjs → github-frontend-QTKOYB56.mjs} +11 -3
- package/dist/sdk/github-frontend-QTKOYB56.mjs.map +1 -0
- package/dist/sdk/{host-6SBCE4VK.mjs → host-I2TBBKD5.mjs} +3 -3
- package/dist/sdk/{host-NYUSWEE4.mjs → host-THORKOEL.mjs} +3 -3
- package/dist/sdk/knex-store-QCEW4I4R.mjs +527 -0
- package/dist/sdk/knex-store-QCEW4I4R.mjs.map +1 -0
- package/dist/sdk/loader-Q7K76ZIY.mjs +89 -0
- package/dist/sdk/loader-Q7K76ZIY.mjs.map +1 -0
- package/dist/sdk/opa-policy-engine-QCSSIMUF.mjs +655 -0
- package/dist/sdk/opa-policy-engine-QCSSIMUF.mjs.map +1 -0
- package/dist/sdk/{routing-PFFCQJV2.mjs → routing-2X6QF5IW.mjs} +4 -4
- package/dist/sdk/{schedule-tool-DN2DSXIX.mjs → schedule-tool-M6Y4YTXR.mjs} +6 -6
- package/dist/sdk/{schedule-tool-KVZN5LP6.mjs → schedule-tool-R6JJIDZ6.mjs} +6 -6
- package/dist/sdk/{schedule-tool-handler-57JBEICD.mjs → schedule-tool-handler-AOMZV3Q3.mjs} +6 -6
- package/dist/sdk/{schedule-tool-handler-6MPP5DXK.mjs → schedule-tool-handler-JYCVH377.mjs} +6 -6
- package/dist/sdk/sdk.d.mts +21 -0
- package/dist/sdk/sdk.d.ts +21 -0
- package/dist/sdk/sdk.js +1919 -307
- package/dist/sdk/sdk.js.map +1 -1
- package/dist/sdk/sdk.mjs +5 -5
- package/dist/sdk/{trace-helpers-L3EOYW5P.mjs → trace-helpers-K47ZVJSU.mjs} +2 -2
- package/dist/sdk/validator-XTZJZZJH.mjs +134 -0
- package/dist/sdk/validator-XTZJZZJH.mjs.map +1 -0
- package/dist/sdk/{workflow-check-provider-OA33MESM.mjs → workflow-check-provider-A3YH2UZJ.mjs} +6 -6
- package/dist/sdk/{workflow-check-provider-U3UIYLU7.mjs → workflow-check-provider-EMFC7A5K.mjs} +6 -6
- package/dist/state-machine/context/build-engine-context.d.ts.map +1 -1
- package/dist/test-runner/conversation-sugar.d.ts +3 -0
- package/dist/test-runner/conversation-sugar.d.ts.map +1 -1
- package/dist/test-runner/validator.d.ts.map +1 -1
- package/dist/types/config.d.ts +21 -0
- package/dist/types/config.d.ts.map +1 -1
- package/dist/utils/rate-limiter.d.ts +61 -0
- package/dist/utils/rate-limiter.d.ts.map +1 -0
- package/package.json +2 -2
- package/dist/output/traces/run-2026-03-10T16-21-38-082Z.ndjson +0 -138
- package/dist/output/traces/run-2026-03-10T16-22-15-059Z.ndjson +0 -2296
- package/dist/sdk/a2a-frontend-BPWLYLCG.mjs +0 -1658
- package/dist/sdk/a2a-frontend-FUJRKHJB.mjs.map +0 -1
- package/dist/sdk/a2a-frontend-HBUSNE3K.mjs +0 -1658
- package/dist/sdk/a2a-frontend-HBUSNE3K.mjs.map +0 -1
- package/dist/sdk/check-provider-registry-TRHN5ZBY.mjs +0 -30
- package/dist/sdk/chunk-2LCF5H5K.mjs.map +0 -1
- package/dist/sdk/chunk-65SHRIQF.mjs +0 -516
- package/dist/sdk/chunk-ADQVGGKA.mjs +0 -1502
- package/dist/sdk/chunk-BWC5R2UB.mjs +0 -739
- package/dist/sdk/chunk-EFNNJIMY.mjs +0 -739
- package/dist/sdk/chunk-EFNNJIMY.mjs.map +0 -1
- package/dist/sdk/chunk-FNBSDOQM.mjs +0 -516
- package/dist/sdk/chunk-FNBSDOQM.mjs.map +0 -1
- package/dist/sdk/chunk-GVTWESYN.mjs.map +0 -1
- package/dist/sdk/chunk-MM3TGVQ4.mjs.map +0 -1
- package/dist/sdk/chunk-OHOBWVPP.mjs.map +0 -1
- package/dist/sdk/chunk-OPI632LK.mjs.map +0 -1
- package/dist/sdk/chunk-WJIV7MKY.mjs +0 -1502
- package/dist/sdk/chunk-WJIV7MKY.mjs.map +0 -1
- package/dist/sdk/chunk-XLDVWRKQ.mjs +0 -45239
- package/dist/sdk/chunk-XLDVWRKQ.mjs.map +0 -1
- package/dist/sdk/chunk-Y6PVSFCS.mjs.map +0 -1
- package/dist/sdk/failure-condition-evaluator-63BECZYF.mjs +0 -18
- package/dist/sdk/failure-condition-evaluator-HL33X7MH.mjs +0 -18
- package/dist/sdk/github-frontend-F2YCPK6H.mjs +0 -1386
- package/dist/sdk/github-frontend-F2YCPK6H.mjs.map +0 -1
- package/dist/sdk/github-frontend-FP6WKNZR.mjs.map +0 -1
- package/dist/sdk/github-frontend-U2U42CKV.mjs +0 -1386
- package/dist/sdk/github-frontend-U2U42CKV.mjs.map +0 -1
- package/dist/sdk/host-6TBS44ER.mjs +0 -87
- package/dist/sdk/host-NYUSWEE4.mjs.map +0 -1
- package/dist/sdk/routing-GF2CF3JT.mjs +0 -26
- package/dist/sdk/routing-SFP4D6O3.mjs +0 -26
- package/dist/sdk/schedule-tool-45NAALKS.mjs +0 -36
- package/dist/sdk/schedule-tool-KVZN5LP6.mjs.map +0 -1
- package/dist/sdk/schedule-tool-handler-57JBEICD.mjs.map +0 -1
- package/dist/sdk/schedule-tool-handler-6MPP5DXK.mjs.map +0 -1
- package/dist/sdk/schedule-tool-handler-GEXHYH3X.mjs +0 -40
- package/dist/sdk/schedule-tool-handler-GEXHYH3X.mjs.map +0 -1
- package/dist/sdk/slack-frontend-6SXPTQDI.mjs +0 -895
- package/dist/sdk/slack-frontend-6SXPTQDI.mjs.map +0 -1
- package/dist/sdk/trace-helpers-FKM2MEDW.mjs +0 -29
- package/dist/sdk/trace-helpers-FKM2MEDW.mjs.map +0 -1
- package/dist/sdk/trace-helpers-L3EOYW5P.mjs.map +0 -1
- package/dist/sdk/trace-helpers-MYH2GPXF.mjs +0 -29
- package/dist/sdk/trace-helpers-MYH2GPXF.mjs.map +0 -1
- package/dist/sdk/workflow-check-provider-JNEFAECH.mjs +0 -30
- package/dist/sdk/workflow-check-provider-JNEFAECH.mjs.map +0 -1
- package/dist/sdk/workflow-check-provider-OA33MESM.mjs.map +0 -1
- package/dist/sdk/workflow-check-provider-U3UIYLU7.mjs.map +0 -1
- package/dist/traces/run-2026-03-10T16-21-38-082Z.ndjson +0 -138
- package/dist/traces/run-2026-03-10T16-22-15-059Z.ndjson +0 -2296
- /package/dist/sdk/{a2a-frontend-BPWLYLCG.mjs.map → a2a-frontend-U3PTNCLR.mjs.map} +0 -0
- /package/dist/sdk/{check-provider-registry-HW4QPPSA.mjs.map → check-provider-registry-SRASECAR.mjs.map} +0 -0
- /package/dist/sdk/{check-provider-registry-OY2EESIO.mjs.map → check-provider-registry-ZX76MY2L.mjs.map} +0 -0
- /package/dist/sdk/{chunk-BWC5R2UB.mjs.map → chunk-4ECMTCOM.mjs.map} +0 -0
- /package/dist/sdk/{chunk-ADQVGGKA.mjs.map → chunk-ENSZDV3O.mjs.map} +0 -0
- /package/dist/sdk/{check-provider-registry-TRHN5ZBY.mjs.map → config-DFOF7LP4.mjs.map} +0 -0
- /package/dist/sdk/{config-OOUMTCEA.mjs.map → failure-condition-evaluator-P3MS5DRL.mjs.map} +0 -0
- /package/dist/sdk/{host-6SBCE4VK.mjs.map → host-I2TBBKD5.mjs.map} +0 -0
- /package/dist/sdk/{host-6TBS44ER.mjs.map → host-THORKOEL.mjs.map} +0 -0
- /package/dist/sdk/{failure-condition-evaluator-63BECZYF.mjs.map → routing-2X6QF5IW.mjs.map} +0 -0
- /package/dist/sdk/{failure-condition-evaluator-DL6H57NX.mjs.map → schedule-tool-M6Y4YTXR.mjs.map} +0 -0
- /package/dist/sdk/{failure-condition-evaluator-HL33X7MH.mjs.map → schedule-tool-R6JJIDZ6.mjs.map} +0 -0
- /package/dist/sdk/{routing-GF2CF3JT.mjs.map → schedule-tool-handler-AOMZV3Q3.mjs.map} +0 -0
- /package/dist/sdk/{routing-PFFCQJV2.mjs.map → schedule-tool-handler-JYCVH377.mjs.map} +0 -0
- /package/dist/sdk/{routing-SFP4D6O3.mjs.map → trace-helpers-K47ZVJSU.mjs.map} +0 -0
- /package/dist/sdk/{schedule-tool-45NAALKS.mjs.map → workflow-check-provider-A3YH2UZJ.mjs.map} +0 -0
- /package/dist/sdk/{schedule-tool-DN2DSXIX.mjs.map → workflow-check-provider-EMFC7A5K.mjs.map} +0 -0
package/dist/index.js
CHANGED
|
@@ -1,8 +1,8 @@
|
|
|
1
1
|
#!/usr/bin/env node
|
|
2
|
-
process.env.VISOR_VERSION = '0.1.
|
|
3
|
-
process.env.PROBE_VERSION = '0.6.0-
|
|
4
|
-
process.env.VISOR_COMMIT_SHA = '
|
|
5
|
-
process.env.VISOR_COMMIT_SHORT = '
|
|
2
|
+
process.env.VISOR_VERSION = '0.1.178';
|
|
3
|
+
process.env.PROBE_VERSION = '0.6.0-rc293';
|
|
4
|
+
process.env.VISOR_COMMIT_SHA = '79fcc6344e1ab954501ff6a7f9614a0372019b2b';
|
|
5
|
+
process.env.VISOR_COMMIT_SHORT = '79fcc63';
|
|
6
6
|
/******/ (() => { // webpackBootstrap
|
|
7
7
|
/******/ var __webpack_modules__ = ({
|
|
8
8
|
|
|
@@ -302997,7 +302997,7 @@ async function handleDumpPolicyInput(checkId, argv) {
|
|
|
302997
302997
|
let PolicyInputBuilder;
|
|
302998
302998
|
try {
|
|
302999
302999
|
// @ts-ignore — enterprise/ may not exist in OSS builds (caught at runtime)
|
|
303000
|
-
const mod = await Promise.resolve().then(() => __importStar(__nccwpck_require__(
|
|
303000
|
+
const mod = await Promise.resolve().then(() => __importStar(__nccwpck_require__(17117)));
|
|
303001
303001
|
PolicyInputBuilder = mod.PolicyInputBuilder;
|
|
303002
303002
|
}
|
|
303003
303003
|
catch {
|
|
@@ -303229,7 +303229,7 @@ steps:
|
|
|
303229
303229
|
You are a senior software engineer. Use the provided tools to explore
|
|
303230
303230
|
the codebase. Always verify your assumptions by reading actual code.
|
|
303231
303231
|
Be concise and cite file paths in your response.
|
|
303232
|
-
max_iterations:
|
|
303232
|
+
max_iterations: 50
|
|
303233
303233
|
ai_custom_tools: [search-code, list-files, read-file]
|
|
303234
303234
|
enable_bash: true # also allow direct shell commands
|
|
303235
303235
|
tags: [agent]
|
|
@@ -311069,6 +311069,1810 @@ class EmailPollingRunner {
|
|
|
311069
311069
|
exports.EmailPollingRunner = EmailPollingRunner;
|
|
311070
311070
|
|
|
311071
311071
|
|
|
311072
|
+
/***/ }),
|
|
311073
|
+
|
|
311074
|
+
/***/ 50069:
|
|
311075
|
+
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
|
|
311076
|
+
|
|
311077
|
+
"use strict";
|
|
311078
|
+
|
|
311079
|
+
/**
|
|
311080
|
+
* Copyright (c) ProbeLabs. All rights reserved.
|
|
311081
|
+
* Licensed under the Elastic License 2.0; you may not use this file except
|
|
311082
|
+
* in compliance with the Elastic License 2.0.
|
|
311083
|
+
*/
|
|
311084
|
+
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
|
311085
|
+
if (k2 === undefined) k2 = k;
|
|
311086
|
+
var desc = Object.getOwnPropertyDescriptor(m, k);
|
|
311087
|
+
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
|
|
311088
|
+
desc = { enumerable: true, get: function() { return m[k]; } };
|
|
311089
|
+
}
|
|
311090
|
+
Object.defineProperty(o, k2, desc);
|
|
311091
|
+
}) : (function(o, m, k, k2) {
|
|
311092
|
+
if (k2 === undefined) k2 = k;
|
|
311093
|
+
o[k2] = m[k];
|
|
311094
|
+
}));
|
|
311095
|
+
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
|
311096
|
+
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
|
311097
|
+
}) : function(o, v) {
|
|
311098
|
+
o["default"] = v;
|
|
311099
|
+
});
|
|
311100
|
+
var __importStar = (this && this.__importStar) || (function () {
|
|
311101
|
+
var ownKeys = function(o) {
|
|
311102
|
+
ownKeys = Object.getOwnPropertyNames || function (o) {
|
|
311103
|
+
var ar = [];
|
|
311104
|
+
for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
|
|
311105
|
+
return ar;
|
|
311106
|
+
};
|
|
311107
|
+
return ownKeys(o);
|
|
311108
|
+
};
|
|
311109
|
+
return function (mod) {
|
|
311110
|
+
if (mod && mod.__esModule) return mod;
|
|
311111
|
+
var result = {};
|
|
311112
|
+
if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
|
|
311113
|
+
__setModuleDefault(result, mod);
|
|
311114
|
+
return result;
|
|
311115
|
+
};
|
|
311116
|
+
})();
|
|
311117
|
+
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
|
311118
|
+
exports.LicenseValidator = void 0;
|
|
311119
|
+
const crypto = __importStar(__nccwpck_require__(76982));
|
|
311120
|
+
const fs = __importStar(__nccwpck_require__(79896));
|
|
311121
|
+
const path = __importStar(__nccwpck_require__(16928));
|
|
311122
|
+
class LicenseValidator {
|
|
311123
|
+
/** Ed25519 public key for license verification (PEM format). */
|
|
311124
|
+
static PUBLIC_KEY = '-----BEGIN PUBLIC KEY-----\n' +
|
|
311125
|
+
'MCowBQYDK2VwAyEAI/Zd08EFmgIdrDm/HXd0l3/5GBt7R1PrdvhdmEXhJlU=\n' +
|
|
311126
|
+
'-----END PUBLIC KEY-----\n';
|
|
311127
|
+
cache = null;
|
|
311128
|
+
static CACHE_TTL = 5 * 60 * 1000; // 5 minutes
|
|
311129
|
+
static GRACE_PERIOD = 72 * 3600 * 1000; // 72 hours after expiry
|
|
311130
|
+
/**
|
|
311131
|
+
* Load and validate license from environment or file.
|
|
311132
|
+
*
|
|
311133
|
+
* Resolution order:
|
|
311134
|
+
* 1. VISOR_LICENSE env var (JWT string)
|
|
311135
|
+
* 2. VISOR_LICENSE_FILE env var (path to file)
|
|
311136
|
+
* 3. .visor-license in project root (cwd)
|
|
311137
|
+
* 4. .visor-license in ~/.config/visor/
|
|
311138
|
+
*/
|
|
311139
|
+
async loadAndValidate() {
|
|
311140
|
+
// Return cached result if still fresh
|
|
311141
|
+
if (this.cache && Date.now() - this.cache.validatedAt < LicenseValidator.CACHE_TTL) {
|
|
311142
|
+
return this.cache.payload;
|
|
311143
|
+
}
|
|
311144
|
+
const token = this.resolveToken();
|
|
311145
|
+
if (!token)
|
|
311146
|
+
return null;
|
|
311147
|
+
const payload = this.verifyAndDecode(token);
|
|
311148
|
+
if (!payload)
|
|
311149
|
+
return null;
|
|
311150
|
+
this.cache = { payload, validatedAt: Date.now() };
|
|
311151
|
+
return payload;
|
|
311152
|
+
}
|
|
311153
|
+
/** Check if a specific feature is licensed */
|
|
311154
|
+
hasFeature(feature) {
|
|
311155
|
+
if (!this.cache)
|
|
311156
|
+
return false;
|
|
311157
|
+
return this.cache.payload.features.includes(feature);
|
|
311158
|
+
}
|
|
311159
|
+
/** Check if license is valid (with grace period) */
|
|
311160
|
+
isValid() {
|
|
311161
|
+
if (!this.cache)
|
|
311162
|
+
return false;
|
|
311163
|
+
const now = Date.now();
|
|
311164
|
+
const expiryMs = this.cache.payload.exp * 1000;
|
|
311165
|
+
return now < expiryMs + LicenseValidator.GRACE_PERIOD;
|
|
311166
|
+
}
|
|
311167
|
+
/** Check if the license is within its grace period (expired but still valid) */
|
|
311168
|
+
isInGracePeriod() {
|
|
311169
|
+
if (!this.cache)
|
|
311170
|
+
return false;
|
|
311171
|
+
const now = Date.now();
|
|
311172
|
+
const expiryMs = this.cache.payload.exp * 1000;
|
|
311173
|
+
return now >= expiryMs && now < expiryMs + LicenseValidator.GRACE_PERIOD;
|
|
311174
|
+
}
|
|
311175
|
+
resolveToken() {
|
|
311176
|
+
// 1. Direct env var
|
|
311177
|
+
if (process.env.VISOR_LICENSE) {
|
|
311178
|
+
return process.env.VISOR_LICENSE.trim();
|
|
311179
|
+
}
|
|
311180
|
+
// 2. File path from env (validate against path traversal)
|
|
311181
|
+
if (process.env.VISOR_LICENSE_FILE) {
|
|
311182
|
+
// path.resolve() produces an absolute path with all '..' segments resolved,
|
|
311183
|
+
// so a separate resolved.includes('..') check is unnecessary.
|
|
311184
|
+
const resolved = path.resolve(process.env.VISOR_LICENSE_FILE);
|
|
311185
|
+
const home = process.env.HOME || process.env.USERPROFILE || '';
|
|
311186
|
+
const allowedPrefixes = [path.normalize(process.cwd())];
|
|
311187
|
+
if (home)
|
|
311188
|
+
allowedPrefixes.push(path.normalize(path.join(home, '.config', 'visor')));
|
|
311189
|
+
// Resolve symlinks so an attacker cannot create a symlink inside an
|
|
311190
|
+
// allowed prefix that points to an arbitrary file outside it.
|
|
311191
|
+
let realPath;
|
|
311192
|
+
try {
|
|
311193
|
+
realPath = fs.realpathSync(resolved);
|
|
311194
|
+
}
|
|
311195
|
+
catch {
|
|
311196
|
+
return null; // File doesn't exist or isn't accessible
|
|
311197
|
+
}
|
|
311198
|
+
const isSafe = allowedPrefixes.some(prefix => realPath === prefix || realPath.startsWith(prefix + path.sep));
|
|
311199
|
+
if (!isSafe)
|
|
311200
|
+
return null;
|
|
311201
|
+
return this.readFile(realPath);
|
|
311202
|
+
}
|
|
311203
|
+
// 3. .visor-license in cwd
|
|
311204
|
+
const cwdPath = path.join(process.cwd(), '.visor-license');
|
|
311205
|
+
const cwdToken = this.readFile(cwdPath);
|
|
311206
|
+
if (cwdToken)
|
|
311207
|
+
return cwdToken;
|
|
311208
|
+
// 4. ~/.config/visor/.visor-license
|
|
311209
|
+
const home = process.env.HOME || process.env.USERPROFILE || '';
|
|
311210
|
+
if (home) {
|
|
311211
|
+
const configPath = path.join(home, '.config', 'visor', '.visor-license');
|
|
311212
|
+
const configToken = this.readFile(configPath);
|
|
311213
|
+
if (configToken)
|
|
311214
|
+
return configToken;
|
|
311215
|
+
}
|
|
311216
|
+
return null;
|
|
311217
|
+
}
|
|
311218
|
+
readFile(filePath) {
|
|
311219
|
+
try {
|
|
311220
|
+
return fs.readFileSync(filePath, 'utf-8').trim();
|
|
311221
|
+
}
|
|
311222
|
+
catch {
|
|
311223
|
+
return null;
|
|
311224
|
+
}
|
|
311225
|
+
}
|
|
311226
|
+
verifyAndDecode(token) {
|
|
311227
|
+
try {
|
|
311228
|
+
const parts = token.split('.');
|
|
311229
|
+
if (parts.length !== 3)
|
|
311230
|
+
return null;
|
|
311231
|
+
const [headerB64, payloadB64, signatureB64] = parts;
|
|
311232
|
+
// Decode header to verify algorithm
|
|
311233
|
+
const header = JSON.parse(Buffer.from(headerB64, 'base64url').toString());
|
|
311234
|
+
if (header.alg !== 'EdDSA')
|
|
311235
|
+
return null;
|
|
311236
|
+
// Verify signature
|
|
311237
|
+
const data = `${headerB64}.${payloadB64}`;
|
|
311238
|
+
const signature = Buffer.from(signatureB64, 'base64url');
|
|
311239
|
+
const publicKey = crypto.createPublicKey(LicenseValidator.PUBLIC_KEY);
|
|
311240
|
+
// Validate that the loaded public key is actually Ed25519 (OID 1.3.101.112).
|
|
311241
|
+
// This prevents algorithm-confusion attacks if the embedded key were ever
|
|
311242
|
+
// swapped to a different type.
|
|
311243
|
+
if (publicKey.asymmetricKeyType !== 'ed25519') {
|
|
311244
|
+
return null;
|
|
311245
|
+
}
|
|
311246
|
+
// Ed25519 verification: algorithm must be null because EdDSA performs its
|
|
311247
|
+
// own internal hashing (SHA-512) — passing a digest algorithm here would
|
|
311248
|
+
// cause Node.js to throw. The key type is validated above.
|
|
311249
|
+
const isValid = crypto.verify(null, Buffer.from(data), publicKey, signature);
|
|
311250
|
+
if (!isValid)
|
|
311251
|
+
return null;
|
|
311252
|
+
// Decode payload
|
|
311253
|
+
const payload = JSON.parse(Buffer.from(payloadB64, 'base64url').toString());
|
|
311254
|
+
// Validate required fields
|
|
311255
|
+
if (!payload.org ||
|
|
311256
|
+
!Array.isArray(payload.features) ||
|
|
311257
|
+
typeof payload.exp !== 'number' ||
|
|
311258
|
+
typeof payload.iat !== 'number' ||
|
|
311259
|
+
!payload.sub) {
|
|
311260
|
+
return null;
|
|
311261
|
+
}
|
|
311262
|
+
// Check expiry (with grace period)
|
|
311263
|
+
const now = Date.now();
|
|
311264
|
+
const expiryMs = payload.exp * 1000;
|
|
311265
|
+
if (now >= expiryMs + LicenseValidator.GRACE_PERIOD) {
|
|
311266
|
+
return null;
|
|
311267
|
+
}
|
|
311268
|
+
return payload;
|
|
311269
|
+
}
|
|
311270
|
+
catch {
|
|
311271
|
+
return null;
|
|
311272
|
+
}
|
|
311273
|
+
}
|
|
311274
|
+
}
|
|
311275
|
+
exports.LicenseValidator = LicenseValidator;
|
|
311276
|
+
|
|
311277
|
+
|
|
311278
|
+
/***/ }),
|
|
311279
|
+
|
|
311280
|
+
/***/ 87068:
|
|
311281
|
+
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
|
|
311282
|
+
|
|
311283
|
+
"use strict";
|
|
311284
|
+
|
|
311285
|
+
/**
|
|
311286
|
+
* Copyright (c) ProbeLabs. All rights reserved.
|
|
311287
|
+
* Licensed under the Elastic License 2.0; you may not use this file except
|
|
311288
|
+
* in compliance with the Elastic License 2.0.
|
|
311289
|
+
*/
|
|
311290
|
+
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
|
311291
|
+
if (k2 === undefined) k2 = k;
|
|
311292
|
+
var desc = Object.getOwnPropertyDescriptor(m, k);
|
|
311293
|
+
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
|
|
311294
|
+
desc = { enumerable: true, get: function() { return m[k]; } };
|
|
311295
|
+
}
|
|
311296
|
+
Object.defineProperty(o, k2, desc);
|
|
311297
|
+
}) : (function(o, m, k, k2) {
|
|
311298
|
+
if (k2 === undefined) k2 = k;
|
|
311299
|
+
o[k2] = m[k];
|
|
311300
|
+
}));
|
|
311301
|
+
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
|
311302
|
+
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
|
311303
|
+
}) : function(o, v) {
|
|
311304
|
+
o["default"] = v;
|
|
311305
|
+
});
|
|
311306
|
+
var __importStar = (this && this.__importStar) || (function () {
|
|
311307
|
+
var ownKeys = function(o) {
|
|
311308
|
+
ownKeys = Object.getOwnPropertyNames || function (o) {
|
|
311309
|
+
var ar = [];
|
|
311310
|
+
for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
|
|
311311
|
+
return ar;
|
|
311312
|
+
};
|
|
311313
|
+
return ownKeys(o);
|
|
311314
|
+
};
|
|
311315
|
+
return function (mod) {
|
|
311316
|
+
if (mod && mod.__esModule) return mod;
|
|
311317
|
+
var result = {};
|
|
311318
|
+
if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
|
|
311319
|
+
__setModuleDefault(result, mod);
|
|
311320
|
+
return result;
|
|
311321
|
+
};
|
|
311322
|
+
})();
|
|
311323
|
+
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
|
311324
|
+
exports.loadEnterprisePolicyEngine = loadEnterprisePolicyEngine;
|
|
311325
|
+
exports.loadEnterpriseStoreBackend = loadEnterpriseStoreBackend;
|
|
311326
|
+
const default_engine_1 = __nccwpck_require__(93866);
|
|
311327
|
+
/**
|
|
311328
|
+
* Load the enterprise policy engine if licensed, otherwise return the default no-op engine.
|
|
311329
|
+
*
|
|
311330
|
+
* This is the sole import boundary between OSS and enterprise code. Core code
|
|
311331
|
+
* must only import from this module (via dynamic `await import()`), never from
|
|
311332
|
+
* individual enterprise submodules.
|
|
311333
|
+
*/
|
|
311334
|
+
async function loadEnterprisePolicyEngine(config) {
|
|
311335
|
+
try {
|
|
311336
|
+
const { LicenseValidator } = await Promise.resolve().then(() => __importStar(__nccwpck_require__(50069)));
|
|
311337
|
+
const validator = new LicenseValidator();
|
|
311338
|
+
const license = await validator.loadAndValidate();
|
|
311339
|
+
if (!license || !validator.hasFeature('policy')) {
|
|
311340
|
+
return new default_engine_1.DefaultPolicyEngine();
|
|
311341
|
+
}
|
|
311342
|
+
if (validator.isInGracePeriod()) {
|
|
311343
|
+
// eslint-disable-next-line no-console
|
|
311344
|
+
console.warn('[visor:enterprise] License has expired but is within the 72-hour grace period. ' +
|
|
311345
|
+
'Please renew your license.');
|
|
311346
|
+
}
|
|
311347
|
+
const { OpaPolicyEngine } = await Promise.resolve().then(() => __importStar(__nccwpck_require__(39530)));
|
|
311348
|
+
const engine = new OpaPolicyEngine(config);
|
|
311349
|
+
await engine.initialize(config);
|
|
311350
|
+
return engine;
|
|
311351
|
+
}
|
|
311352
|
+
catch (err) {
|
|
311353
|
+
// Enterprise code not available or initialization failed
|
|
311354
|
+
const msg = err instanceof Error ? err.message : String(err);
|
|
311355
|
+
try {
|
|
311356
|
+
const { logger } = __nccwpck_require__(86999);
|
|
311357
|
+
logger.warn(`[PolicyEngine] Enterprise policy init failed, falling back to default: ${msg}`);
|
|
311358
|
+
}
|
|
311359
|
+
catch {
|
|
311360
|
+
// silent
|
|
311361
|
+
}
|
|
311362
|
+
return new default_engine_1.DefaultPolicyEngine();
|
|
311363
|
+
}
|
|
311364
|
+
}
|
|
311365
|
+
/**
|
|
311366
|
+
* Load the enterprise schedule store backend if licensed.
|
|
311367
|
+
*
|
|
311368
|
+
* @param driver Database driver ('postgresql', 'mysql', or 'mssql')
|
|
311369
|
+
* @param storageConfig Storage configuration with connection details
|
|
311370
|
+
* @param haConfig Optional HA configuration
|
|
311371
|
+
* @throws Error if enterprise license is not available or missing 'scheduler-sql' feature
|
|
311372
|
+
*/
|
|
311373
|
+
async function loadEnterpriseStoreBackend(driver, storageConfig, haConfig) {
|
|
311374
|
+
const { LicenseValidator } = await Promise.resolve().then(() => __importStar(__nccwpck_require__(50069)));
|
|
311375
|
+
const validator = new LicenseValidator();
|
|
311376
|
+
const license = await validator.loadAndValidate();
|
|
311377
|
+
if (!license || !validator.hasFeature('scheduler-sql')) {
|
|
311378
|
+
throw new Error(`The ${driver} schedule storage driver requires a Visor Enterprise license ` +
|
|
311379
|
+
`with the 'scheduler-sql' feature. Please upgrade or use driver: 'sqlite' (default).`);
|
|
311380
|
+
}
|
|
311381
|
+
if (validator.isInGracePeriod()) {
|
|
311382
|
+
// eslint-disable-next-line no-console
|
|
311383
|
+
console.warn('[visor:enterprise] License has expired but is within the 72-hour grace period. ' +
|
|
311384
|
+
'Please renew your license.');
|
|
311385
|
+
}
|
|
311386
|
+
const { KnexStoreBackend } = await Promise.resolve().then(() => __importStar(__nccwpck_require__(63737)));
|
|
311387
|
+
return new KnexStoreBackend(driver, storageConfig, haConfig);
|
|
311388
|
+
}
|
|
311389
|
+
|
|
311390
|
+
|
|
311391
|
+
/***/ }),
|
|
311392
|
+
|
|
311393
|
+
/***/ 628:
|
|
311394
|
+
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
|
|
311395
|
+
|
|
311396
|
+
"use strict";
|
|
311397
|
+
|
|
311398
|
+
/**
|
|
311399
|
+
* Copyright (c) ProbeLabs. All rights reserved.
|
|
311400
|
+
* Licensed under the Elastic License 2.0; you may not use this file except
|
|
311401
|
+
* in compliance with the Elastic License 2.0.
|
|
311402
|
+
*/
|
|
311403
|
+
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
|
311404
|
+
if (k2 === undefined) k2 = k;
|
|
311405
|
+
var desc = Object.getOwnPropertyDescriptor(m, k);
|
|
311406
|
+
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
|
|
311407
|
+
desc = { enumerable: true, get: function() { return m[k]; } };
|
|
311408
|
+
}
|
|
311409
|
+
Object.defineProperty(o, k2, desc);
|
|
311410
|
+
}) : (function(o, m, k, k2) {
|
|
311411
|
+
if (k2 === undefined) k2 = k;
|
|
311412
|
+
o[k2] = m[k];
|
|
311413
|
+
}));
|
|
311414
|
+
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
|
311415
|
+
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
|
311416
|
+
}) : function(o, v) {
|
|
311417
|
+
o["default"] = v;
|
|
311418
|
+
});
|
|
311419
|
+
var __importStar = (this && this.__importStar) || (function () {
|
|
311420
|
+
var ownKeys = function(o) {
|
|
311421
|
+
ownKeys = Object.getOwnPropertyNames || function (o) {
|
|
311422
|
+
var ar = [];
|
|
311423
|
+
for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
|
|
311424
|
+
return ar;
|
|
311425
|
+
};
|
|
311426
|
+
return ownKeys(o);
|
|
311427
|
+
};
|
|
311428
|
+
return function (mod) {
|
|
311429
|
+
if (mod && mod.__esModule) return mod;
|
|
311430
|
+
var result = {};
|
|
311431
|
+
if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
|
|
311432
|
+
__setModuleDefault(result, mod);
|
|
311433
|
+
return result;
|
|
311434
|
+
};
|
|
311435
|
+
})();
|
|
311436
|
+
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
|
311437
|
+
exports.OpaCompiler = void 0;
|
|
311438
|
+
const fs = __importStar(__nccwpck_require__(79896));
|
|
311439
|
+
const path = __importStar(__nccwpck_require__(16928));
|
|
311440
|
+
const os = __importStar(__nccwpck_require__(70857));
|
|
311441
|
+
const crypto = __importStar(__nccwpck_require__(76982));
|
|
311442
|
+
const child_process_1 = __nccwpck_require__(35317);
|
|
311443
|
+
/**
|
|
311444
|
+
* OPA Rego Compiler - compiles .rego policy files to WASM bundles using the `opa` CLI.
|
|
311445
|
+
*
|
|
311446
|
+
* Handles:
|
|
311447
|
+
* - Resolving input paths to WASM bytes (direct .wasm, directory with policy.wasm, or .rego files)
|
|
311448
|
+
* - Compiling .rego files to WASM via `opa build`
|
|
311449
|
+
* - Caching compiled bundles based on content hashes
|
|
311450
|
+
* - Extracting policy.wasm from OPA tar.gz bundles
|
|
311451
|
+
*
|
|
311452
|
+
* Requires:
|
|
311453
|
+
* - `opa` CLI on PATH (only when auto-compiling .rego files)
|
|
311454
|
+
*/
|
|
311455
|
+
class OpaCompiler {
|
|
311456
|
+
static CACHE_DIR = path.join(os.tmpdir(), 'visor-opa-cache');
|
|
311457
|
+
/**
|
|
311458
|
+
* Resolve the input paths to WASM bytes.
|
|
311459
|
+
*
|
|
311460
|
+
* Strategy:
|
|
311461
|
+
* 1. If any path is a .wasm file, read it directly
|
|
311462
|
+
* 2. If a directory contains policy.wasm, read it
|
|
311463
|
+
* 3. Otherwise, collect all .rego files and auto-compile via `opa build`
|
|
311464
|
+
*/
|
|
311465
|
+
async resolveWasmBytes(paths) {
|
|
311466
|
+
// Collect .rego files and check for existing .wasm
|
|
311467
|
+
const regoFiles = [];
|
|
311468
|
+
for (const p of paths) {
|
|
311469
|
+
const resolved = path.resolve(p);
|
|
311470
|
+
// Reject paths containing '..' after resolution (path traversal)
|
|
311471
|
+
if (path.normalize(resolved).includes('..')) {
|
|
311472
|
+
throw new Error(`Policy path contains traversal sequences: ${p}`);
|
|
311473
|
+
}
|
|
311474
|
+
// Direct .wasm file
|
|
311475
|
+
if (resolved.endsWith('.wasm') && fs.existsSync(resolved)) {
|
|
311476
|
+
return fs.readFileSync(resolved);
|
|
311477
|
+
}
|
|
311478
|
+
if (!fs.existsSync(resolved))
|
|
311479
|
+
continue;
|
|
311480
|
+
const stat = fs.statSync(resolved);
|
|
311481
|
+
if (stat.isDirectory()) {
|
|
311482
|
+
// Check for pre-compiled policy.wasm in directory
|
|
311483
|
+
const wasmCandidate = path.join(resolved, 'policy.wasm');
|
|
311484
|
+
if (fs.existsSync(wasmCandidate)) {
|
|
311485
|
+
return fs.readFileSync(wasmCandidate);
|
|
311486
|
+
}
|
|
311487
|
+
// Collect all .rego files from directory
|
|
311488
|
+
const files = fs.readdirSync(resolved);
|
|
311489
|
+
for (const f of files) {
|
|
311490
|
+
if (f.endsWith('.rego')) {
|
|
311491
|
+
regoFiles.push(path.join(resolved, f));
|
|
311492
|
+
}
|
|
311493
|
+
}
|
|
311494
|
+
}
|
|
311495
|
+
else if (resolved.endsWith('.rego')) {
|
|
311496
|
+
regoFiles.push(resolved);
|
|
311497
|
+
}
|
|
311498
|
+
}
|
|
311499
|
+
if (regoFiles.length === 0) {
|
|
311500
|
+
throw new Error(`OPA WASM evaluator: no .wasm bundle or .rego files found in: ${paths.join(', ')}`);
|
|
311501
|
+
}
|
|
311502
|
+
// Auto-compile .rego -> .wasm
|
|
311503
|
+
return this.compileRego(regoFiles);
|
|
311504
|
+
}
|
|
311505
|
+
/**
|
|
311506
|
+
* Auto-compile .rego files to a WASM bundle using the `opa` CLI.
|
|
311507
|
+
*
|
|
311508
|
+
* Caches the compiled bundle based on a content hash of all input .rego files
|
|
311509
|
+
* so subsequent runs skip compilation if policies haven't changed.
|
|
311510
|
+
*/
|
|
311511
|
+
compileRego(regoFiles) {
|
|
311512
|
+
// Check that `opa` CLI is available
|
|
311513
|
+
try {
|
|
311514
|
+
(0, child_process_1.execFileSync)('opa', ['version'], { stdio: 'pipe' });
|
|
311515
|
+
}
|
|
311516
|
+
catch {
|
|
311517
|
+
throw new Error('OPA CLI (`opa`) not found on PATH. Install it from https://www.openpolicyagent.org/docs/latest/#running-opa\n' +
|
|
311518
|
+
'Or pre-compile your .rego files: opa build -t wasm -e visor -o bundle.tar.gz ' +
|
|
311519
|
+
regoFiles.join(' '));
|
|
311520
|
+
}
|
|
311521
|
+
// Compute content hash for cache key
|
|
311522
|
+
const hash = crypto.createHash('sha256');
|
|
311523
|
+
for (const f of regoFiles.sort()) {
|
|
311524
|
+
hash.update(fs.readFileSync(f));
|
|
311525
|
+
hash.update(f); // include filename for disambiguation
|
|
311526
|
+
}
|
|
311527
|
+
const cacheKey = hash.digest('hex').slice(0, 16);
|
|
311528
|
+
const cacheDir = OpaCompiler.CACHE_DIR;
|
|
311529
|
+
const cachedWasm = path.join(cacheDir, `${cacheKey}.wasm`);
|
|
311530
|
+
// Return cached bundle if still valid
|
|
311531
|
+
if (fs.existsSync(cachedWasm)) {
|
|
311532
|
+
return fs.readFileSync(cachedWasm);
|
|
311533
|
+
}
|
|
311534
|
+
// Compile to WASM via opa build
|
|
311535
|
+
fs.mkdirSync(cacheDir, { recursive: true });
|
|
311536
|
+
const bundleTar = path.join(cacheDir, `${cacheKey}-bundle.tar.gz`);
|
|
311537
|
+
try {
|
|
311538
|
+
const args = [
|
|
311539
|
+
'build',
|
|
311540
|
+
'-t',
|
|
311541
|
+
'wasm',
|
|
311542
|
+
'-e',
|
|
311543
|
+
'visor', // entrypoint: the visor package tree
|
|
311544
|
+
'-o',
|
|
311545
|
+
bundleTar,
|
|
311546
|
+
...regoFiles,
|
|
311547
|
+
];
|
|
311548
|
+
(0, child_process_1.execFileSync)('opa', args, {
|
|
311549
|
+
stdio: 'pipe',
|
|
311550
|
+
timeout: 30000,
|
|
311551
|
+
});
|
|
311552
|
+
}
|
|
311553
|
+
catch (err) {
|
|
311554
|
+
const stderr = err?.stderr?.toString() || '';
|
|
311555
|
+
throw new Error(`Failed to compile .rego files to WASM:\n${stderr}\n` +
|
|
311556
|
+
'Ensure your .rego files are valid and the `opa` CLI is installed.');
|
|
311557
|
+
}
|
|
311558
|
+
// Extract policy.wasm from the tar.gz bundle
|
|
311559
|
+
// OPA bundles are tar.gz with /policy.wasm inside
|
|
311560
|
+
try {
|
|
311561
|
+
(0, child_process_1.execFileSync)('tar', ['-xzf', bundleTar, '-C', cacheDir, '/policy.wasm'], {
|
|
311562
|
+
stdio: 'pipe',
|
|
311563
|
+
});
|
|
311564
|
+
const extractedWasm = path.join(cacheDir, 'policy.wasm');
|
|
311565
|
+
if (fs.existsSync(extractedWasm)) {
|
|
311566
|
+
// Move to cache-key named file
|
|
311567
|
+
fs.renameSync(extractedWasm, cachedWasm);
|
|
311568
|
+
}
|
|
311569
|
+
}
|
|
311570
|
+
catch {
|
|
311571
|
+
// Some tar implementations don't like leading /
|
|
311572
|
+
try {
|
|
311573
|
+
(0, child_process_1.execFileSync)('tar', ['-xzf', bundleTar, '-C', cacheDir, 'policy.wasm'], {
|
|
311574
|
+
stdio: 'pipe',
|
|
311575
|
+
});
|
|
311576
|
+
const extractedWasm = path.join(cacheDir, 'policy.wasm');
|
|
311577
|
+
if (fs.existsSync(extractedWasm)) {
|
|
311578
|
+
fs.renameSync(extractedWasm, cachedWasm);
|
|
311579
|
+
}
|
|
311580
|
+
}
|
|
311581
|
+
catch (err2) {
|
|
311582
|
+
throw new Error(`Failed to extract policy.wasm from OPA bundle: ${err2?.message || err2}`);
|
|
311583
|
+
}
|
|
311584
|
+
}
|
|
311585
|
+
// Clean up tar
|
|
311586
|
+
try {
|
|
311587
|
+
fs.unlinkSync(bundleTar);
|
|
311588
|
+
}
|
|
311589
|
+
catch { }
|
|
311590
|
+
if (!fs.existsSync(cachedWasm)) {
|
|
311591
|
+
throw new Error('OPA build succeeded but policy.wasm was not found in the bundle');
|
|
311592
|
+
}
|
|
311593
|
+
return fs.readFileSync(cachedWasm);
|
|
311594
|
+
}
|
|
311595
|
+
}
|
|
311596
|
+
exports.OpaCompiler = OpaCompiler;
|
|
311597
|
+
|
|
311598
|
+
|
|
311599
|
+
/***/ }),
|
|
311600
|
+
|
|
311601
|
+
/***/ 44693:
|
|
311602
|
+
/***/ ((__unused_webpack_module, exports) => {
|
|
311603
|
+
|
|
311604
|
+
"use strict";
|
|
311605
|
+
|
|
311606
|
+
/**
|
|
311607
|
+
* Copyright (c) ProbeLabs. All rights reserved.
|
|
311608
|
+
* Licensed under the Elastic License 2.0; you may not use this file except
|
|
311609
|
+
* in compliance with the Elastic License 2.0.
|
|
311610
|
+
*/
|
|
311611
|
+
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
|
311612
|
+
exports.OpaHttpEvaluator = void 0;
|
|
311613
|
+
/**
 * OPA HTTP Evaluator - evaluates policies via an external OPA server's REST API.
 *
 * Uses the built-in `fetch` API (Node 18+), so no extra dependencies are needed.
 */
class OpaHttpEvaluator {
    baseUrl; // normalized OPA server URL, trailing slashes stripped
    timeout; // per-request timeout in milliseconds
    /**
     * @param baseUrl - OPA server base URL; must use http(s) and must not point
     *                  at loopback/link-local/private/metadata addresses
     * @param timeout - request timeout in milliseconds (default 5000)
     * @throws {Error} on malformed URLs, bad protocols, or blocked hostnames
     */
    constructor(baseUrl, timeout = 5000) {
        // Validate URL format and protocol
        let parsed;
        try {
            parsed = new URL(baseUrl);
        }
        catch {
            throw new Error(`OPA HTTP evaluator: invalid URL: ${baseUrl}`);
        }
        if (!['http:', 'https:'].includes(parsed.protocol)) {
            throw new Error(`OPA HTTP evaluator: url must use http:// or https:// protocol, got: ${baseUrl}`);
        }
        // Block cloud metadata, loopback, link-local, and private network addresses
        const hostname = parsed.hostname;
        if (this.isBlockedHostname(hostname)) {
            throw new Error(`OPA HTTP evaluator: url must not point to internal, loopback, or private network addresses`);
        }
        // Normalize: strip trailing slash
        this.baseUrl = baseUrl.replace(/\/+$/, '');
        this.timeout = timeout;
    }
    /**
     * Check if a hostname is blocked due to SSRF concerns.
     *
     * Blocks:
     * - Loopback addresses (127.x.x.x, localhost, 0.0.0.0, ::1)
     * - Link-local addresses (169.254.x.x, fe80::/10)
     * - Private networks (10.x.x.x, 172.16-31.x.x, 192.168.x.x)
     * - IPv6 unique local addresses (fc00::/7)
     * - Cloud metadata services (*.internal)
     *
     * NOTE: this is a purely textual check — it does not resolve DNS, so a
     * public name that resolves to a private address is not caught here.
     */
    isBlockedHostname(hostname) {
        if (!hostname)
            return true; // block empty hostnames
        // Normalize hostname: lowercase and remove brackets for IPv6
        const normalized = hostname.toLowerCase().replace(/^\[|\]$/g, '');
        // Block .internal domains (cloud metadata services)
        if (normalized === 'metadata.google.internal' || normalized.endsWith('.internal')) {
            return true;
        }
        // Block localhost variants
        if (normalized === 'localhost' || normalized === 'localhost.localdomain') {
            return true;
        }
        // Block IPv6 loopback
        if (normalized === '::1' || normalized === '0:0:0:0:0:0:0:1') {
            return true;
        }
        // Check IPv4 patterns
        const ipv4Pattern = /^(\d{1,3})\.(\d{1,3})\.(\d{1,3})\.(\d{1,3})$/;
        const ipv4Match = normalized.match(ipv4Pattern);
        if (ipv4Match) {
            const octets = ipv4Match.slice(1, 5).map(Number);
            // Not a valid IPv4 address if any octet exceeds 255
            if (octets.some(octet => octet > 255)) {
                return false;
            }
            const [a, b] = octets;
            // Block loopback: 127.0.0.0/8
            if (a === 127) {
                return true;
            }
            // Block 0.0.0.0/8 (this host)
            if (a === 0) {
                return true;
            }
            // Block link-local: 169.254.0.0/16
            if (a === 169 && b === 254) {
                return true;
            }
            // Block private networks
            // 10.0.0.0/8
            if (a === 10) {
                return true;
            }
            // 172.16.0.0/12 (172.16.x.x through 172.31.x.x)
            if (a === 172 && b >= 16 && b <= 31) {
                return true;
            }
            // 192.168.0.0/16
            if (a === 192 && b === 168) {
                return true;
            }
        }
        // IPv6 textual prefix checks apply only to IPv6 literals, which always
        // contain ':'. Plain DNS names may legitimately start with "fc"/"fd"/
        // "fe80" (e.g. fcc.gov) and must not be blocked by these prefix tests.
        if (normalized.includes(':')) {
            // Block unique local addresses: fc00::/7 (fc.. / fd.. prefixes)
            if (normalized.startsWith('fd') || normalized.startsWith('fc')) {
                return true;
            }
            // Block link-local: fe80::/10
            if (normalized.startsWith('fe80:')) {
                return true;
            }
        }
        return false;
    }
    /**
     * Evaluate a policy rule against an input document via OPA REST API.
     *
     * @param input - The input document to evaluate
     * @param rulePath - OPA rule path (e.g., 'visor/check/execute')
     * @returns The `result` field of the OPA response (may be undefined)
     * @throws on non-2xx responses, unparseable JSON, timeout, or network errors
     */
    async evaluate(input, rulePath) {
        // OPA Data API: POST /v1/data/<path>
        const encodedPath = rulePath
            .split('/')
            .map(s => encodeURIComponent(s))
            .join('/');
        const url = `${this.baseUrl}/v1/data/${encodedPath}`;
        const controller = new AbortController();
        const timer = setTimeout(() => controller.abort(), this.timeout);
        try {
            const response = await fetch(url, {
                method: 'POST',
                headers: { 'Content-Type': 'application/json' },
                body: JSON.stringify({ input }),
                signal: controller.signal,
            });
            if (!response.ok) {
                throw new Error(`OPA HTTP ${response.status}: ${response.statusText}`);
            }
            let body;
            try {
                body = await response.json();
            }
            catch (jsonErr) {
                throw new Error(`OPA HTTP evaluator: failed to parse JSON response: ${jsonErr instanceof Error ? jsonErr.message : String(jsonErr)}`);
            }
            // OPA returns { result: { ... } }
            return body?.result;
        }
        finally {
            clearTimeout(timer);
        }
    }
    /** No persistent connections to close; present for evaluator interface symmetry. */
    async shutdown() {
        // No persistent connections to close
    }
}
|
|
311760
|
+
exports.OpaHttpEvaluator = OpaHttpEvaluator;
|
|
311761
|
+
|
|
311762
|
+
|
|
311763
|
+
/***/ }),
|
|
311764
|
+
|
|
311765
|
+
/***/ 39530:
|
|
311766
|
+
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
|
|
311767
|
+
|
|
311768
|
+
"use strict";
|
|
311769
|
+
|
|
311770
|
+
/**
|
|
311771
|
+
* Copyright (c) ProbeLabs. All rights reserved.
|
|
311772
|
+
* Licensed under the Elastic License 2.0; you may not use this file except
|
|
311773
|
+
* in compliance with the Elastic License 2.0.
|
|
311774
|
+
*/
|
|
311775
|
+
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
|
311776
|
+
exports.OpaPolicyEngine = void 0;
|
|
311777
|
+
const opa_wasm_evaluator_1 = __nccwpck_require__(8613);
|
|
311778
|
+
const opa_http_evaluator_1 = __nccwpck_require__(44693);
|
|
311779
|
+
const policy_input_builder_1 = __nccwpck_require__(17117);
|
|
311780
|
+
/**
 * Enterprise OPA Policy Engine.
 *
 * Wraps both WASM (local) and HTTP (remote) OPA evaluators behind the
 * OSS PolicyEngine interface. All OPA input building and role resolution
 * is handled internally — the OSS call sites pass only plain types.
 */
class OpaPolicyEngine {
    evaluator = null; // active evaluator (WASM or HTTP); null means allow-all
    fallback; // 'deny' | 'allow' | 'warn' behavior on failure/denial
    timeout; // evaluation timeout in milliseconds
    config; // raw policy config, kept for rebuilding the input builder
    inputBuilder = null; // turns engine context into OPA input documents
    logger = null; // optional logger, resolved lazily in initialize()
    /**
     * @param config - policy configuration; `fallback` defaults to 'deny'
     *                 and `timeout` to 5000 ms when unset
     */
    constructor(config) {
        this.config = config;
        this.fallback = config.fallback || 'deny';
        this.timeout = config.timeout || 5000;
    }
    /**
     * Set up the evaluator (WASM for `engine: local`, HTTP for `engine: remote`)
     * and seed the actor/repo/PR context from environment variables.
     */
    async initialize(config) {
        // Resolve logger once at initialization; it is optional in some contexts.
        try {
            this.logger = (__nccwpck_require__(86999).logger);
        }
        catch {
            // logger not available in this context
        }
        // Build actor/repo context from environment (available at engine init time)
        const env = process.env;
        const actor = {
            authorAssociation: env.VISOR_AUTHOR_ASSOCIATION,
            login: env.VISOR_AUTHOR_LOGIN || env.GITHUB_ACTOR,
            isLocalMode: !env.GITHUB_ACTIONS,
        };
        const repo = {
            owner: env.GITHUB_REPOSITORY_OWNER,
            name: env.GITHUB_REPOSITORY?.split('/')[1],
            branch: env.GITHUB_HEAD_REF,
            baseBranch: env.GITHUB_BASE_REF,
            event: env.GITHUB_EVENT_NAME,
        };
        // Only carry the PR number through when it parses to a finite integer.
        let prNumber;
        if (env.GITHUB_PR_NUMBER) {
            const candidate = parseInt(env.GITHUB_PR_NUMBER, 10);
            if (Number.isFinite(candidate)) {
                prNumber = candidate;
            }
        }
        const pullRequest = { number: prNumber };
        this.inputBuilder = new policy_input_builder_1.PolicyInputBuilder(config, actor, repo, pullRequest);
        switch (config.engine) {
            case 'local': {
                if (!config.rules) {
                    throw new Error('OPA local mode requires `policy.rules` path to .wasm or .rego files');
                }
                const wasm = new opa_wasm_evaluator_1.OpaWasmEvaluator();
                await wasm.initialize(config.rules);
                if (config.data) {
                    wasm.loadData(config.data);
                }
                this.evaluator = wasm;
                break;
            }
            case 'remote': {
                if (!config.url) {
                    throw new Error('OPA remote mode requires `policy.url` pointing to OPA server');
                }
                this.evaluator = new opa_http_evaluator_1.OpaHttpEvaluator(config.url, this.timeout);
                break;
            }
            default:
                this.evaluator = null;
        }
    }
    /**
     * Update actor/repo/PR context (e.g., after PR info becomes available).
     * Called by the enterprise loader when engine context is enriched.
     */
    setActorContext(actor, repo, pullRequest) {
        this.inputBuilder = new policy_input_builder_1.PolicyInputBuilder(this.config, actor, repo, pullRequest);
    }
    /**
     * Decide whether a configured check may execute.
     * Short-circuits to allow when no evaluator/input builder is configured.
     */
    async evaluateCheckExecution(checkId, checkConfig) {
        if (!this.evaluator || !this.inputBuilder) {
            return { allowed: true };
        }
        const cfg = checkConfig && typeof checkConfig === 'object' ? checkConfig : {};
        const policyOverride = cfg.policy;
        const input = this.inputBuilder.forCheckExecution({
            id: checkId,
            type: cfg.type || 'ai',
            group: cfg.group,
            tags: cfg.tags,
            criticality: cfg.criticality,
            sandbox: cfg.sandbox,
            policy: policyOverride,
        });
        const rulePath = this.resolveRulePath('check.execute', policyOverride?.rule);
        return this.doEvaluate(input, rulePath);
    }
    /** Decide whether a given MCP tool invocation is permitted. */
    async evaluateToolInvocation(serverName, methodName, transport) {
        if (!this.evaluator || !this.inputBuilder) {
            return { allowed: true };
        }
        const input = this.inputBuilder.forToolInvocation(serverName, methodName, transport);
        return this.doEvaluate(input, 'visor/tool/invoke');
    }
    /** Decide which capabilities a check may be granted. */
    async evaluateCapabilities(checkId, capabilities) {
        if (!this.evaluator || !this.inputBuilder) {
            return { allowed: true };
        }
        const input = this.inputBuilder.forCapabilityResolve(checkId, capabilities);
        return this.doEvaluate(input, 'visor/capability/resolve');
    }
    /** Release the active evaluator (if any) and drop cached context. */
    async shutdown() {
        if (this.evaluator && 'shutdown' in this.evaluator) {
            await this.evaluator.shutdown();
        }
        this.evaluator = null;
        this.inputBuilder = null;
    }
    /**
     * Map a default scope like 'check.execute' to an OPA rule path
     * ('visor/check/execute'), honoring a per-check override when provided.
     */
    resolveRulePath(defaultScope, override) {
        if (!override) {
            return `visor/${defaultScope.replace(/\./g, '/')}`;
        }
        return override.startsWith('visor/') ? override : `visor/${override}`;
    }
    /**
     * Run one policy evaluation with a hard timeout.
     *
     * In 'warn' fallback mode a denial is converted to an allow carrying
     * `warn: true`; any evaluation failure resolves to a decision derived
     * from the configured fallback rather than throwing.
     */
    async doEvaluate(input, rulePath) {
        try {
            this.logger?.debug(`[PolicyEngine] Evaluating ${rulePath}`, JSON.stringify(input));
            let timeoutHandle;
            const deadline = new Promise((_resolve, reject) => {
                timeoutHandle = setTimeout(() => reject(new Error('policy evaluation timeout')), this.timeout);
            });
            try {
                const raw = await Promise.race([this.rawEvaluate(input, rulePath), deadline]);
                const decision = this.parseDecision(raw);
                // In warn mode, override denied decisions to allowed but flag as warn
                if (this.fallback === 'warn' && !decision.allowed) {
                    decision.allowed = true;
                    decision.warn = true;
                    decision.reason = `audit: ${decision.reason || 'policy denied'}`;
                }
                this.logger?.debug(`[PolicyEngine] Decision for ${rulePath}: allowed=${decision.allowed}, warn=${decision.warn || false}, reason=${decision.reason || 'none'}`);
                return decision;
            }
            finally {
                if (timeoutHandle) {
                    clearTimeout(timeoutHandle);
                }
            }
        }
        catch (err) {
            const msg = err instanceof Error ? err.message : String(err);
            this.logger?.warn(`[PolicyEngine] Evaluation failed for ${rulePath}: ${msg}`);
            return {
                allowed: this.fallback === 'allow' || this.fallback === 'warn',
                warn: this.fallback === 'warn' ? true : undefined,
                reason: `policy evaluation failed, fallback=${this.fallback}`,
            };
        }
    }
    /**
     * Dispatch to the active evaluator. WASM results are the whole `visor`
     * package tree and must be navigated down to the requested rule.
     */
    async rawEvaluate(input, rulePath) {
        if (this.evaluator instanceof opa_wasm_evaluator_1.OpaWasmEvaluator) {
            const tree = await this.evaluator.evaluate(input);
            // WASM compiled with `-e visor` entrypoint returns the full visor package tree.
            // e.g., 'visor/check/execute' → tree.check.execute
            return this.navigateWasmResult(tree, rulePath);
        }
        return this.evaluator.evaluate(input, rulePath);
    }
    /**
     * Navigate nested OPA WASM result tree to reach the specific rule's output.
     * The WASM entrypoint `-e visor` means the result root IS the visor package,
     * so the `visor/` prefix is stripped and the remaining segments are walked.
     */
    navigateWasmResult(result, rulePath) {
        if (!result || typeof result !== 'object') {
            return result;
        }
        let node = result;
        for (const segment of rulePath.replace(/^visor\//, '').split('/')) {
            if (!node || typeof node !== 'object' || !(segment in node)) {
                return undefined; // path not found in result tree
            }
            node = node[segment];
        }
        return node;
    }
    /**
     * Convert a raw OPA result into a decision object.
     * A missing result resolves via the configured fallback; otherwise the
     * decision is denied only when `allowed` is explicitly false.
     */
    parseDecision(result) {
        if (result === undefined || result === null) {
            return {
                allowed: this.fallback === 'allow' || this.fallback === 'warn',
                warn: this.fallback === 'warn' ? true : undefined,
                reason: this.fallback === 'warn' ? 'audit: no policy result' : 'no policy result',
            };
        }
        const decision = {
            allowed: result.allowed !== false,
            reason: result.reason,
        };
        if (result.capabilities) {
            decision.capabilities = result.capabilities;
        }
        return decision;
    }
}
|
|
311982
|
+
exports.OpaPolicyEngine = OpaPolicyEngine;
|
|
311983
|
+
|
|
311984
|
+
|
|
311985
|
+
/***/ }),
|
|
311986
|
+
|
|
311987
|
+
/***/ 8613:
|
|
311988
|
+
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
|
|
311989
|
+
|
|
311990
|
+
"use strict";
|
|
311991
|
+
|
|
311992
|
+
/**
|
|
311993
|
+
* Copyright (c) ProbeLabs. All rights reserved.
|
|
311994
|
+
* Licensed under the Elastic License 2.0; you may not use this file except
|
|
311995
|
+
* in compliance with the Elastic License 2.0.
|
|
311996
|
+
*/
|
|
311997
|
+
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
|
311998
|
+
if (k2 === undefined) k2 = k;
|
|
311999
|
+
var desc = Object.getOwnPropertyDescriptor(m, k);
|
|
312000
|
+
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
|
|
312001
|
+
desc = { enumerable: true, get: function() { return m[k]; } };
|
|
312002
|
+
}
|
|
312003
|
+
Object.defineProperty(o, k2, desc);
|
|
312004
|
+
}) : (function(o, m, k, k2) {
|
|
312005
|
+
if (k2 === undefined) k2 = k;
|
|
312006
|
+
o[k2] = m[k];
|
|
312007
|
+
}));
|
|
312008
|
+
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
|
312009
|
+
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
|
312010
|
+
}) : function(o, v) {
|
|
312011
|
+
o["default"] = v;
|
|
312012
|
+
});
|
|
312013
|
+
var __importStar = (this && this.__importStar) || (function () {
|
|
312014
|
+
var ownKeys = function(o) {
|
|
312015
|
+
ownKeys = Object.getOwnPropertyNames || function (o) {
|
|
312016
|
+
var ar = [];
|
|
312017
|
+
for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
|
|
312018
|
+
return ar;
|
|
312019
|
+
};
|
|
312020
|
+
return ownKeys(o);
|
|
312021
|
+
};
|
|
312022
|
+
return function (mod) {
|
|
312023
|
+
if (mod && mod.__esModule) return mod;
|
|
312024
|
+
var result = {};
|
|
312025
|
+
if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
|
|
312026
|
+
__setModuleDefault(result, mod);
|
|
312027
|
+
return result;
|
|
312028
|
+
};
|
|
312029
|
+
})();
|
|
312030
|
+
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
|
312031
|
+
exports.OpaWasmEvaluator = void 0;
|
|
312032
|
+
const fs = __importStar(__nccwpck_require__(79896));
|
|
312033
|
+
const path = __importStar(__nccwpck_require__(16928));
|
|
312034
|
+
const opa_compiler_1 = __nccwpck_require__(628);
|
|
312035
|
+
/**
 * OPA WASM Evaluator - loads and evaluates OPA policies locally.
 *
 * Supports three input formats:
 * 1. Pre-compiled `.wasm` bundle — loaded directly (fastest startup)
 * 2. `.rego` files or directory — auto-compiled to WASM via `opa build` CLI
 * 3. Directory with `policy.wasm` inside — loaded directly
 *
 * Compilation and caching of .rego files is delegated to {@link OpaCompiler}.
 *
 * Requires:
 * - `@open-policy-agent/opa-wasm` npm package (optional dep)
 * - `opa` CLI on PATH (only when auto-compiling .rego files)
 */
class OpaWasmEvaluator {
    policy = null; // loaded opa-wasm policy instance, null until initialize()
    dataDocument = {}; // external data exposed to Rego as `data.*`
    compiler = new opa_compiler_1.OpaCompiler(); // resolves/compiles rule sources
    /**
     * Resolve the rule source(s) to WASM bytes and load them with the optional
     * `@open-policy-agent/opa-wasm` dependency.
     */
    async initialize(rulesPath) {
        const paths = Array.isArray(rulesPath) ? rulesPath : [rulesPath];
        const wasmBytes = await this.compiler.resolveWasmBytes(paths);
        try {
            // Use createRequire to load the optional dep at runtime without ncc bundling it.
            // `new Function('id', 'return require(id)')` fails in ncc bundles because
            // `require` is not in the `new Function` scope. `createRequire` works correctly
            // because it creates a real Node.js require rooted at the given path.
            // eslint-disable-next-line @typescript-eslint/no-var-requires
            const { createRequire } = __nccwpck_require__(73339);
            const runtimeRequire = createRequire(__filename);
            const opaWasm = runtimeRequire('@open-policy-agent/opa-wasm');
            const loadPolicy = opaWasm.loadPolicy || opaWasm.default?.loadPolicy;
            if (!loadPolicy) {
                throw new Error('loadPolicy not found in @open-policy-agent/opa-wasm');
            }
            this.policy = await loadPolicy(wasmBytes);
        }
        catch (err) {
            const code = err?.code;
            if (code === 'MODULE_NOT_FOUND' || code === 'ERR_MODULE_NOT_FOUND') {
                throw new Error('OPA WASM evaluator requires @open-policy-agent/opa-wasm. ' +
                    'Install it with: npm install @open-policy-agent/opa-wasm');
            }
            throw err;
        }
    }
    /**
     * Load external data from a JSON file to use as the OPA data document.
     * The loaded data will be passed to `policy.setData()` during evaluation,
     * making it available in Rego via `data.<key>`.
     */
    loadData(dataPath) {
        const resolved = path.resolve(dataPath);
        // NOTE(review): path.resolve() normally collapses '..' segments, so this
        // guard mostly rejects names containing consecutive dots — confirm intent.
        if (path.normalize(resolved).includes('..')) {
            throw new Error(`Data path contains traversal sequences: ${dataPath}`);
        }
        if (!fs.existsSync(resolved)) {
            throw new Error(`OPA data file not found: ${resolved}`);
        }
        const stat = fs.statSync(resolved);
        if (stat.size > 10 * 1024 * 1024) {
            throw new Error(`OPA data file exceeds 10MB limit: ${resolved} (${stat.size} bytes)`);
        }
        const raw = fs.readFileSync(resolved, 'utf-8');
        let parsed;
        try {
            parsed = JSON.parse(raw);
        }
        catch (err) {
            throw new Error(`Failed to parse OPA data file ${resolved}: ${err.message}`);
        }
        // Only a top-level JSON object is a valid OPA data document.
        if (typeof parsed !== 'object' || parsed === null || Array.isArray(parsed)) {
            throw new Error('OPA data file must contain a JSON object (not an array or primitive)');
        }
        this.dataDocument = parsed;
    }
    /**
     * Evaluate the loaded policy against `input`.
     * Pushes the data document each call, then returns the first entry of the
     * result set (or undefined when the result set is empty).
     */
    async evaluate(input) {
        if (!this.policy) {
            throw new Error('OPA WASM evaluator not initialized');
        }
        this.policy.setData(this.dataDocument);
        const resultSet = this.policy.evaluate(input);
        const hasResults = Array.isArray(resultSet) && resultSet.length > 0;
        return hasResults ? resultSet[0].result : undefined;
    }
    /** Best-effort release of the underlying WASM policy instance. */
    async shutdown() {
        const policy = this.policy;
        this.policy = null;
        if (!policy) {
            return;
        }
        // opa-wasm policy objects may have a close/free method for WASM cleanup
        if (typeof policy.close === 'function') {
            try {
                policy.close();
            }
            catch { }
        }
        else if (typeof policy.free === 'function') {
            try {
                policy.free();
            }
            catch { }
        }
    }
}
|
|
312141
|
+
exports.OpaWasmEvaluator = OpaWasmEvaluator;
|
|
312142
|
+
|
|
312143
|
+
|
|
312144
|
+
/***/ }),
|
|
312145
|
+
|
|
312146
|
+
/***/ 17117:
|
|
312147
|
+
/***/ ((__unused_webpack_module, exports) => {
|
|
312148
|
+
|
|
312149
|
+
"use strict";
|
|
312150
|
+
|
|
312151
|
+
/**
|
|
312152
|
+
* Copyright (c) ProbeLabs. All rights reserved.
|
|
312153
|
+
* Licensed under the Elastic License 2.0; you may not use this file except
|
|
312154
|
+
* in compliance with the Elastic License 2.0.
|
|
312155
|
+
*/
|
|
312156
|
+
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
|
312157
|
+
exports.PolicyInputBuilder = void 0;
|
|
312158
|
+
/**
|
|
312159
|
+
* Builds OPA-compatible input documents from engine context.
|
|
312160
|
+
*
|
|
312161
|
+
* Resolves actor roles from the `policy.roles` config section by matching
|
|
312162
|
+
* the actor's authorAssociation and login against role definitions.
|
|
312163
|
+
*/
|
|
312164
|
+
class PolicyInputBuilder {
    roles;
    actor;
    repository;
    pullRequest;
    /**
     * @param policyConfig - `policy` section of the config; only `roles` is read.
     * @param actor - acting user (login, authorAssociation, optional slack identity).
     * @param repository - repository descriptor, passed through verbatim.
     * @param pullRequest - pull request descriptor, passed through verbatim.
     */
    constructor(policyConfig, actor, repository, pullRequest) {
        this.roles = policyConfig.roles || {};
        this.actor = actor;
        this.repository = repository;
        this.pullRequest = pullRequest;
    }
    /** True when any identity rule of the role matches the current actor. */
    identityMatches(roleConfig) {
        const { authorAssociation, login, slack } = this.actor;
        if (authorAssociation && roleConfig.author_association?.includes(authorAssociation)) {
            return true;
        }
        if (login && roleConfig.users?.includes(login)) {
            return true;
        }
        // Slack user ID match
        if (slack?.userId && roleConfig.slack_users?.includes(slack.userId)) {
            return true;
        }
        // Email match (case-insensitive)
        if (roleConfig.emails && slack?.email) {
            const actorEmail = slack.email.toLowerCase();
            if (roleConfig.emails.some(candidate => candidate.toLowerCase() === actorEmail)) {
                return true;
            }
        }
        // Note: teams-based role resolution requires GitHub API access (read:org scope)
        // and is not yet implemented. If configured, the role will not match via teams.
        return false;
    }
    /**
     * slack_channels gate: if set, the role only applies when triggered from
     * one of these channels. An empty/absent list leaves the gate open.
     */
    channelGateOpen(roleConfig) {
        const gate = roleConfig.slack_channels;
        if (!gate || gate.length === 0) {
            return true;
        }
        const channelId = this.actor.slack?.channelId;
        return Boolean(channelId) && gate.includes(channelId);
    }
    /** Resolve which roles apply to the current actor. */
    resolveRoles() {
        return Object.entries(this.roles)
            .filter(([, roleConfig]) => this.identityMatches(roleConfig) && this.channelGateOpen(roleConfig))
            .map(([roleName]) => roleName);
    }
    /** Actor document shared by every policy-input scope; `slack` only when present. */
    buildActor() {
        const doc = {
            authorAssociation: this.actor.authorAssociation,
            login: this.actor.login,
            roles: this.resolveRoles(),
            isLocalMode: this.actor.isLocalMode,
        };
        if (this.actor.slack) {
            doc.slack = this.actor.slack;
        }
        return doc;
    }
    /** Policy input for the `check.execute` scope. */
    forCheckExecution(check) {
        const { id, type, group, tags, criticality, sandbox, policy } = check;
        return {
            scope: 'check.execute',
            check: { id, type, group, tags, criticality, sandbox, policy },
            actor: this.buildActor(),
            repository: this.repository,
            pullRequest: this.pullRequest,
        };
    }
    /** Policy input for the `tool.invoke` scope. */
    forToolInvocation(serverName, methodName, transport) {
        return {
            scope: 'tool.invoke',
            tool: { serverName, methodName, transport },
            actor: this.buildActor(),
            repository: this.repository,
            pullRequest: this.pullRequest,
        };
    }
    /** Policy input for the `capability.resolve` scope (check type is always 'ai'). */
    forCapabilityResolve(checkId, capabilities) {
        return {
            scope: 'capability.resolve',
            check: { id: checkId, type: 'ai' },
            capability: capabilities,
            actor: this.buildActor(),
            repository: this.repository,
            pullRequest: this.pullRequest,
        };
    }
}
|
|
312266
|
+
exports.PolicyInputBuilder = PolicyInputBuilder;
|
|
312267
|
+
|
|
312268
|
+
|
|
312269
|
+
/***/ }),
|
|
312270
|
+
|
|
312271
|
+
/***/ 63737:
|
|
312272
|
+
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
|
|
312273
|
+
|
|
312274
|
+
"use strict";
|
|
312275
|
+
|
|
312276
|
+
/**
|
|
312277
|
+
* Copyright (c) ProbeLabs. All rights reserved.
|
|
312278
|
+
* Licensed under the Elastic License 2.0; you may not use this file except
|
|
312279
|
+
* in compliance with the Elastic License 2.0.
|
|
312280
|
+
*/
|
|
312281
|
+
// TypeScript-emitted interop helper: re-exports property `k` of module `m` onto
// `o` as `k2` (defaulting to `k`), using a live getter where possible so later
// mutations of the source binding remain visible. Falls back to a plain copy
// when Object.create is unavailable.
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    var desc = Object.getOwnPropertyDescriptor(m, k);
    // Re-wrap as a getter unless the source already exposes a suitable accessor.
    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
        desc = { enumerable: true, get: function() { return m[k]; } };
    }
    Object.defineProperty(o, k2, desc);
}) : (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    o[k2] = m[k];
}));
|
|
312292
|
+
// TypeScript-emitted interop helper: attaches the CommonJS module object as the
// `default` export of the namespace object built by __importStar.
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
    Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
    o["default"] = v;
});
|
|
312297
|
+
// TypeScript-emitted interop helper implementing `import * as ns from 'mod'`
// for CommonJS modules: genuine ES modules pass through unchanged; otherwise
// every own key except "default" is re-bound onto a fresh namespace object and
// the module itself becomes its `default` export.
var __importStar = (this && this.__importStar) || (function () {
    // Lazily pick the key-enumeration strategy on first call, then memoize it.
    var ownKeys = function(o) {
        ownKeys = Object.getOwnPropertyNames || function (o) {
            var ar = [];
            for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
            return ar;
        };
        return ownKeys(o);
    };
    return function (mod) {
        if (mod && mod.__esModule) return mod;
        var result = {};
        if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
        __setModuleDefault(result, mod);
        return result;
    };
})();
|
|
312314
|
+
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
|
312315
|
+
exports.KnexStoreBackend = void 0;
|
|
312316
|
+
/**
|
|
312317
|
+
* Knex-backed schedule store for PostgreSQL, MySQL, and MSSQL (Enterprise)
|
|
312318
|
+
*
|
|
312319
|
+
* Uses Knex query builder for database-agnostic SQL. Same schema as SQLite backend
|
|
312320
|
+
* but with real distributed locking via row-level claims (claimed_by/claimed_at/lock_token).
|
|
312321
|
+
*/
|
|
312322
|
+
const fs = __importStar(__nccwpck_require__(79896));
|
|
312323
|
+
const path = __importStar(__nccwpck_require__(16928));
|
|
312324
|
+
const uuid_1 = __nccwpck_require__(31914);
|
|
312325
|
+
const logger_1 = __nccwpck_require__(86999);
|
|
312326
|
+
/**
 * Coerce a database numeric column to a JS number.
 * Drivers may return bigints/timestamps as strings; those are parsed base-10.
 * null and undefined both map to undefined so optional columns stay optional.
 */
function toNum(val) {
    if (val == null) {
        return undefined;
    }
    if (typeof val === 'string') {
        return parseInt(val, 10);
    }
    return val;
}
|
|
312331
|
+
/**
 * Parse a JSON column value, returning undefined for empty/null input or
 * malformed JSON instead of throwing.
 */
function safeJsonParse(value) {
    if (!value) {
        return undefined;
    }
    let parsed;
    try {
        parsed = JSON.parse(value);
    }
    catch {
        parsed = undefined;
    }
    return parsed;
}
|
|
312341
|
+
/**
 * Map a `message_triggers` DB row (snake_case) to a trigger domain object
 * (camelCase). Booleans arrive as true/false or 1/0 depending on the dialect;
 * JSON columns are parsed leniently via safeJsonParse.
 */
function fromTriggerRow(row) {
    const asBool = (v) => v === true || v === 1;
    return {
        id: row.id,
        creatorId: row.creator_id,
        creatorContext: row.creator_context ?? undefined,
        creatorName: row.creator_name ?? undefined,
        description: row.description ?? undefined,
        channels: safeJsonParse(row.channels),
        fromUsers: safeJsonParse(row.from_users),
        fromBots: asBool(row.from_bots),
        contains: safeJsonParse(row.contains),
        matchPattern: row.match_pattern ?? undefined,
        threads: row.threads,
        workflow: row.workflow,
        inputs: safeJsonParse(row.inputs),
        outputContext: safeJsonParse(row.output_context),
        status: row.status,
        enabled: asBool(row.enabled),
        createdAt: toNum(row.created_at),
    };
}
|
|
312362
|
+
/**
 * Map a trigger domain object (camelCase) to a `message_triggers` insert/update
 * row (snake_case). Absent optionals are stored as SQL NULL; array/object
 * fields are serialized to JSON text columns.
 */
function toTriggerInsertRow(trigger) {
    const json = (v) => (v ? JSON.stringify(v) : null);
    return {
        id: trigger.id,
        creator_id: trigger.creatorId,
        creator_context: trigger.creatorContext ?? null,
        creator_name: trigger.creatorName ?? null,
        description: trigger.description ?? null,
        channels: json(trigger.channels),
        from_users: json(trigger.fromUsers),
        from_bots: trigger.fromBots,
        contains: json(trigger.contains),
        match_pattern: trigger.matchPattern ?? null,
        threads: trigger.threads,
        workflow: trigger.workflow,
        inputs: json(trigger.inputs),
        output_context: json(trigger.outputContext),
        status: trigger.status,
        enabled: trigger.enabled,
        created_at: trigger.createdAt,
    };
}
|
|
312383
|
+
/**
 * Map a `schedules` DB row (snake_case) to a schedule domain object
 * (camelCase). Numeric timestamp columns go through toNum (drivers may return
 * strings); JSON columns through safeJsonParse; dialect booleans are 1/true.
 */
function fromDbRow(row) {
    const recurring = row.is_recurring === true || row.is_recurring === 1;
    return {
        id: row.id,
        creatorId: row.creator_id,
        creatorContext: row.creator_context ?? undefined,
        creatorName: row.creator_name ?? undefined,
        timezone: row.timezone,
        schedule: row.schedule_expr,
        runAt: toNum(row.run_at),
        isRecurring: recurring,
        originalExpression: row.original_expression,
        workflow: row.workflow ?? undefined,
        workflowInputs: safeJsonParse(row.workflow_inputs),
        outputContext: safeJsonParse(row.output_context),
        status: row.status,
        createdAt: toNum(row.created_at),
        lastRunAt: toNum(row.last_run_at),
        nextRunAt: toNum(row.next_run_at),
        runCount: row.run_count,
        failureCount: row.failure_count,
        lastError: row.last_error ?? undefined,
        previousResponse: row.previous_response ?? undefined,
    };
}
|
|
312407
|
+
/**
 * Map a schedule domain object (camelCase) to a `schedules` insert/update row
 * (snake_case). Absent optionals become SQL NULL; object fields are serialized
 * to JSON text columns.
 */
function toInsertRow(schedule) {
    const json = (v) => (v ? JSON.stringify(v) : null);
    return {
        id: schedule.id,
        creator_id: schedule.creatorId,
        creator_context: schedule.creatorContext ?? null,
        creator_name: schedule.creatorName ?? null,
        timezone: schedule.timezone,
        schedule_expr: schedule.schedule,
        run_at: schedule.runAt ?? null,
        is_recurring: schedule.isRecurring,
        original_expression: schedule.originalExpression,
        workflow: schedule.workflow ?? null,
        workflow_inputs: json(schedule.workflowInputs),
        output_context: json(schedule.outputContext),
        status: schedule.status,
        created_at: schedule.createdAt,
        last_run_at: schedule.lastRunAt ?? null,
        next_run_at: schedule.nextRunAt ?? null,
        run_count: schedule.runCount,
        failure_count: schedule.failureCount,
        last_error: schedule.lastError ?? null,
        previous_response: schedule.previousResponse ?? null,
    };
}
|
|
312431
|
+
/**
|
|
312432
|
+
* Enterprise Knex-backed store for PostgreSQL, MySQL, and MSSQL
|
|
312433
|
+
*/
|
|
312434
|
+
class KnexStoreBackend {
    // Live knex instance; null until initialize() succeeds and after shutdown().
    knex = null;
    // Driver name: 'postgresql' | 'mysql' | 'mssql' (see clientMap in initialize()).
    driver;
    // Raw `connection` section of the storage config (host/port/ssl/pool/...).
    connection;
    /**
     * @param driver - database driver name ('postgresql', 'mysql', or 'mssql')
     * @param storageConfig - storage config; only its `connection` section is read
     * @param _haConfig - accepted for interface parity with other backends; unused here
     */
    constructor(driver, storageConfig, _haConfig) {
        this.driver = driver;
        this.connection = (storageConfig.connection || {});
    }
    /**
     * Load knex at runtime, open the connection pool, and ensure the schema
     * exists. Throws a friendly install hint when the optional `knex`
     * dependency is missing; any other load error is rethrown as-is.
     */
    async initialize() {
        // Load knex dynamically
        const { createRequire } = __nccwpck_require__(73339);
        const runtimeRequire = createRequire(__filename);
        let knexFactory;
        try {
            knexFactory = runtimeRequire('knex');
        }
        catch (err) {
            const code = err?.code;
            if (code === 'MODULE_NOT_FOUND' || code === 'ERR_MODULE_NOT_FOUND') {
                throw new Error('knex is required for PostgreSQL/MySQL/MSSQL schedule storage. ' +
                    'Install it with: npm install knex');
            }
            throw err;
        }
        // Map our driver names onto knex client adapter package names.
        const clientMap = {
            postgresql: 'pg',
            mysql: 'mysql2',
            mssql: 'tedious',
        };
        const client = clientMap[this.driver];
        // Build connection config
        let connection;
        if (this.connection.connection_string) {
            // A full connection string overrides all discrete host/port fields.
            connection = this.connection.connection_string;
        }
        else if (this.driver === 'mssql') {
            connection = this.buildMssqlConnection();
        }
        else {
            connection = this.buildStandardConnection();
        }
        this.knex = knexFactory({
            client,
            connection,
            pool: {
                min: this.connection.pool?.min ?? 0,
                max: this.connection.pool?.max ?? 10,
            },
        });
        // Run schema migration
        await this.migrateSchema();
        logger_1.logger.info(`[KnexStore] Initialized (${this.driver})`);
    }
    /** Connection object for PostgreSQL/MySQL: discrete host/port/db/credentials/ssl. */
    buildStandardConnection() {
        return {
            host: this.connection.host || 'localhost',
            port: this.connection.port,
            database: this.connection.database || 'visor',
            user: this.connection.user,
            password: this.connection.password,
            ssl: this.resolveSslConfig(),
        };
    }
    /**
     * Connection object for MSSQL, whose tedious driver uses `server` plus an
     * `options` block instead of `host`/`ssl`.
     */
    buildMssqlConnection() {
        const ssl = this.connection.ssl;
        // ssl: true, or an object without enabled:false, means "encrypt traffic".
        const sslEnabled = ssl === true || (typeof ssl === 'object' && ssl.enabled !== false);
        return {
            server: this.connection.host || 'localhost',
            port: this.connection.port,
            database: this.connection.database || 'visor',
            user: this.connection.user,
            password: this.connection.password,
            options: {
                encrypt: sslEnabled,
                // Object config: trust self-signed only when verification was explicitly
                // disabled. Non-object config: trust certs only when encryption is off.
                trustServerCertificate: typeof ssl === 'object' ? ssl.reject_unauthorized === false : !sslEnabled,
            },
        };
    }
    /**
     * Translate the config `ssl` value (boolean or object) into node-driver TLS
     * options. Certificate/key files are path-validated and read eagerly, so a
     * bad path fails at startup rather than on first query.
     */
    resolveSslConfig() {
        const ssl = this.connection.ssl;
        if (ssl === false || ssl === undefined)
            return false;
        if (ssl === true)
            return { rejectUnauthorized: true };
        // Object config
        if (ssl.enabled === false)
            return false;
        const result = {
            rejectUnauthorized: ssl.reject_unauthorized !== false,
        };
        if (ssl.ca) {
            const caPath = this.validateSslPath(ssl.ca, 'CA certificate');
            result.ca = fs.readFileSync(caPath, 'utf8');
        }
        if (ssl.cert) {
            const certPath = this.validateSslPath(ssl.cert, 'client certificate');
            result.cert = fs.readFileSync(certPath, 'utf8');
        }
        if (ssl.key) {
            const keyPath = this.validateSslPath(ssl.key, 'client key');
            result.key = fs.readFileSync(keyPath, 'utf8');
        }
        return result;
    }
    /**
     * Resolve an SSL file path to an absolute path, rejecting anything that
     * path.normalize would rewrite (e.g. embedded `..` sequences) and paths
     * that do not exist. @param label - human-readable name used in errors.
     */
    validateSslPath(filePath, label) {
        const resolved = path.resolve(filePath);
        if (resolved !== path.normalize(resolved)) {
            throw new Error(`SSL ${label} path contains invalid sequences: ${filePath}`);
        }
        if (!fs.existsSync(resolved)) {
            throw new Error(`SSL ${label} not found: ${filePath}`);
        }
        return resolved;
    }
    /** Destroy the connection pool; safe to call if initialize() never ran. */
    async shutdown() {
        if (this.knex) {
            await this.knex.destroy();
            this.knex = null;
        }
    }
    /**
     * Idempotently create the three tables (schedules, message_triggers,
     * scheduler_locks) when absent. Existing tables are left untouched — no
     * ALTERs are performed, so column changes need a separate migration path.
     */
    async migrateSchema() {
        const knex = this.getKnex();
        const exists = await knex.schema.hasTable('schedules');
        if (!exists) {
            await knex.schema.createTable('schedules', table => {
                table.string('id', 36).primary();
                table.string('creator_id', 255).notNullable().index();
                table.string('creator_context', 255);
                table.string('creator_name', 255);
                table.string('timezone', 64).notNullable().defaultTo('UTC');
                table.string('schedule_expr', 255);
                table.bigInteger('run_at');
                table.boolean('is_recurring').notNullable();
                table.text('original_expression');
                table.string('workflow', 255);
                table.text('workflow_inputs');
                table.text('output_context');
                table.string('status', 20).notNullable().index();
                table.bigInteger('created_at').notNullable();
                table.bigInteger('last_run_at');
                table.bigInteger('next_run_at');
                table.integer('run_count').notNullable().defaultTo(0);
                table.integer('failure_count').notNullable().defaultTo(0);
                table.text('last_error');
                table.text('previous_response');
                // Composite index backing getDueSchedules' status + due-time scan.
                table.index(['status', 'next_run_at']);
            });
        }
        // Create message_triggers table
        const triggersExist = await knex.schema.hasTable('message_triggers');
        if (!triggersExist) {
            await knex.schema.createTable('message_triggers', table => {
                table.string('id', 36).primary();
                table.string('creator_id', 255).notNullable().index();
                table.string('creator_context', 255);
                table.string('creator_name', 255);
                table.text('description');
                table.text('channels'); // JSON array
                table.text('from_users'); // JSON array
                table.boolean('from_bots').notNullable().defaultTo(false);
                table.text('contains'); // JSON array
                table.text('match_pattern');
                table.string('threads', 20).notNullable().defaultTo('any');
                table.string('workflow', 255).notNullable();
                table.text('inputs'); // JSON
                table.text('output_context'); // JSON
                table.string('status', 20).notNullable().defaultTo('active').index();
                table.boolean('enabled').notNullable().defaultTo(true);
                table.bigInteger('created_at').notNullable();
            });
        }
        // Create scheduler_locks table for distributed locking
        const locksExist = await knex.schema.hasTable('scheduler_locks');
        if (!locksExist) {
            await knex.schema.createTable('scheduler_locks', table => {
                table.string('lock_id', 255).primary();
                table.string('node_id', 255).notNullable();
                table.string('lock_token', 36).notNullable();
                table.bigInteger('acquired_at').notNullable();
                table.bigInteger('expires_at').notNullable();
            });
        }
    }
    /** Return the live knex instance; throws if initialize() has not run. */
    getKnex() {
        if (!this.knex) {
            throw new Error('[KnexStore] Not initialized. Call initialize() first.');
        }
        return this.knex;
    }
    // --- CRUD ---
    /**
     * Insert a new schedule. The id, createdAt, counters, and 'active' status
     * are always generated here, overriding anything in the input.
     */
    async create(schedule) {
        const knex = this.getKnex();
        const newSchedule = {
            ...schedule,
            id: (0, uuid_1.v4)(),
            createdAt: Date.now(),
            runCount: 0,
            failureCount: 0,
            status: 'active',
        };
        await knex('schedules').insert(toInsertRow(newSchedule));
        logger_1.logger.info(`[KnexStore] Created schedule ${newSchedule.id} for user ${newSchedule.creatorId}`);
        return newSchedule;
    }
    /**
     * Insert a fully-formed schedule, keeping its existing id and counters.
     * Silently returns when the id already exists, making re-imports safe.
     */
    async importSchedule(schedule) {
        const knex = this.getKnex();
        const existing = await knex('schedules').where('id', schedule.id).first();
        if (existing)
            return; // Already imported (idempotent)
        await knex('schedules').insert(toInsertRow(schedule));
    }
    /** Fetch one schedule by id, or undefined when not found. */
    async get(id) {
        const knex = this.getKnex();
        const row = await knex('schedules').where('id', id).first();
        return row ? fromDbRow(row) : undefined;
    }
    /**
     * Shallow-merge a patch into an existing schedule and persist it.
     * Returns the merged object, or undefined when the id does not exist.
     * NOTE(review): read-then-write without a transaction — concurrent updates
     * to the same row can lose fields; confirm callers serialize updates.
     */
    async update(id, patch) {
        const knex = this.getKnex();
        const existing = await knex('schedules').where('id', id).first();
        if (!existing)
            return undefined;
        const current = fromDbRow(existing);
        const updated = { ...current, ...patch, id: current.id };
        const row = toInsertRow(updated);
        // Remove id from update (PK cannot change)
        delete row.id;
        await knex('schedules').where('id', id).update(row);
        return updated;
    }
    /** Delete a schedule; returns true when a row was actually removed. */
    async delete(id) {
        const knex = this.getKnex();
        const deleted = await knex('schedules').where('id', id).del();
        if (deleted > 0) {
            logger_1.logger.info(`[KnexStore] Deleted schedule ${id}`);
            return true;
        }
        return false;
    }
    // --- Queries ---
    /** All schedules owned by a creator, regardless of status. */
    async getByCreator(creatorId) {
        const knex = this.getKnex();
        const rows = await knex('schedules').where('creator_id', creatorId);
        return rows.map((r) => fromDbRow(r));
    }
    /** All schedules whose status is 'active'. */
    async getActiveSchedules() {
        const knex = this.getKnex();
        const rows = await knex('schedules').where('status', 'active');
        return rows.map((r) => fromDbRow(r));
    }
    /**
     * Active schedules that are due at `now` (ms epoch, defaults to Date.now()):
     * one-shot rows by run_at, recurring rows by next_run_at.
     */
    async getDueSchedules(now) {
        const ts = now ?? Date.now();
        const knex = this.getKnex();
        // MSSQL uses 1/0 for booleans
        const bFalse = this.driver === 'mssql' ? 0 : false;
        const bTrue = this.driver === 'mssql' ? 1 : true;
        const rows = await knex('schedules')
            .where('status', 'active')
            .andWhere(function () {
            this.where(function () {
                this.where('is_recurring', bFalse)
                    .whereNotNull('run_at')
                    .where('run_at', '<=', ts);
            }).orWhere(function () {
                this.where('is_recurring', bTrue)
                    .whereNotNull('next_run_at')
                    .where('next_run_at', '<=', ts);
            });
        });
        return rows.map((r) => fromDbRow(r));
    }
    /**
     * Case-insensitive substring search over a creator's active schedules by
     * workflow name. LIKE wildcards (% _ \) in the needle are escaped so user
     * input cannot widen the match.
     */
    async findByWorkflow(creatorId, workflowName) {
        const knex = this.getKnex();
        const escaped = workflowName.toLowerCase().replace(/[%_\\]/g, '\\$&');
        const pattern = `%${escaped}%`;
        const rows = await knex('schedules')
            .where('creator_id', creatorId)
            .where('status', 'active')
            .whereRaw("LOWER(workflow) LIKE ? ESCAPE '\\'", [pattern]);
        return rows.map((r) => fromDbRow(r));
    }
    /** Every schedule in the table, unfiltered. */
    async getAll() {
        const knex = this.getKnex();
        const rows = await knex('schedules');
        return rows.map((r) => fromDbRow(r));
    }
    /** Aggregate counts of schedules by status and by one-shot vs recurring. */
    async getStats() {
        const knex = this.getKnex();
        // MSSQL uses 1/0 for booleans; PostgreSQL/MySQL accept both true/1
        const boolTrue = this.driver === 'mssql' ? '1' : 'true';
        const boolFalse = this.driver === 'mssql' ? '0' : 'false';
        const result = await knex('schedules')
            .select(knex.raw('COUNT(*) as total'), knex.raw("SUM(CASE WHEN status = 'active' THEN 1 ELSE 0 END) as active"), knex.raw("SUM(CASE WHEN status = 'paused' THEN 1 ELSE 0 END) as paused"), knex.raw("SUM(CASE WHEN status = 'completed' THEN 1 ELSE 0 END) as completed"), knex.raw("SUM(CASE WHEN status = 'failed' THEN 1 ELSE 0 END) as failed"), knex.raw(`SUM(CASE WHEN is_recurring = ${boolTrue} THEN 1 ELSE 0 END) as recurring`), knex.raw(`SUM(CASE WHEN is_recurring = ${boolFalse} THEN 1 ELSE 0 END) as one_time`))
            .first();
        // Number(...) || 0 normalizes driver-specific count types (string/bigint)
        // and maps NULL sums on an empty table to 0.
        return {
            total: Number(result.total) || 0,
            active: Number(result.active) || 0,
            paused: Number(result.paused) || 0,
            completed: Number(result.completed) || 0,
            failed: Number(result.failed) || 0,
            recurring: Number(result.recurring) || 0,
            oneTime: Number(result.one_time) || 0,
        };
    }
    /**
     * Enforce creation quotas; throws when adding one more schedule would hit
     * the global, per-user, or per-user-recurring cap. Limits that are unset
     * (falsy) are skipped.
     */
    async validateLimits(creatorId, isRecurring, limits) {
        const knex = this.getKnex();
        if (limits.maxGlobal) {
            const result = await knex('schedules').count('* as cnt').first();
            if (Number(result?.cnt) >= limits.maxGlobal) {
                throw new Error(`Global schedule limit reached (${limits.maxGlobal})`);
            }
        }
        if (limits.maxPerUser) {
            const result = await knex('schedules')
                .where('creator_id', creatorId)
                .count('* as cnt')
                .first();
            if (Number(result?.cnt) >= limits.maxPerUser) {
                throw new Error(`You have reached the maximum number of schedules (${limits.maxPerUser})`);
            }
        }
        if (isRecurring && limits.maxRecurringPerUser) {
            const bTrue = this.driver === 'mssql' ? 1 : true;
            const result = await knex('schedules')
                .where('creator_id', creatorId)
                .where('is_recurring', bTrue)
                .count('* as cnt')
                .first();
            if (Number(result?.cnt) >= limits.maxRecurringPerUser) {
                throw new Error(`You have reached the maximum number of recurring schedules (${limits.maxRecurringPerUser})`);
            }
        }
    }
    // --- HA Distributed Locking (via scheduler_locks table) ---
    /**
     * Attempt to take the named lock for ttlSeconds. Returns a fresh lock token
     * on success, or null when another node holds an unexpired lock. Claim
     * order: first UPDATE an expired row, then INSERT a new one; the primary
     * key on lock_id makes the INSERT race safe.
     */
    async tryAcquireLock(lockId, nodeId, ttlSeconds) {
        const knex = this.getKnex();
        const now = Date.now();
        const expiresAt = now + ttlSeconds * 1000;
        const token = (0, uuid_1.v4)();
        // Step 1: Try to claim an existing expired lock
        const updated = await knex('scheduler_locks')
            .where('lock_id', lockId)
            .where('expires_at', '<', now)
            .update({
            node_id: nodeId,
            lock_token: token,
            acquired_at: now,
            expires_at: expiresAt,
        });
        if (updated > 0)
            return token;
        // Step 2: Try to INSERT a new lock row
        try {
            await knex('scheduler_locks').insert({
                lock_id: lockId,
                node_id: nodeId,
                lock_token: token,
                acquired_at: now,
                expires_at: expiresAt,
            });
            return token;
        }
        catch {
            // Unique constraint violation — another node holds the lock
            return null;
        }
    }
    /**
     * Release a lock. The token guard ensures a node can only delete the lock
     * it still owns, never one re-claimed by another node after expiry.
     */
    async releaseLock(lockId, lockToken) {
        const knex = this.getKnex();
        await knex('scheduler_locks').where('lock_id', lockId).where('lock_token', lockToken).del();
    }
    /**
     * Extend a held lock's TTL. Returns false when the token no longer matches
     * (lock expired and was taken over), signalling the caller lost ownership.
     */
    async renewLock(lockId, lockToken, ttlSeconds) {
        const knex = this.getKnex();
        const now = Date.now();
        const expiresAt = now + ttlSeconds * 1000;
        const updated = await knex('scheduler_locks')
            .where('lock_id', lockId)
            .where('lock_token', lockToken)
            .update({ acquired_at: now, expires_at: expiresAt });
        return updated > 0;
    }
    /** Flush pending writes — nothing to do here; every operation hits the DB directly. */
    async flush() {
        // No-op for server-based backends
    }
    // --- Message Trigger CRUD ---
    /** Insert a new message trigger; id and createdAt are always generated here. */
    async createTrigger(trigger) {
        const knex = this.getKnex();
        const newTrigger = {
            ...trigger,
            id: (0, uuid_1.v4)(),
            createdAt: Date.now(),
        };
        await knex('message_triggers').insert(toTriggerInsertRow(newTrigger));
        logger_1.logger.info(`[KnexStore] Created trigger ${newTrigger.id} for user ${newTrigger.creatorId}`);
        return newTrigger;
    }
    /** Fetch one trigger by id, or undefined when not found. */
    async getTrigger(id) {
        const knex = this.getKnex();
        const row = await knex('message_triggers').where('id', id).first();
        return row ? fromTriggerRow(row) : undefined;
    }
    /**
     * Shallow-merge a patch into an existing trigger and persist it; id and
     * createdAt are pinned to their stored values. Returns the merged object,
     * or undefined when the id does not exist.
     */
    async updateTrigger(id, patch) {
        const knex = this.getKnex();
        const existing = await knex('message_triggers').where('id', id).first();
        if (!existing)
            return undefined;
        const current = fromTriggerRow(existing);
        const updated = {
            ...current,
            ...patch,
            id: current.id,
            createdAt: current.createdAt,
        };
        const row = toTriggerInsertRow(updated);
        delete row.id;
        await knex('message_triggers').where('id', id).update(row);
        return updated;
    }
    /** Delete a trigger; returns true when a row was actually removed. */
    async deleteTrigger(id) {
        const knex = this.getKnex();
        const deleted = await knex('message_triggers').where('id', id).del();
        if (deleted > 0) {
            logger_1.logger.info(`[KnexStore] Deleted trigger ${id}`);
            return true;
        }
        return false;
    }
    /** All triggers owned by a creator, regardless of status/enabled. */
    async getTriggersByCreator(creatorId) {
        const knex = this.getKnex();
        const rows = await knex('message_triggers').where('creator_id', creatorId);
        return rows.map((r) => fromTriggerRow(r));
    }
    /** Triggers that are both status='active' and enabled (dialect-aware boolean). */
    async getActiveTriggers() {
        const knex = this.getKnex();
        const rows = await knex('message_triggers')
            .where('status', 'active')
            .where('enabled', this.driver === 'mssql' ? 1 : true);
        return rows.map((r) => fromTriggerRow(r));
    }
}
|
|
312873
|
+
exports.KnexStoreBackend = KnexStoreBackend;
|
|
312874
|
+
|
|
312875
|
+
|
|
311072
312876
|
/***/ }),
|
|
311073
312877
|
|
|
311074
312878
|
/***/ 83864:
|
|
@@ -314610,6 +316414,10 @@ exports.configSchema = {
|
|
|
314610
316414
|
'^x-': {},
|
|
314611
316415
|
},
|
|
314612
316416
|
},
|
|
316417
|
+
rate_limit: {
|
|
316418
|
+
$ref: '#/definitions/RateLimitConfig',
|
|
316419
|
+
description: 'Rate limiting configuration for HTTP/API tools',
|
|
316420
|
+
},
|
|
314613
316421
|
workflow: {
|
|
314614
316422
|
type: 'string',
|
|
314615
316423
|
description: "Workflow ID (registry lookup) or file path (for type: 'workflow')",
|
|
@@ -314646,6 +316454,43 @@ exports.configSchema = {
|
|
|
314646
316454
|
type: 'string',
|
|
314647
316455
|
},
|
|
314648
316456
|
},
|
|
316457
|
+
RateLimitConfig: {
|
|
316458
|
+
type: 'object',
|
|
316459
|
+
properties: {
|
|
316460
|
+
key: {
|
|
316461
|
+
type: 'string',
|
|
316462
|
+
description: 'Shared bucket name; defaults to URL origin',
|
|
316463
|
+
},
|
|
316464
|
+
requests: {
|
|
316465
|
+
type: 'number',
|
|
316466
|
+
description: 'Max requests per window',
|
|
316467
|
+
},
|
|
316468
|
+
per: {
|
|
316469
|
+
type: 'string',
|
|
316470
|
+
enum: ['second', 'minute', 'hour'],
|
|
316471
|
+
description: 'Time window unit',
|
|
316472
|
+
},
|
|
316473
|
+
max_retries: {
|
|
316474
|
+
type: 'number',
|
|
316475
|
+
description: 'Max retries on 429 (default: 3)',
|
|
316476
|
+
},
|
|
316477
|
+
backoff: {
|
|
316478
|
+
type: 'string',
|
|
316479
|
+
enum: ['fixed', 'exponential'],
|
|
316480
|
+
description: 'Backoff strategy (default: exponential)',
|
|
316481
|
+
},
|
|
316482
|
+
initial_delay_ms: {
|
|
316483
|
+
type: 'number',
|
|
316484
|
+
description: 'Base delay for backoff in ms (default: 1000)',
|
|
316485
|
+
},
|
|
316486
|
+
},
|
|
316487
|
+
required: ['requests', 'per'],
|
|
316488
|
+
additionalProperties: false,
|
|
316489
|
+
description: 'Rate limit configuration for HTTP/API requests.',
|
|
316490
|
+
patternProperties: {
|
|
316491
|
+
'^x-': {},
|
|
316492
|
+
},
|
|
316493
|
+
},
|
|
314649
316494
|
WorkflowInput: {
|
|
314650
316495
|
type: 'object',
|
|
314651
316496
|
properties: {
|
|
@@ -314748,6 +316593,10 @@ exports.configSchema = {
|
|
|
314748
316593
|
$ref: '#/definitions/Record%3Cstring%2Cstring%3E',
|
|
314749
316594
|
description: 'HTTP headers',
|
|
314750
316595
|
},
|
|
316596
|
+
rate_limit: {
|
|
316597
|
+
$ref: '#/definitions/RateLimitConfig',
|
|
316598
|
+
description: 'Rate limiting configuration for http_client checks',
|
|
316599
|
+
},
|
|
314751
316600
|
endpoint: {
|
|
314752
316601
|
type: 'string',
|
|
314753
316602
|
description: 'HTTP endpoint path - required for http_input checks',
|
|
@@ -315149,7 +316998,7 @@ exports.configSchema = {
|
|
|
315149
316998
|
description: 'Arguments/inputs for the workflow',
|
|
315150
316999
|
},
|
|
315151
317000
|
overrides: {
|
|
315152
|
-
$ref: '#/definitions/Record%3Cstring%2CPartial%3Cinterface-src_types_config.ts-
|
|
317001
|
+
$ref: '#/definitions/Record%3Cstring%2CPartial%3Cinterface-src_types_config.ts-14532-29218-src_types_config.ts-0-57785%3E%3E',
|
|
315153
317002
|
description: 'Override specific step configurations in the workflow',
|
|
315154
317003
|
},
|
|
315155
317004
|
output_mapping: {
|
|
@@ -315165,7 +317014,7 @@ exports.configSchema = {
|
|
|
315165
317014
|
description: 'Config file path - alternative to workflow ID (loads a Visor config file as workflow)',
|
|
315166
317015
|
},
|
|
315167
317016
|
workflow_overrides: {
|
|
315168
|
-
$ref: '#/definitions/Record%3Cstring%2CPartial%3Cinterface-src_types_config.ts-
|
|
317017
|
+
$ref: '#/definitions/Record%3Cstring%2CPartial%3Cinterface-src_types_config.ts-14532-29218-src_types_config.ts-0-57785%3E%3E',
|
|
315169
317018
|
description: 'Alias for overrides - workflow step overrides (backward compatibility)',
|
|
315170
317019
|
},
|
|
315171
317020
|
ref: {
|
|
@@ -315867,7 +317716,7 @@ exports.configSchema = {
|
|
|
315867
317716
|
description: 'Custom output name (defaults to workflow name)',
|
|
315868
317717
|
},
|
|
315869
317718
|
overrides: {
|
|
315870
|
-
$ref: '#/definitions/Record%3Cstring%2CPartial%3Cinterface-src_types_config.ts-
|
|
317719
|
+
$ref: '#/definitions/Record%3Cstring%2CPartial%3Cinterface-src_types_config.ts-14532-29218-src_types_config.ts-0-57785%3E%3E',
|
|
315871
317720
|
description: 'Step overrides',
|
|
315872
317721
|
},
|
|
315873
317722
|
output_mapping: {
|
|
@@ -315882,13 +317731,13 @@ exports.configSchema = {
|
|
|
315882
317731
|
'^x-': {},
|
|
315883
317732
|
},
|
|
315884
317733
|
},
|
|
315885
|
-
'Record<string,Partial<interface-src_types_config.ts-
|
|
317734
|
+
'Record<string,Partial<interface-src_types_config.ts-14532-29218-src_types_config.ts-0-57785>>': {
|
|
315886
317735
|
type: 'object',
|
|
315887
317736
|
additionalProperties: {
|
|
315888
|
-
$ref: '#/definitions/Partial%3Cinterface-src_types_config.ts-
|
|
317737
|
+
$ref: '#/definitions/Partial%3Cinterface-src_types_config.ts-14532-29218-src_types_config.ts-0-57785%3E',
|
|
315889
317738
|
},
|
|
315890
317739
|
},
|
|
315891
|
-
'Partial<interface-src_types_config.ts-
|
|
317740
|
+
'Partial<interface-src_types_config.ts-14532-29218-src_types_config.ts-0-57785>': {
|
|
315892
317741
|
type: 'object',
|
|
315893
317742
|
additionalProperties: false,
|
|
315894
317743
|
},
|
|
@@ -319052,11 +320901,16 @@ const human_id_1 = __nccwpck_require__(30920);
|
|
|
319052
320901
|
const logger_1 = __nccwpck_require__(86999);
|
|
319053
320902
|
const footer_1 = __nccwpck_require__(6924);
|
|
319054
320903
|
/**
|
|
319055
|
-
* Manages GitHub PR comments with dynamic updating capabilities
|
|
320904
|
+
* Manages GitHub PR comments with dynamic updating capabilities.
|
|
320905
|
+
* All write operations are serialized through an internal queue to prevent
|
|
320906
|
+
* concurrent GitHub API calls from racing against each other.
|
|
319056
320907
|
*/
|
|
319057
320908
|
class CommentManager {
|
|
319058
320909
|
octokit;
|
|
319059
320910
|
retryConfig;
|
|
320911
|
+
// Serial write queue: chains all updateOrCreateComment calls so only one
|
|
320912
|
+
// GitHub comment write is in-flight at a time within a job.
|
|
320913
|
+
_writeQueue = Promise.resolve();
|
|
319060
320914
|
constructor(octokit, retryConfig) {
|
|
319061
320915
|
this.octokit = octokit;
|
|
319062
320916
|
this.retryConfig = {
|
|
@@ -319097,6 +320951,16 @@ class CommentManager {
|
|
|
319097
320951
|
* Update existing comment or create new one with collision detection
|
|
319098
320952
|
*/
|
|
319099
320953
|
async updateOrCreateComment(owner, repo, prNumber, content, options = {}) {
|
|
320954
|
+
// Serialize all comment writes through a single queue so only one
|
|
320955
|
+
// GitHub API write is in-flight at a time, preventing races between
|
|
320956
|
+
// concurrent checks updating the same or different comments.
|
|
320957
|
+
return new Promise((resolve, reject) => {
|
|
320958
|
+
this._writeQueue = this._writeQueue
|
|
320959
|
+
.then(() => this._doUpdateOrCreate(owner, repo, prNumber, content, options))
|
|
320960
|
+
.then(resolve, reject);
|
|
320961
|
+
});
|
|
320962
|
+
}
|
|
320963
|
+
async _doUpdateOrCreate(owner, repo, prNumber, content, options = {}) {
|
|
319100
320964
|
const { commentId = this.generateCommentId(), triggeredBy = 'unknown', allowConcurrentUpdates = false, commitSha, cachedGithubCommentId, } = options;
|
|
319101
320965
|
return this.withRetry(async () => {
|
|
319102
320966
|
// First try to find the comment via listComments API
|
|
@@ -323617,6 +325481,35 @@ class OutputFormatters {
|
|
|
323617
325481
|
exports.OutputFormatters = OutputFormatters;
|
|
323618
325482
|
|
|
323619
325483
|
|
|
325484
|
+
/***/ }),
|
|
325485
|
+
|
|
325486
|
+
/***/ 93866:
|
|
325487
|
+
/***/ ((__unused_webpack_module, exports) => {
|
|
325488
|
+
|
|
325489
|
+
"use strict";
|
|
325490
|
+
|
|
325491
|
+
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
|
325492
|
+
exports.DefaultPolicyEngine = void 0;
|
|
325493
|
+
/**
|
|
325494
|
+
* Default (no-op) policy engine — always allows everything.
|
|
325495
|
+
* Used when no enterprise license is present or policy is disabled.
|
|
325496
|
+
*/
|
|
325497
|
+
class DefaultPolicyEngine {
|
|
325498
|
+
async initialize(_config) { }
|
|
325499
|
+
async evaluateCheckExecution(_checkId, _checkConfig) {
|
|
325500
|
+
return { allowed: true };
|
|
325501
|
+
}
|
|
325502
|
+
async evaluateToolInvocation(_serverName, _methodName, _transport) {
|
|
325503
|
+
return { allowed: true };
|
|
325504
|
+
}
|
|
325505
|
+
async evaluateCapabilities(_checkId, _capabilities) {
|
|
325506
|
+
return { allowed: true };
|
|
325507
|
+
}
|
|
325508
|
+
async shutdown() { }
|
|
325509
|
+
}
|
|
325510
|
+
exports.DefaultPolicyEngine = DefaultPolicyEngine;
|
|
325511
|
+
|
|
325512
|
+
|
|
323620
325513
|
/***/ }),
|
|
323621
325514
|
|
|
323622
325515
|
/***/ 96611:
|
|
@@ -326853,6 +328746,7 @@ const deepmerge_1 = __importDefault(__nccwpck_require__(2569));
|
|
|
326853
328746
|
const jsonpath_plus_1 = __nccwpck_require__(55464);
|
|
326854
328747
|
const minimatch_1 = __nccwpck_require__(46507);
|
|
326855
328748
|
const logger_1 = __nccwpck_require__(86999);
|
|
328749
|
+
const rate_limiter_1 = __nccwpck_require__(56898);
|
|
326856
328750
|
const HTTP_METHODS = new Set(['get', 'put', 'post', 'delete', 'options', 'head', 'patch', 'trace']);
|
|
326857
328751
|
function isHttpUrl(value) {
|
|
326858
328752
|
return value.startsWith('http://') || value.startsWith('https://');
|
|
@@ -327118,6 +329012,7 @@ function getApiToolConfig(tool) {
|
|
|
327118
329012
|
securitySchemeName: tool.securitySchemeName ?? tool.security_scheme_name,
|
|
327119
329013
|
securityCredentials: tool.securityCredentials || tool.security_credentials || {},
|
|
327120
329014
|
requestTimeoutMs: tool.requestTimeoutMs ?? tool.request_timeout_ms ?? tool.timeout ?? 30000,
|
|
329015
|
+
rateLimitConfig: tool.rate_limit,
|
|
327121
329016
|
};
|
|
327122
329017
|
}
|
|
327123
329018
|
function buildOutputSchema(operation) {
|
|
@@ -327592,7 +329487,7 @@ async function executeMappedApiTool(mappedTool, args) {
|
|
|
327592
329487
|
const controller = new AbortController();
|
|
327593
329488
|
const timeout = setTimeout(() => controller.abort(), apiToolConfig.requestTimeoutMs);
|
|
327594
329489
|
try {
|
|
327595
|
-
const response = await
|
|
329490
|
+
const response = await (0, rate_limiter_1.rateLimitedFetch)(endpoint.toString(), {
|
|
327596
329491
|
method,
|
|
327597
329492
|
headers,
|
|
327598
329493
|
body: requestBodyValue === undefined
|
|
@@ -327601,7 +329496,7 @@ async function executeMappedApiTool(mappedTool, args) {
|
|
|
327601
329496
|
? JSON.stringify(requestBodyValue)
|
|
327602
329497
|
: String(requestBodyValue),
|
|
327603
329498
|
signal: controller.signal,
|
|
327604
|
-
});
|
|
329499
|
+
}, apiToolConfig.rateLimitConfig);
|
|
327605
329500
|
const raw = await response.text();
|
|
327606
329501
|
let body = raw;
|
|
327607
329502
|
const contentType = response.headers.get('content-type') || '';
|
|
@@ -331581,6 +333476,7 @@ const sandbox_1 = __nccwpck_require__(12630);
|
|
|
331581
333476
|
const template_context_1 = __nccwpck_require__(1581);
|
|
331582
333477
|
const oauth2_token_cache_1 = __nccwpck_require__(34713);
|
|
331583
333478
|
const logger_1 = __nccwpck_require__(86999);
|
|
333479
|
+
const rate_limiter_1 = __nccwpck_require__(56898);
|
|
331584
333480
|
const fs = __importStar(__nccwpck_require__(79896));
|
|
331585
333481
|
const path = __importStar(__nccwpck_require__(16928));
|
|
331586
333482
|
/**
|
|
@@ -331758,12 +333654,13 @@ class HttpClientProvider extends check_provider_interface_1.CheckProvider {
|
|
|
331758
333654
|
if (requestBody) {
|
|
331759
333655
|
logger_1.logger.verbose(`[http_client] Body: ${requestBody.substring(0, 500)}${requestBody.length > 500 ? '...' : ''}`);
|
|
331760
333656
|
}
|
|
333657
|
+
const rateLimitConfig = config.rate_limit;
|
|
331761
333658
|
// If output_file is specified, download to file instead of returning data
|
|
331762
333659
|
if (resolvedOutputFile) {
|
|
331763
|
-
const fileResult = await this.downloadToFile(renderedUrl, method, resolvedHeaders, requestBody, timeout, resolvedOutputFile);
|
|
333660
|
+
const fileResult = await this.downloadToFile(renderedUrl, method, resolvedHeaders, requestBody, timeout, resolvedOutputFile, rateLimitConfig);
|
|
331764
333661
|
return fileResult;
|
|
331765
333662
|
}
|
|
331766
|
-
const data = await this.fetchData(renderedUrl, method, resolvedHeaders, requestBody, timeout);
|
|
333663
|
+
const data = await this.fetchData(renderedUrl, method, resolvedHeaders, requestBody, timeout, rateLimitConfig);
|
|
331767
333664
|
// Apply Liquid transformation if specified
|
|
331768
333665
|
let processedData = data;
|
|
331769
333666
|
if (transform) {
|
|
@@ -331854,7 +333751,7 @@ class HttpClientProvider extends check_provider_interface_1.CheckProvider {
|
|
|
331854
333751
|
};
|
|
331855
333752
|
}
|
|
331856
333753
|
}
|
|
331857
|
-
async fetchData(url, method, headers, body, timeout = 30000) {
|
|
333754
|
+
async fetchData(url, method, headers, body, timeout = 30000, rateLimitConfig) {
|
|
331858
333755
|
// Check if fetch is available (Node 18+)
|
|
331859
333756
|
if (typeof fetch === 'undefined') {
|
|
331860
333757
|
throw new Error('HTTP client provider requires Node.js 18+ or node-fetch package');
|
|
@@ -331880,7 +333777,7 @@ class HttpClientProvider extends check_provider_interface_1.CheckProvider {
|
|
|
331880
333777
|
};
|
|
331881
333778
|
}
|
|
331882
333779
|
}
|
|
331883
|
-
const response = await
|
|
333780
|
+
const response = await (0, rate_limiter_1.rateLimitedFetch)(url, requestOptions, rateLimitConfig);
|
|
331884
333781
|
clearTimeout(timeoutId);
|
|
331885
333782
|
logger_1.logger.verbose(`[http_client] Response: ${response.status} ${response.statusText}`);
|
|
331886
333783
|
if (!response.ok) {
|
|
@@ -331919,7 +333816,7 @@ class HttpClientProvider extends check_provider_interface_1.CheckProvider {
|
|
|
331919
333816
|
throw error;
|
|
331920
333817
|
}
|
|
331921
333818
|
}
|
|
331922
|
-
async downloadToFile(url, method, headers, body, timeout, outputFile) {
|
|
333819
|
+
async downloadToFile(url, method, headers, body, timeout, outputFile, rateLimitConfig) {
|
|
331923
333820
|
// Check if fetch is available (Node 18+)
|
|
331924
333821
|
if (typeof fetch === 'undefined') {
|
|
331925
333822
|
throw new Error('HTTP client provider requires Node.js 18+ or node-fetch package');
|
|
@@ -331942,7 +333839,7 @@ class HttpClientProvider extends check_provider_interface_1.CheckProvider {
|
|
|
331942
333839
|
};
|
|
331943
333840
|
}
|
|
331944
333841
|
}
|
|
331945
|
-
const response = await
|
|
333842
|
+
const response = await (0, rate_limiter_1.rateLimitedFetch)(url, requestOptions, rateLimitConfig);
|
|
331946
333843
|
clearTimeout(timeoutId);
|
|
331947
333844
|
if (!response.ok) {
|
|
331948
333845
|
return {
|
|
@@ -333883,6 +335780,7 @@ const schedule_tool_1 = __nccwpck_require__(13395);
|
|
|
333883
335780
|
// Legacy Slack-specific imports for backwards compatibility
|
|
333884
335781
|
const schedule_tool_handler_1 = __nccwpck_require__(28883);
|
|
333885
335782
|
const env_resolver_1 = __nccwpck_require__(58749);
|
|
335783
|
+
const rate_limiter_1 = __nccwpck_require__(56898);
|
|
333886
335784
|
/**
|
|
333887
335785
|
* Check if a tool definition is an http_client tool
|
|
333888
335786
|
*/
|
|
@@ -334655,7 +336553,8 @@ class CustomToolsSSEServer {
|
|
|
334655
336553
|
resolvedHeaders['Content-Type'] = 'application/json';
|
|
334656
336554
|
}
|
|
334657
336555
|
}
|
|
334658
|
-
const
|
|
336556
|
+
const rateLimitConfig = tool.rate_limit;
|
|
336557
|
+
const response = await (0, rate_limiter_1.rateLimitedFetch)(url, requestOptions, rateLimitConfig);
|
|
334659
336558
|
clearTimeout(timeoutId);
|
|
334660
336559
|
if (!response.ok) {
|
|
334661
336560
|
let errorBody = '';
|
|
@@ -336175,6 +338074,17 @@ class WorkflowCheckProvider extends check_provider_interface_1.CheckProvider {
|
|
|
336175
338074
|
validateWorkflowDepth(currentDepth, maxDepth, workflow.id);
|
|
336176
338075
|
// Project workflow to dependency graph
|
|
336177
338076
|
const { config: workflowConfig, checks: checksMetadata } = projectWorkflowToGraph(workflow, inputs, config.checkName || workflow.id);
|
|
338077
|
+
// Propagate parent check's timeout to nested workflow steps that don't define their own.
|
|
338078
|
+
// This ensures that a parent `timeout: 120000` caps nested AI steps instead of them
|
|
338079
|
+
// falling back to the 30-minute default.
|
|
338080
|
+
const parentTimeout = config.timeout || config.ai?.timeout;
|
|
338081
|
+
if (parentTimeout && workflowConfig.checks) {
|
|
338082
|
+
for (const stepCfg of Object.values(workflowConfig.checks)) {
|
|
338083
|
+
if (!stepCfg.timeout && !stepCfg.ai?.timeout) {
|
|
338084
|
+
stepCfg.timeout = parentTimeout;
|
|
338085
|
+
}
|
|
338086
|
+
}
|
|
338087
|
+
}
|
|
336178
338088
|
// Build isolated child engine context (separate journal/memory to avoid state contamination)
|
|
336179
338089
|
// Reuse parent's memory config if available, but never the instance
|
|
336180
338090
|
const parentMemoryCfg = (parentContext?.memory &&
|
|
@@ -346769,7 +348679,7 @@ class StateMachineExecutionEngine {
|
|
|
346769
348679
|
try {
|
|
346770
348680
|
logger_1.logger.debug(`[PolicyEngine] Loading enterprise policy engine (engine=${configWithTagFilter.policy.engine})`);
|
|
346771
348681
|
// @ts-ignore — enterprise/ may not exist in OSS builds (caught at runtime)
|
|
346772
|
-
const { loadEnterprisePolicyEngine } = await Promise.resolve().then(() => __importStar(__nccwpck_require__(
|
|
348682
|
+
const { loadEnterprisePolicyEngine } = await Promise.resolve().then(() => __importStar(__nccwpck_require__(87068)));
|
|
346773
348683
|
context.policyEngine = await loadEnterprisePolicyEngine(configWithTagFilter.policy);
|
|
346774
348684
|
logger_1.logger.debug(`[PolicyEngine] Initialized: ${context.policyEngine?.constructor?.name || 'unknown'}`);
|
|
346775
348685
|
}
|
|
@@ -347803,7 +349713,12 @@ function buildEngineContextForRun(workingDirectory, config, prInfo, debug, maxPa
|
|
|
347803
349713
|
async acquire(parentSessionId, _dbg, queueTimeout) {
|
|
347804
349714
|
// Use visor session ID if probe didn't provide one
|
|
347805
349715
|
const sid = parentSessionId || sessionId;
|
|
347806
|
-
|
|
349716
|
+
// ProbeAgent calls acquire(null) without queueTimeout, which defaults
|
|
349717
|
+
// to 120s in FairConcurrencyLimiter — too short when AI checks take
|
|
349718
|
+
// 5-30+ min and slots are occupied. Override to 0 (disabled) so the
|
|
349719
|
+
// step/AI timeout governs cancellation instead.
|
|
349720
|
+
const effectiveQueueTimeout = queueTimeout ?? 0;
|
|
349721
|
+
return fairLimiter.acquire(sid, _dbg, effectiveQueueTimeout);
|
|
347807
349722
|
},
|
|
347808
349723
|
release(parentSessionId, _dbg) {
|
|
347809
349724
|
const sid = parentSessionId || sessionId;
|
|
@@ -351671,11 +353586,14 @@ async function executeCheckWithForEachItems(checkId, forEachParent, forEachItems
|
|
|
351671
353586
|
// Evaluate assume contract for this iteration (design-by-contract)
|
|
351672
353587
|
{
|
|
351673
353588
|
const assumeExpr = checkConfig?.assume;
|
|
351674
|
-
if (assumeExpr) {
|
|
353589
|
+
if (assumeExpr !== undefined && assumeExpr !== null) {
|
|
351675
353590
|
let ok = true;
|
|
351676
353591
|
try {
|
|
351677
353592
|
const evaluator = new failure_condition_evaluator_1.FailureConditionEvaluator();
|
|
351678
|
-
const
|
|
353593
|
+
const rawExprs = Array.isArray(assumeExpr) ? assumeExpr : [assumeExpr];
|
|
353594
|
+
// Coerce non-string values (e.g., YAML boolean `true`) to strings
|
|
353595
|
+
// so they can be safely evaluated as JavaScript expressions.
|
|
353596
|
+
const exprs = rawExprs.map((e) => (typeof e === 'string' ? e : String(e)));
|
|
351679
353597
|
// Get conversation from execution context (TUI/CLI) or provider event context (Slack)
|
|
351680
353598
|
const conversation = context.executionContext?.conversation ||
|
|
351681
353599
|
providerConfig?.eventContext?.conversation;
|
|
@@ -352843,11 +354761,14 @@ async function executeSingleCheck(checkId, context, state, emitEvent, transition
|
|
|
352843
354761
|
// Evaluate assume contract (design-by-contract) before executing
|
|
352844
354762
|
{
|
|
352845
354763
|
const assumeExpr = checkConfig?.assume;
|
|
352846
|
-
if (assumeExpr) {
|
|
354764
|
+
if (assumeExpr !== undefined && assumeExpr !== null) {
|
|
352847
354765
|
let ok = true;
|
|
352848
354766
|
try {
|
|
352849
354767
|
const evaluator = new failure_condition_evaluator_1.FailureConditionEvaluator();
|
|
352850
|
-
const
|
|
354768
|
+
const rawExprs = Array.isArray(assumeExpr) ? assumeExpr : [assumeExpr];
|
|
354769
|
+
// Coerce non-string values (e.g., YAML boolean `true`) to strings
|
|
354770
|
+
// so they can be safely evaluated as JavaScript expressions.
|
|
354771
|
+
const exprs = rawExprs.map((e) => (typeof e === 'string' ? e : String(e)));
|
|
352851
354772
|
// Get conversation from execution context (TUI/CLI) or provider event context (Slack)
|
|
352852
354773
|
const conversation = context.executionContext?.conversation ||
|
|
352853
354774
|
providerConfig?.eventContext?.conversation;
|
|
@@ -358513,7 +360434,7 @@ async function initTelemetry(opts = {}) {
|
|
|
358513
360434
|
const path = __nccwpck_require__(16928);
|
|
358514
360435
|
const outDir = opts.file?.dir ||
|
|
358515
360436
|
process.env.VISOR_TRACE_DIR ||
|
|
358516
|
-
|
|
360437
|
+
path.join(process.cwd(), 'output', 'traces');
|
|
358517
360438
|
fs.mkdirSync(outDir, { recursive: true });
|
|
358518
360439
|
const ts = new Date().toISOString().replace(/[:.]/g, '-');
|
|
358519
360440
|
process.env.VISOR_FALLBACK_TRACE_FILE = path.join(outDir, `run-${ts}.ndjson`);
|
|
@@ -358763,7 +360684,7 @@ async function shutdownTelemetry() {
|
|
|
358763
360684
|
if (process.env.VISOR_TRACE_REPORT === 'true') {
|
|
358764
360685
|
const fs = __nccwpck_require__(79896);
|
|
358765
360686
|
const path = __nccwpck_require__(16928);
|
|
358766
|
-
const outDir = process.env.VISOR_TRACE_DIR ||
|
|
360687
|
+
const outDir = process.env.VISOR_TRACE_DIR || path.join(process.cwd(), 'output', 'traces');
|
|
358767
360688
|
if (!fs.existsSync(outDir))
|
|
358768
360689
|
fs.mkdirSync(outDir, { recursive: true });
|
|
358769
360690
|
const ts = new Date().toISOString().replace(/[:.]/g, '-');
|
|
@@ -359314,7 +361235,7 @@ function __getOrCreateNdjsonPath() {
|
|
|
359314
361235
|
fs.mkdirSync(dir, { recursive: true });
|
|
359315
361236
|
return __ndjsonPath;
|
|
359316
361237
|
}
|
|
359317
|
-
const outDir = process.env.VISOR_TRACE_DIR ||
|
|
361238
|
+
const outDir = process.env.VISOR_TRACE_DIR || path.join(process.cwd(), 'output', 'traces');
|
|
359318
361239
|
if (!fs.existsSync(outDir))
|
|
359319
361240
|
fs.mkdirSync(outDir, { recursive: true });
|
|
359320
361241
|
if (!__ndjsonPath) {
|
|
@@ -359794,7 +361715,7 @@ function expandConversationToFlow(testCase) {
|
|
|
359794
361715
|
transport,
|
|
359795
361716
|
thread: { id: threadId },
|
|
359796
361717
|
messages: [...currentMessages],
|
|
359797
|
-
current: { role: 'user', text: turn.text },
|
|
361718
|
+
current: { role: 'user', text: turn.text, ...(turn.user ? { user: turn.user } : {}) },
|
|
359798
361719
|
},
|
|
359799
361720
|
},
|
|
359800
361721
|
...(turn.mocks ? { mocks: turn.mocks } : {}),
|
|
@@ -364260,6 +366181,7 @@ const schema = {
|
|
|
364260
366181
|
properties: {
|
|
364261
366182
|
role: { type: 'string', enum: ['user', 'assistant'] },
|
|
364262
366183
|
text: { type: 'string' },
|
|
366184
|
+
user: { type: 'string' },
|
|
364263
366185
|
mocks: {
|
|
364264
366186
|
type: 'object',
|
|
364265
366187
|
additionalProperties: {
|
|
@@ -370607,6 +372529,193 @@ class OAuth2TokenCache {
|
|
|
370607
372529
|
exports.OAuth2TokenCache = OAuth2TokenCache;
|
|
370608
372530
|
|
|
370609
372531
|
|
|
372532
|
+
/***/ }),
|
|
372533
|
+
|
|
372534
|
+
/***/ 56898:
|
|
372535
|
+
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
|
|
372536
|
+
|
|
372537
|
+
"use strict";
|
|
372538
|
+
|
|
372539
|
+
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
|
372540
|
+
exports.RateLimiterRegistry = exports.TokenBucket = void 0;
|
|
372541
|
+
exports.resolveRateLimitKey = resolveRateLimitKey;
|
|
372542
|
+
exports.rateLimitedFetch = rateLimitedFetch;
|
|
372543
|
+
const logger_1 = __nccwpck_require__(86999);
|
|
372544
|
+
/**
|
|
372545
|
+
* Token bucket rate limiter with FIFO wait queue.
|
|
372546
|
+
*/
|
|
372547
|
+
class TokenBucket {
|
|
372548
|
+
tokens;
|
|
372549
|
+
capacity;
|
|
372550
|
+
refillRate; // tokens per ms
|
|
372551
|
+
lastRefill;
|
|
372552
|
+
waitQueue = [];
|
|
372553
|
+
constructor(capacity, windowMs) {
|
|
372554
|
+
this.capacity = capacity;
|
|
372555
|
+
this.tokens = capacity;
|
|
372556
|
+
this.refillRate = capacity / windowMs;
|
|
372557
|
+
this.lastRefill = Date.now();
|
|
372558
|
+
}
|
|
372559
|
+
refill() {
|
|
372560
|
+
const now = Date.now();
|
|
372561
|
+
const elapsed = now - this.lastRefill;
|
|
372562
|
+
const newTokens = elapsed * this.refillRate;
|
|
372563
|
+
this.tokens = Math.min(this.capacity, this.tokens + newTokens);
|
|
372564
|
+
this.lastRefill = now;
|
|
372565
|
+
}
|
|
372566
|
+
/**
|
|
372567
|
+
* Non-blocking: try to consume one token.
|
|
372568
|
+
*/
|
|
372569
|
+
tryConsume() {
|
|
372570
|
+
this.refill();
|
|
372571
|
+
if (this.tokens >= 1) {
|
|
372572
|
+
this.tokens -= 1;
|
|
372573
|
+
return true;
|
|
372574
|
+
}
|
|
372575
|
+
return false;
|
|
372576
|
+
}
|
|
372577
|
+
/**
|
|
372578
|
+
* Blocking: wait until a token is available, then consume it.
|
|
372579
|
+
* Requests are served FIFO.
|
|
372580
|
+
*/
|
|
372581
|
+
async acquire() {
|
|
372582
|
+
if (this.tryConsume()) {
|
|
372583
|
+
return;
|
|
372584
|
+
}
|
|
372585
|
+
// Calculate wait time for next token
|
|
372586
|
+
const waitMs = Math.ceil((1 - this.tokens) / this.refillRate);
|
|
372587
|
+
return new Promise(resolve => {
|
|
372588
|
+
const entry = { resolve };
|
|
372589
|
+
this.waitQueue.push(entry);
|
|
372590
|
+
setTimeout(() => {
|
|
372591
|
+
// Remove from queue
|
|
372592
|
+
const idx = this.waitQueue.indexOf(entry);
|
|
372593
|
+
if (idx >= 0) {
|
|
372594
|
+
this.waitQueue.splice(idx, 1);
|
|
372595
|
+
}
|
|
372596
|
+
this.refill();
|
|
372597
|
+
if (this.tokens >= 1) {
|
|
372598
|
+
this.tokens -= 1;
|
|
372599
|
+
}
|
|
372600
|
+
resolve();
|
|
372601
|
+
}, waitMs);
|
|
372602
|
+
});
|
|
372603
|
+
}
|
|
372604
|
+
}
|
|
372605
|
+
exports.TokenBucket = TokenBucket;
|
|
372606
|
+
function windowToMs(per) {
|
|
372607
|
+
switch (per) {
|
|
372608
|
+
case 'second':
|
|
372609
|
+
return 1000;
|
|
372610
|
+
case 'minute':
|
|
372611
|
+
return 60_000;
|
|
372612
|
+
case 'hour':
|
|
372613
|
+
return 3_600_000;
|
|
372614
|
+
}
|
|
372615
|
+
}
|
|
372616
|
+
const REGISTRY_KEY = Symbol.for('visor.rateLimiterRegistry');
|
|
372617
|
+
/**
|
|
372618
|
+
* Global singleton registry of named token buckets.
|
|
372619
|
+
*/
|
|
372620
|
+
class RateLimiterRegistry {
|
|
372621
|
+
buckets = new Map();
|
|
372622
|
+
static getInstance() {
|
|
372623
|
+
const g = globalThis;
|
|
372624
|
+
if (!g[REGISTRY_KEY]) {
|
|
372625
|
+
g[REGISTRY_KEY] = new RateLimiterRegistry();
|
|
372626
|
+
}
|
|
372627
|
+
return g[REGISTRY_KEY];
|
|
372628
|
+
}
|
|
372629
|
+
getOrCreate(key, config) {
|
|
372630
|
+
let bucket = this.buckets.get(key);
|
|
372631
|
+
if (!bucket) {
|
|
372632
|
+
const windowMs = windowToMs(config.per);
|
|
372633
|
+
bucket = new TokenBucket(config.requests, windowMs);
|
|
372634
|
+
this.buckets.set(key, bucket);
|
|
372635
|
+
logger_1.logger.verbose(`[rate-limiter] Created bucket "${key}": ${config.requests} req/${config.per}`);
|
|
372636
|
+
}
|
|
372637
|
+
return bucket;
|
|
372638
|
+
}
|
|
372639
|
+
cleanup() {
|
|
372640
|
+
this.buckets.clear();
|
|
372641
|
+
}
|
|
372642
|
+
}
|
|
372643
|
+
exports.RateLimiterRegistry = RateLimiterRegistry;
|
|
372644
|
+
/**
|
|
372645
|
+
* Resolve the rate limit key from config and optional fallback URL.
|
|
372646
|
+
*/
|
|
372647
|
+
function resolveRateLimitKey(config, fallbackUrl) {
|
|
372648
|
+
if (config.key) {
|
|
372649
|
+
return config.key;
|
|
372650
|
+
}
|
|
372651
|
+
if (fallbackUrl) {
|
|
372652
|
+
try {
|
|
372653
|
+
const url = new URL(fallbackUrl);
|
|
372654
|
+
return url.origin;
|
|
372655
|
+
}
|
|
372656
|
+
catch {
|
|
372657
|
+
// not a valid URL, use as-is
|
|
372658
|
+
return fallbackUrl;
|
|
372659
|
+
}
|
|
372660
|
+
}
|
|
372661
|
+
return '__default__';
|
|
372662
|
+
}
|
|
372663
|
+
/**
|
|
372664
|
+
* Rate-limited fetch wrapper.
|
|
372665
|
+
*
|
|
372666
|
+
* If rateLimitConfig is provided, acquires a token before making the request
|
|
372667
|
+
* and retries on 429 responses with backoff.
|
|
372668
|
+
*
|
|
372669
|
+
* If no config is provided, behaves exactly like native fetch().
|
|
372670
|
+
*/
|
|
372671
|
+
async function rateLimitedFetch(url, options, rateLimitConfig) {
|
|
372672
|
+
if (!rateLimitConfig) {
|
|
372673
|
+
return fetch(url, options);
|
|
372674
|
+
}
|
|
372675
|
+
const key = resolveRateLimitKey(rateLimitConfig, url);
|
|
372676
|
+
const registry = RateLimiterRegistry.getInstance();
|
|
372677
|
+
const bucket = registry.getOrCreate(key, rateLimitConfig);
|
|
372678
|
+
const maxRetries = rateLimitConfig.max_retries ?? 3;
|
|
372679
|
+
const backoff = rateLimitConfig.backoff ?? 'exponential';
|
|
372680
|
+
const initialDelay = rateLimitConfig.initial_delay_ms ?? 1000;
|
|
372681
|
+
for (let attempt = 0; attempt <= maxRetries; attempt++) {
|
|
372682
|
+
// Acquire a token (waits if bucket is empty)
|
|
372683
|
+
await bucket.acquire();
|
|
372684
|
+
const response = await fetch(url, options);
|
|
372685
|
+
if (response.status !== 429) {
|
|
372686
|
+
return response;
|
|
372687
|
+
}
|
|
372688
|
+
// 429 — rate limited by server
|
|
372689
|
+
if (attempt === maxRetries) {
|
|
372690
|
+
logger_1.logger.warn(`[rate-limiter] Exhausted ${maxRetries} retries for ${url} (bucket: ${key})`);
|
|
372691
|
+
return response;
|
|
372692
|
+
}
|
|
372693
|
+
// Calculate delay: respect Retry-After header if present
|
|
372694
|
+
let delayMs;
|
|
372695
|
+
const retryAfter = response.headers.get('retry-after');
|
|
372696
|
+
if (retryAfter) {
|
|
372697
|
+
const parsed = Number(retryAfter);
|
|
372698
|
+
if (!isNaN(parsed)) {
|
|
372699
|
+
// Retry-After in seconds
|
|
372700
|
+
delayMs = parsed * 1000;
|
|
372701
|
+
}
|
|
372702
|
+
else {
|
|
372703
|
+
// Retry-After as HTTP date
|
|
372704
|
+
const date = new Date(retryAfter).getTime();
|
|
372705
|
+
delayMs = Math.max(0, date - Date.now());
|
|
372706
|
+
}
|
|
372707
|
+
}
|
|
372708
|
+
else {
|
|
372709
|
+
delayMs = backoff === 'exponential' ? initialDelay * Math.pow(2, attempt) : initialDelay;
|
|
372710
|
+
}
|
|
372711
|
+
logger_1.logger.verbose(`[rate-limiter] 429 on ${url} (bucket: ${key}), retry ${attempt + 1}/${maxRetries} in ${delayMs}ms`);
|
|
372712
|
+
await new Promise(resolve => setTimeout(resolve, delayMs));
|
|
372713
|
+
}
|
|
372714
|
+
// Should not reach here, but satisfy TypeScript
|
|
372715
|
+
return fetch(url, options);
|
|
372716
|
+
}
|
|
372717
|
+
|
|
372718
|
+
|
|
370610
372719
|
/***/ }),
|
|
370611
372720
|
|
|
370612
372721
|
/***/ 12630:
|
|
@@ -375434,22 +377543,6 @@ class WorkflowRegistry {
|
|
|
375434
377543
|
exports.WorkflowRegistry = WorkflowRegistry;
|
|
375435
377544
|
|
|
375436
377545
|
|
|
375437
|
-
/***/ }),
|
|
375438
|
-
|
|
375439
|
-
/***/ 7065:
|
|
375440
|
-
/***/ ((module) => {
|
|
375441
|
-
|
|
375442
|
-
module.exports = eval("require")("./enterprise/loader");
|
|
375443
|
-
|
|
375444
|
-
|
|
375445
|
-
/***/ }),
|
|
375446
|
-
|
|
375447
|
-
/***/ 71370:
|
|
375448
|
-
/***/ ((module) => {
|
|
375449
|
-
|
|
375450
|
-
module.exports = eval("require")("./enterprise/policy/policy-input-builder");
|
|
375451
|
-
|
|
375452
|
-
|
|
375453
377546
|
/***/ }),
|
|
375454
377547
|
|
|
375455
377548
|
/***/ 18327:
|
|
@@ -475346,6 +477439,10 @@ var init_symbolEdit = __esm({
|
|
|
475346
477439
|
});
|
|
475347
477440
|
|
|
475348
477441
|
// src/tools/fileTracker.js
|
|
477442
|
+
function normalizePath(filePath) {
|
|
477443
|
+
if (!filePath) return filePath;
|
|
477444
|
+
return (0, import_path6.resolve)(filePath);
|
|
477445
|
+
}
|
|
475349
477446
|
function computeContentHash(content) {
|
|
475350
477447
|
const normalized = (content || "").split("\n").map((l) => l.trimEnd()).join("\n");
|
|
475351
477448
|
return (0, import_crypto2.createHash)("sha256").update(normalized).digest("hex").slice(0, 16);
|
|
@@ -475408,10 +477505,11 @@ var init_fileTracker = __esm({
|
|
|
475408
477505
|
* @param {string} resolvedPath - Absolute path to the file
|
|
475409
477506
|
*/
|
|
475410
477507
|
markFileSeen(resolvedPath) {
|
|
475411
|
-
|
|
475412
|
-
this.
|
|
477508
|
+
const normalized = normalizePath(resolvedPath);
|
|
477509
|
+
this._seenFiles.add(normalized);
|
|
477510
|
+
this._textEditCounts.set(normalized, 0);
|
|
475413
477511
|
if (this.debug) {
|
|
475414
|
-
console.error(`[FileTracker] Marked as seen: ${
|
|
477512
|
+
console.error(`[FileTracker] Marked as seen: ${normalized}`);
|
|
475415
477513
|
}
|
|
475416
477514
|
}
|
|
475417
477515
|
/**
|
|
@@ -475420,7 +477518,7 @@ var init_fileTracker = __esm({
|
|
|
475420
477518
|
* @returns {boolean}
|
|
475421
477519
|
*/
|
|
475422
477520
|
isFileSeen(resolvedPath) {
|
|
475423
|
-
return this._seenFiles.has(resolvedPath);
|
|
477521
|
+
return this._seenFiles.has(normalizePath(resolvedPath));
|
|
475424
477522
|
}
|
|
475425
477523
|
/**
|
|
475426
477524
|
* Store a content hash for a symbol in a file.
|
|
@@ -475432,7 +477530,7 @@ var init_fileTracker = __esm({
|
|
|
475432
477530
|
* @param {string} [source='extract'] - How the content was obtained
|
|
475433
477531
|
*/
|
|
475434
477532
|
trackSymbolContent(resolvedPath, symbolName, code, startLine, endLine, source = "extract") {
|
|
475435
|
-
const key = `${resolvedPath}#${symbolName}`;
|
|
477533
|
+
const key = `${normalizePath(resolvedPath)}#${symbolName}`;
|
|
475436
477534
|
const contentHash = computeContentHash(code);
|
|
475437
477535
|
this._contentRecords.set(key, {
|
|
475438
477536
|
contentHash,
|
|
@@ -475453,7 +477551,7 @@ var init_fileTracker = __esm({
|
|
|
475453
477551
|
* @returns {Object|null} The stored record or null
|
|
475454
477552
|
*/
|
|
475455
477553
|
getSymbolRecord(resolvedPath, symbolName) {
|
|
475456
|
-
return this._contentRecords.get(`${resolvedPath}#${symbolName}`) || null;
|
|
477554
|
+
return this._contentRecords.get(`${normalizePath(resolvedPath)}#${symbolName}`) || null;
|
|
475457
477555
|
}
|
|
475458
477556
|
/**
|
|
475459
477557
|
* Check if a symbol's current content matches what was stored.
|
|
@@ -475463,7 +477561,7 @@ var init_fileTracker = __esm({
|
|
|
475463
477561
|
* @returns {{ok: boolean, reason?: string, message?: string}}
|
|
475464
477562
|
*/
|
|
475465
477563
|
checkSymbolContent(resolvedPath, symbolName, currentCode) {
|
|
475466
|
-
const key = `${resolvedPath}#${symbolName}`;
|
|
477564
|
+
const key = `${normalizePath(resolvedPath)}#${symbolName}`;
|
|
475467
477565
|
const record2 = this._contentRecords.get(key);
|
|
475468
477566
|
if (!record2) {
|
|
475469
477567
|
return { ok: true };
|
|
@@ -475540,7 +477638,7 @@ var init_fileTracker = __esm({
|
|
|
475540
477638
|
* @returns {{ok: boolean, reason?: string, message?: string}}
|
|
475541
477639
|
*/
|
|
475542
477640
|
checkBeforeEdit(resolvedPath) {
|
|
475543
|
-
if (!this._seenFiles.has(resolvedPath)) {
|
|
477641
|
+
if (!this._seenFiles.has(normalizePath(resolvedPath))) {
|
|
475544
477642
|
return {
|
|
475545
477643
|
ok: false,
|
|
475546
477644
|
reason: "untracked",
|
|
@@ -475555,8 +477653,9 @@ var init_fileTracker = __esm({
|
|
|
475555
477653
|
* @param {string} resolvedPath - Absolute path to the file
|
|
475556
477654
|
*/
|
|
475557
477655
|
async trackFileAfterWrite(resolvedPath) {
|
|
475558
|
-
|
|
475559
|
-
this.
|
|
477656
|
+
const normalized = normalizePath(resolvedPath);
|
|
477657
|
+
this._seenFiles.add(normalized);
|
|
477658
|
+
this.invalidateFileRecords(normalized);
|
|
475560
477659
|
}
|
|
475561
477660
|
/**
|
|
475562
477661
|
* Record a text-mode edit (old_string/new_string) to a file.
|
|
@@ -475564,10 +477663,11 @@ var init_fileTracker = __esm({
|
|
|
475564
477663
|
* @param {string} resolvedPath - Absolute path to the file
|
|
475565
477664
|
*/
|
|
475566
477665
|
recordTextEdit(resolvedPath) {
|
|
475567
|
-
const
|
|
475568
|
-
this._textEditCounts.
|
|
477666
|
+
const normalized = normalizePath(resolvedPath);
|
|
477667
|
+
const count = (this._textEditCounts.get(normalized) || 0) + 1;
|
|
477668
|
+
this._textEditCounts.set(normalized, count);
|
|
475569
477669
|
if (this.debug) {
|
|
475570
|
-
console.error(`[FileTracker] Text edit #${count} for ${
|
|
477670
|
+
console.error(`[FileTracker] Text edit #${count} for ${normalized}`);
|
|
475571
477671
|
}
|
|
475572
477672
|
}
|
|
475573
477673
|
/**
|
|
@@ -475576,7 +477676,7 @@ var init_fileTracker = __esm({
|
|
|
475576
477676
|
* @returns {{ok: boolean, editCount?: number, message?: string}}
|
|
475577
477677
|
*/
|
|
475578
477678
|
checkTextEditStaleness(resolvedPath) {
|
|
475579
|
-
const count = this._textEditCounts.get(resolvedPath) || 0;
|
|
477679
|
+
const count = this._textEditCounts.get(normalizePath(resolvedPath)) || 0;
|
|
475580
477680
|
if (count >= this.maxConsecutiveTextEdits) {
|
|
475581
477681
|
return {
|
|
475582
477682
|
ok: false,
|
|
@@ -475605,7 +477705,7 @@ var init_fileTracker = __esm({
|
|
|
475605
477705
|
* @param {string} resolvedPath - Absolute path to the file
|
|
475606
477706
|
*/
|
|
475607
477707
|
invalidateFileRecords(resolvedPath) {
|
|
475608
|
-
const prefix = resolvedPath + "#";
|
|
477708
|
+
const prefix = normalizePath(resolvedPath) + "#";
|
|
475609
477709
|
for (const key of this._contentRecords.keys()) {
|
|
475610
477710
|
if (key.startsWith(prefix)) {
|
|
475611
477711
|
this._contentRecords.delete(key);
|
|
@@ -475621,7 +477721,7 @@ var init_fileTracker = __esm({
|
|
|
475621
477721
|
* @returns {boolean}
|
|
475622
477722
|
*/
|
|
475623
477723
|
isTracked(resolvedPath) {
|
|
475624
|
-
return this.isFileSeen(resolvedPath);
|
|
477724
|
+
return this.isFileSeen(normalizePath(resolvedPath));
|
|
475625
477725
|
}
|
|
475626
477726
|
/**
|
|
475627
477727
|
* Clear all tracking state.
|
|
@@ -479674,7 +481774,7 @@ var init_esm3 = __esm({
|
|
|
479674
481774
|
});
|
|
479675
481775
|
|
|
479676
481776
|
// node_modules/path-scurry/dist/esm/index.js
|
|
479677
|
-
var import_node_path, import_node_url, import_fs4, actualFS, import_promises, realpathSync2, defaultFS, fsFromOption, uncDriveRegexp, uncToDrive, eitherSep, UNKNOWN, IFIFO, IFCHR, IFDIR, IFBLK, IFREG, IFLNK, IFSOCK, IFMT, IFMT_UNKNOWN, READDIR_CALLED, LSTAT_CALLED, ENOTDIR, ENOENT, ENOREADLINK, ENOREALPATH, ENOCHILD, TYPEMASK, entToType, normalizeCache,
|
|
481777
|
+
var import_node_path, import_node_url, import_fs4, actualFS, import_promises, realpathSync2, defaultFS, fsFromOption, uncDriveRegexp, uncToDrive, eitherSep, UNKNOWN, IFIFO, IFCHR, IFDIR, IFBLK, IFREG, IFLNK, IFSOCK, IFMT, IFMT_UNKNOWN, READDIR_CALLED, LSTAT_CALLED, ENOTDIR, ENOENT, ENOREADLINK, ENOREALPATH, ENOCHILD, TYPEMASK, entToType, normalizeCache, normalize2, normalizeNocaseCache, normalizeNocase, ResolveCache, ChildrenCache, setAsCwd, PathBase, PathWin32, PathPosix, PathScurryBase, PathScurryWin32, PathScurryPosix, PathScurryDarwin, Path, PathScurry;
|
|
479678
481778
|
var init_esm4 = __esm({
|
|
479679
481779
|
"node_modules/path-scurry/dist/esm/index.js"() {
|
|
479680
481780
|
init_esm2();
|
|
@@ -479729,7 +481829,7 @@ var init_esm4 = __esm({
|
|
|
479729
481829
|
TYPEMASK = 1023;
|
|
479730
481830
|
entToType = (s) => s.isFile() ? IFREG : s.isDirectory() ? IFDIR : s.isSymbolicLink() ? IFLNK : s.isCharacterDevice() ? IFCHR : s.isBlockDevice() ? IFBLK : s.isSocket() ? IFSOCK : s.isFIFO() ? IFIFO : UNKNOWN;
|
|
479731
481831
|
normalizeCache = /* @__PURE__ */ new Map();
|
|
479732
|
-
|
|
481832
|
+
normalize2 = (s) => {
|
|
479733
481833
|
const c = normalizeCache.get(s);
|
|
479734
481834
|
if (c)
|
|
479735
481835
|
return c;
|
|
@@ -479742,7 +481842,7 @@ var init_esm4 = __esm({
|
|
|
479742
481842
|
const c = normalizeNocaseCache.get(s);
|
|
479743
481843
|
if (c)
|
|
479744
481844
|
return c;
|
|
479745
|
-
const n =
|
|
481845
|
+
const n = normalize2(s.toLowerCase());
|
|
479746
481846
|
normalizeNocaseCache.set(s, n);
|
|
479747
481847
|
return n;
|
|
479748
481848
|
};
|
|
@@ -479909,7 +482009,7 @@ var init_esm4 = __esm({
|
|
|
479909
482009
|
*/
|
|
479910
482010
|
constructor(name15, type = UNKNOWN, root2, roots, nocase, children, opts) {
|
|
479911
482011
|
this.name = name15;
|
|
479912
|
-
this.#matchName = nocase ? normalizeNocase(name15) :
|
|
482012
|
+
this.#matchName = nocase ? normalizeNocase(name15) : normalize2(name15);
|
|
479913
482013
|
this.#type = type & TYPEMASK;
|
|
479914
482014
|
this.nocase = nocase;
|
|
479915
482015
|
this.roots = roots;
|
|
@@ -480002,7 +482102,7 @@ var init_esm4 = __esm({
|
|
|
480002
482102
|
return this.parent || this;
|
|
480003
482103
|
}
|
|
480004
482104
|
const children = this.children();
|
|
480005
|
-
const name15 = this.nocase ? normalizeNocase(pathPart) :
|
|
482105
|
+
const name15 = this.nocase ? normalizeNocase(pathPart) : normalize2(pathPart);
|
|
480006
482106
|
for (const p of children) {
|
|
480007
482107
|
if (p.#matchName === name15) {
|
|
480008
482108
|
return p;
|
|
@@ -480247,7 +482347,7 @@ var init_esm4 = __esm({
|
|
|
480247
482347
|
* directly.
|
|
480248
482348
|
*/
|
|
480249
482349
|
isNamed(n) {
|
|
480250
|
-
return !this.nocase ? this.#matchName ===
|
|
482350
|
+
return !this.nocase ? this.#matchName === normalize2(n) : this.#matchName === normalizeNocase(n);
|
|
480251
482351
|
}
|
|
480252
482352
|
/**
|
|
480253
482353
|
* Return the Path object corresponding to the target of a symbolic link.
|
|
@@ -480386,7 +482486,7 @@ var init_esm4 = __esm({
|
|
|
480386
482486
|
#readdirMaybePromoteChild(e, c) {
|
|
480387
482487
|
for (let p = c.provisional; p < c.length; p++) {
|
|
480388
482488
|
const pchild = c[p];
|
|
480389
|
-
const name15 = this.nocase ? normalizeNocase(e.name) :
|
|
482489
|
+
const name15 = this.nocase ? normalizeNocase(e.name) : normalize2(e.name);
|
|
480390
482490
|
if (name15 !== pchild.#matchName) {
|
|
480391
482491
|
continue;
|
|
480392
482492
|
}
|
|
@@ -501703,7 +503803,7 @@ var init_graph_builder = __esm({
|
|
|
501703
503803
|
applyLinkStyles() {
|
|
501704
503804
|
if (!this.pendingLinkStyles.length || !this.edges.length)
|
|
501705
503805
|
return;
|
|
501706
|
-
const
|
|
503806
|
+
const normalize4 = (s) => {
|
|
501707
503807
|
const out = {};
|
|
501708
503808
|
for (const [kRaw, vRaw] of Object.entries(s)) {
|
|
501709
503809
|
const k = kRaw.trim().toLowerCase();
|
|
@@ -501724,7 +503824,7 @@ var init_graph_builder = __esm({
|
|
|
501724
503824
|
return out;
|
|
501725
503825
|
};
|
|
501726
503826
|
for (const cmd of this.pendingLinkStyles) {
|
|
501727
|
-
const style =
|
|
503827
|
+
const style = normalize4(cmd.props);
|
|
501728
503828
|
for (const idx of cmd.indices) {
|
|
501729
503829
|
if (idx >= 0 && idx < this.edges.length) {
|
|
501730
503830
|
const e = this.edges[idx];
|
|
@@ -508991,7 +511091,7 @@ var require_layout = __commonJS({
|
|
|
508991
511091
|
"use strict";
|
|
508992
511092
|
var _ = require_lodash2();
|
|
508993
511093
|
var acyclic = require_acyclic();
|
|
508994
|
-
var
|
|
511094
|
+
var normalize4 = require_normalize();
|
|
508995
511095
|
var rank = require_rank();
|
|
508996
511096
|
var normalizeRanks = require_util().normalizeRanks;
|
|
508997
511097
|
var parentDummyChains = require_parent_dummy_chains();
|
|
@@ -509053,7 +511153,7 @@ var require_layout = __commonJS({
|
|
|
509053
511153
|
removeEdgeLabelProxies(g);
|
|
509054
511154
|
});
|
|
509055
511155
|
time3(" normalize.run", function() {
|
|
509056
|
-
|
|
511156
|
+
normalize4.run(g);
|
|
509057
511157
|
});
|
|
509058
511158
|
time3(" parentDummyChains", function() {
|
|
509059
511159
|
parentDummyChains(g);
|
|
@@ -509080,7 +511180,7 @@ var require_layout = __commonJS({
|
|
|
509080
511180
|
removeBorderNodes(g);
|
|
509081
511181
|
});
|
|
509082
511182
|
time3(" normalize.undo", function() {
|
|
509083
|
-
|
|
511183
|
+
normalize4.undo(g);
|
|
509084
511184
|
});
|
|
509085
511185
|
time3(" fixupEdgeLabelCoords", function() {
|
|
509086
511186
|
fixupEdgeLabelCoords(g);
|
|
@@ -515451,8 +517551,8 @@ var require_resolve = __commonJS({
|
|
|
515451
517551
|
}
|
|
515452
517552
|
return count;
|
|
515453
517553
|
}
|
|
515454
|
-
function getFullPath(resolver, id = "",
|
|
515455
|
-
if (
|
|
517554
|
+
function getFullPath(resolver, id = "", normalize4) {
|
|
517555
|
+
if (normalize4 !== false)
|
|
515456
517556
|
id = normalizeId(id);
|
|
515457
517557
|
const p = resolver.parse(id);
|
|
515458
517558
|
return _getFullPath(resolver, p);
|
|
@@ -516792,7 +518892,7 @@ var require_fast_uri = __commonJS({
|
|
|
516792
518892
|
"use strict";
|
|
516793
518893
|
var { normalizeIPv6, removeDotSegments, recomposeAuthority, normalizeComponentEncoding, isIPv4, nonSimpleDomain } = require_utils();
|
|
516794
518894
|
var { SCHEMES, getSchemeHandler } = require_schemes();
|
|
516795
|
-
function
|
|
518895
|
+
function normalize4(uri, options) {
|
|
516796
518896
|
if (typeof uri === "string") {
|
|
516797
518897
|
uri = /** @type {T} */
|
|
516798
518898
|
serialize(parse11(uri, options), options);
|
|
@@ -517028,7 +519128,7 @@ var require_fast_uri = __commonJS({
|
|
|
517028
519128
|
}
|
|
517029
519129
|
var fastUri = {
|
|
517030
519130
|
SCHEMES,
|
|
517031
|
-
normalize:
|
|
519131
|
+
normalize: normalize4,
|
|
517032
519132
|
resolve: resolve9,
|
|
517033
519133
|
resolveComponent,
|
|
517034
519134
|
equal,
|
|
@@ -521235,9 +523335,9 @@ If the solution is clear, you can jump to implementation right away. If not, ask
|
|
|
521235
523335
|
- Do not add code comments unless the logic is genuinely complex and non-obvious.
|
|
521236
523336
|
|
|
521237
523337
|
# Before Implementation
|
|
521238
|
-
-
|
|
521239
|
-
-
|
|
521240
|
-
-
|
|
523338
|
+
- Read tests first \u2014 find existing test files for the module you're changing. They reveal expected behavior, edge cases, and the project's testing patterns.
|
|
523339
|
+
- Read neighboring files \u2014 understand naming conventions, error handling patterns, import style, and existing utilities before creating new ones.
|
|
523340
|
+
- Trace the call chain \u2014 follow how the code you're changing is called and what depends on it. Check interfaces, types, and consumers.
|
|
521241
523341
|
- Focus on backward compatibility
|
|
521242
523342
|
- Consider scalability, maintainability, and extensibility in your analysis
|
|
521243
523343
|
|
|
@@ -521262,6 +523362,20 @@ Before building or testing, determine the project's toolchain:
|
|
|
521262
523362
|
- Read README for build/test instructions if the above are unclear
|
|
521263
523363
|
- Common patterns: \`make build\`/\`make test\`, \`npm run build\`/\`npm test\`, \`cargo build\`/\`cargo test\`, \`go build ./...\`/\`go test ./...\`, \`python -m pytest\`
|
|
521264
523364
|
|
|
523365
|
+
# File Editing Rules
|
|
523366
|
+
You have access to the \`edit\`, \`create\`, and \`multi_edit\` tools for modifying files. You MUST use these tools for ALL code changes. They are purpose-built, atomic, and safe.
|
|
523367
|
+
|
|
523368
|
+
DO NOT use sed, awk, echo/cat redirection, or heredocs to modify source code. These commands cause real damage in practice: truncated lines, duplicate code blocks, broken syntax. Every bad edit wastes iterations on fix-up commits.
|
|
523369
|
+
|
|
523370
|
+
Use the right tool:
|
|
523371
|
+
1. To MODIFY existing code \u2192 \`edit\` tool (old_string \u2192 new_string, or start_line/end_line)
|
|
523372
|
+
2. To CREATE a new file \u2192 \`create\` tool
|
|
523373
|
+
3. To CHANGE multiple files at once \u2192 \`multi_edit\` tool
|
|
523374
|
+
4. To READ code \u2192 \`extract\` or \`search\` tools
|
|
523375
|
+
5. If \`edit\` fails with "file has not been read yet" \u2192 use \`extract\` with the EXACT same file path you will pass to \`edit\`. Relative vs absolute path mismatch causes this error. Use the same path format consistently. If it still fails, use bash \`cat\` to read the file, then use \`create\` to write the entire modified file. Do NOT fall back to sed.
|
|
523376
|
+
|
|
523377
|
+
Bash is fine for: formatters (gofmt, prettier, black), build/test/lint commands, git operations, and read-only file inspection (cat, head, tail). sed/awk should ONLY be used for trivial non-code tasks (e.g., config file tweaks) where the replacement is a simple literal string swap.
|
|
523378
|
+
|
|
521265
523379
|
# During Implementation
|
|
521266
523380
|
- Always create a new branch before making changes to the codebase.
|
|
521267
523381
|
- Fix problems at the root cause, not with surface-level patches. Prefer general solutions over special cases.
|
|
@@ -521288,6 +523402,22 @@ Before committing or creating a PR, run through this checklist:
|
|
|
521288
523402
|
|
|
521289
523403
|
Do NOT skip verification. Do NOT proceed to PR creation with a broken build or failing tests.
|
|
521290
523404
|
|
|
523405
|
+
# Output Integrity
|
|
523406
|
+
Your final output MUST accurately reflect what ACTUALLY happened. Do NOT fabricate, hallucinate, or report aspirational results.
|
|
523407
|
+
|
|
523408
|
+
- Only report PR URLs you actually created or updated with \`gh pr create\` or \`git push\`. If you checked out an existing PR but did NOT push changes to it, do NOT claim you updated it.
|
|
523409
|
+
- Describe what you ACTUALLY DID, not what you planned or intended to do. If you ran out of iterations, say so. If tests failed, say so.
|
|
523410
|
+
- Only list files you actually modified AND committed.
|
|
523411
|
+
- If you could not complete the task \u2014 ran out of iterations, tests failed, build broken, push rejected \u2014 report the real reason honestly.
|
|
523412
|
+
|
|
523413
|
+
NEVER claim success when:
|
|
523414
|
+
- You did not run \`git push\` successfully
|
|
523415
|
+
- Tests failed and you did not fix them
|
|
523416
|
+
- You hit the iteration limit before completing the work
|
|
523417
|
+
- You only analyzed/investigated but did not implement changes
|
|
523418
|
+
|
|
523419
|
+
A false success report is WORSE than an honest failure \u2014 it misleads the user into thinking work is done when it is not.
|
|
523420
|
+
|
|
521291
523421
|
# GitHub Integration
|
|
521292
523422
|
- Use the \`gh\` CLI for all GitHub operations: issues, pull requests, checks, releases.
|
|
521293
523423
|
- To view issues or PRs: \`gh issue view <number>\`, \`gh pr view <number>\`.
|
|
@@ -549490,7 +551620,7 @@ var init_vercel = __esm({
|
|
|
549490
551620
|
name: "search",
|
|
549491
551621
|
description: searchDelegate ? searchDelegateDescription : searchDescription,
|
|
549492
551622
|
inputSchema: searchSchema,
|
|
549493
|
-
execute: async ({ query: searchQuery, path: path9, allow_tests, exact, maxTokens: paramMaxTokens, language, session, nextPage }) => {
|
|
551623
|
+
execute: async ({ query: searchQuery, path: path9, allow_tests, exact, maxTokens: paramMaxTokens, language, session, nextPage, workingDirectory }) => {
|
|
549494
551624
|
if (!exact && searchQuery) {
|
|
549495
551625
|
const originalQuery = searchQuery;
|
|
549496
551626
|
searchQuery = autoQuoteSearchTerms(searchQuery);
|
|
@@ -549499,18 +551629,19 @@ var init_vercel = __esm({
|
|
|
549499
551629
|
}
|
|
549500
551630
|
}
|
|
549501
551631
|
const effectiveMaxTokens = paramMaxTokens || maxTokens;
|
|
551632
|
+
const effectiveSearchCwd = workingDirectory || options.cwd || ".";
|
|
549502
551633
|
let searchPaths;
|
|
549503
551634
|
if (path9) {
|
|
549504
|
-
searchPaths = parseAndResolvePaths(path9,
|
|
551635
|
+
searchPaths = parseAndResolvePaths(path9, effectiveSearchCwd);
|
|
549505
551636
|
}
|
|
549506
551637
|
if (!searchPaths || searchPaths.length === 0) {
|
|
549507
|
-
searchPaths = [
|
|
551638
|
+
searchPaths = [effectiveSearchCwd];
|
|
549508
551639
|
}
|
|
549509
551640
|
const searchPath = searchPaths.join(" ");
|
|
549510
551641
|
const searchOptions = {
|
|
549511
551642
|
query: searchQuery,
|
|
549512
551643
|
path: searchPath,
|
|
549513
|
-
cwd:
|
|
551644
|
+
cwd: effectiveSearchCwd,
|
|
549514
551645
|
// Working directory for resolving relative paths
|
|
549515
551646
|
allowTests: allow_tests ?? true,
|
|
549516
551647
|
exact,
|
|
@@ -549561,7 +551692,7 @@ var init_vercel = __esm({
|
|
|
549561
551692
|
try {
|
|
549562
551693
|
const result = maybeAnnotate(await runRawSearch());
|
|
549563
551694
|
if (options.fileTracker && typeof result === "string") {
|
|
549564
|
-
options.fileTracker.trackFilesFromOutput(result,
|
|
551695
|
+
options.fileTracker.trackFilesFromOutput(result, effectiveSearchCwd).catch(() => {
|
|
549565
551696
|
});
|
|
549566
551697
|
}
|
|
549567
551698
|
return result;
|
|
@@ -549614,7 +551745,7 @@ var init_vercel = __esm({
|
|
|
549614
551745
|
}
|
|
549615
551746
|
const fallbackResult = maybeAnnotate(await runRawSearch());
|
|
549616
551747
|
if (options.fileTracker && typeof fallbackResult === "string") {
|
|
549617
|
-
options.fileTracker.trackFilesFromOutput(fallbackResult,
|
|
551748
|
+
options.fileTracker.trackFilesFromOutput(fallbackResult, effectiveSearchCwd).catch(() => {
|
|
549618
551749
|
});
|
|
549619
551750
|
}
|
|
549620
551751
|
return fallbackResult;
|
|
@@ -549677,7 +551808,7 @@ var init_vercel = __esm({
|
|
|
549677
551808
|
try {
|
|
549678
551809
|
const fallbackResult2 = maybeAnnotate(await runRawSearch());
|
|
549679
551810
|
if (options.fileTracker && typeof fallbackResult2 === "string") {
|
|
549680
|
-
options.fileTracker.trackFilesFromOutput(fallbackResult2,
|
|
551811
|
+
options.fileTracker.trackFilesFromOutput(fallbackResult2, effectiveSearchCwd).catch(() => {
|
|
549681
551812
|
});
|
|
549682
551813
|
}
|
|
549683
551814
|
return fallbackResult2;
|
|
@@ -549731,9 +551862,9 @@ var init_vercel = __esm({
|
|
|
549731
551862
|
name: "extract",
|
|
549732
551863
|
description: extractDescription,
|
|
549733
551864
|
inputSchema: extractSchema,
|
|
549734
|
-
execute: async ({ targets, input_content, line, end_line, allow_tests, context_lines, format }) => {
|
|
551865
|
+
execute: async ({ targets, input_content, line, end_line, allow_tests, context_lines, format, workingDirectory }) => {
|
|
549735
551866
|
try {
|
|
549736
|
-
const effectiveCwd = options.cwd || ".";
|
|
551867
|
+
const effectiveCwd = workingDirectory || options.cwd || ".";
|
|
549737
551868
|
if (debug) {
|
|
549738
551869
|
if (targets) {
|
|
549739
551870
|
console.error(`Executing extract with targets: "${targets}", cwd: "${effectiveCwd}", context lines: ${context_lines || 10}`);
|
|
@@ -608863,7 +610994,7 @@ module.exports = /*#__PURE__*/JSON.parse('["aaa","aarp","abb","abbott","abbvie",
|
|
|
608863
610994
|
/***/ ((module) => {
|
|
608864
610995
|
|
|
608865
610996
|
"use strict";
|
|
608866
|
-
module.exports = /*#__PURE__*/JSON.parse('{"name":"@probelabs/visor","version":"0.1.
|
|
610997
|
+
module.exports = /*#__PURE__*/JSON.parse('{"name":"@probelabs/visor","version":"0.1.42","main":"dist/index.js","bin":{"visor":"./dist/index.js"},"exports":{".":{"require":"./dist/index.js","import":"./dist/index.js"},"./sdk":{"types":"./dist/sdk/sdk.d.ts","import":"./dist/sdk/sdk.mjs","require":"./dist/sdk/sdk.js"},"./cli":{"require":"./dist/index.js"}},"files":["dist/","defaults/","action.yml","README.md","LICENSE"],"publishConfig":{"access":"public","registry":"https://registry.npmjs.org/"},"scripts":{"build:cli":"ncc build src/index.ts -o dist && cp -r defaults dist/ && cp -r output dist/ && cp -r docs dist/ && cp -r examples dist/ && cp -r src/debug-visualizer/ui dist/debug-visualizer/ && node scripts/inject-version.js && echo \'#!/usr/bin/env node\' | cat - dist/index.js > temp && mv temp dist/index.js && chmod +x dist/index.js","build:sdk":"tsup src/sdk.ts --dts --sourcemap --format esm,cjs --out-dir dist/sdk","build":"./scripts/build-oss.sh","build:ee":"npm run build:cli && npm run build:sdk","test":"jest && npm run test:yaml","test:unit":"jest","prepublishOnly":"npm run build","test:watch":"jest --watch","test:coverage":"jest --coverage","test:ee":"jest --testPathPatterns=\'tests/ee\' --testPathIgnorePatterns=\'/node_modules/\' --no-coverage","test:manual:bash":"RUN_MANUAL_TESTS=true jest tests/manual/bash-config-manual.test.ts","lint":"eslint src tests --ext .ts","lint:fix":"eslint src tests --ext .ts --fix","format":"prettier --write src tests","format:check":"prettier --check src tests","clean":"","clean:traces":"node scripts/clean-traces.js","prebuild":"npm run clean && node scripts/generate-config-schema.js","pretest":"npm run clean:traces && node scripts/generate-config-schema.js && npm run build:cli","pretest:unit":"npm run clean:traces && node scripts/generate-config-schema.js && npm run build:cli","test:with-build":"npm run build:cli && jest","test:yaml":"node dist/index.js test --progress compact","test:yaml:parallel":"node dist/index.js test 
--progress compact --max-parallel 4","prepare":"husky","pre-commit":"lint-staged","deploy:site":"cd site && npx wrangler pages deploy . --project-name=visor-site --commit-dirty=true","deploy:worker":"npx wrangler deploy","deploy":"npm run deploy:site && npm run deploy:worker","publish:ee":"./scripts/publish-ee.sh","release":"./scripts/release.sh","release:patch":"./scripts/release.sh patch","release:minor":"./scripts/release.sh minor","release:major":"./scripts/release.sh major","release:prerelease":"./scripts/release.sh prerelease","docs:validate":"node scripts/validate-readme-links.js","workshop:setup":"npm install -D reveal-md@6.1.2","workshop:serve":"cd workshop && reveal-md slides.md -w","workshop:export":"reveal-md workshop/slides.md --static workshop/build","workshop:pdf":"reveal-md workshop/slides.md --print workshop/Visor-Workshop.pdf --print-size letter","workshop:pdf:ci":"reveal-md workshop/slides.md --print workshop/Visor-Workshop.pdf --print-size letter --puppeteer-launch-args=\\"--no-sandbox --disable-dev-shm-usage\\"","workshop:pdf:a4":"reveal-md workshop/slides.md --print workshop/Visor-Workshop-A4.pdf --print-size A4","workshop:build":"npm run workshop:export && npm run workshop:pdf","simulate:issue":"TS_NODE_TRANSPILE_ONLY=1 ts-node scripts/simulate-gh-run.ts --event issues --action opened --debug","simulate:comment":"TS_NODE_TRANSPILE_ONLY=1 ts-node scripts/simulate-gh-run.ts --event issue_comment --action created --debug"},"keywords":["code-review","ai","github-action","cli","pr-review","visor"],"author":"Probe Labs","license":"MIT","description":"AI workflow engine for code review, assistants, and automation — orchestrate checks, MCP tools, and AI providers with YAML-driven 
pipelines","repository":{"type":"git","url":"git+https://github.com/probelabs/visor.git"},"bugs":{"url":"https://github.com/probelabs/visor/issues"},"homepage":"https://github.com/probelabs/visor#readme","dependencies":{"@actions/core":"^1.11.1","@apidevtools/swagger-parser":"^12.1.0","@grammyjs/runner":"^2.0.3","@modelcontextprotocol/sdk":"^1.25.3","@nyariv/sandboxjs":"github:probelabs/SandboxJS#23c4bb611f7d05f3cb8c523917b5f57103e48108","@octokit/action":"^8.0.2","@octokit/auth-app":"^8.1.0","@octokit/core":"^7.0.3","@octokit/rest":"^22.0.0","@opentelemetry/api":"^1.9.0","@opentelemetry/api-logs":"^0.203.0","@opentelemetry/core":"^1.30.1","@opentelemetry/exporter-logs-otlp-http":"^0.203.0","@opentelemetry/exporter-metrics-otlp-http":"^0.203.0","@opentelemetry/exporter-trace-otlp-grpc":"^0.203.0","@opentelemetry/exporter-trace-otlp-http":"^0.203.0","@opentelemetry/instrumentation":"^0.203.0","@opentelemetry/resources":"^1.30.1","@opentelemetry/sdk-logs":"^0.203.0","@opentelemetry/sdk-metrics":"^1.30.1","@opentelemetry/sdk-node":"^0.203.0","@opentelemetry/sdk-trace-base":"^1.30.1","@opentelemetry/semantic-conventions":"^1.30.1","@probelabs/probe":"^0.6.0-rc293","@types/commander":"^2.12.0","@types/uuid":"^10.0.0","acorn":"^8.16.0","acorn-walk":"^8.3.5","ajv":"^8.17.1","ajv-formats":"^3.0.1","better-sqlite3":"^11.0.0","blessed":"^0.1.81","botbuilder":"^4.23.3","botframework-connector":"^4.23.3","cli-table3":"^0.6.5","commander":"^14.0.0","deepmerge":"^4.3.1","dotenv":"^17.2.3","grammy":"^1.41.1","ignore":"^7.0.5","imapflow":"^1.2.12","js-yaml":"^4.1.0","jsonpath-plus":"^10.4.0","liquidjs":"^10.21.1","mailparser":"^3.9.3","minimatch":"^10.2.2","node-cron":"^3.0.3","nodemailer":"^8.0.1","open":"^9.1.0","resend":"^6.9.3","simple-git":"^3.28.0","uuid":"^11.1.0","ws":"^8.18.3"},"optionalDependencies":{"@anthropic/claude-code-sdk":"npm:null@*","@open-policy-agent/opa-wasm":"^1.10.0","knex":"^3.1.0","mysql2":"^3.11.0","pg":"^8.13.0","tedious":"^19.0.0"},"devDependencies":{"@
eslint/js":"^9.34.0","@kie/act-js":"^2.6.2","@kie/mock-github":"^2.0.1","@swc/core":"^1.13.2","@swc/jest":"^0.2.37","@types/better-sqlite3":"^7.6.0","@types/blessed":"^0.1.27","@types/jest":"^30.0.0","@types/js-yaml":"^4.0.9","@types/mailparser":"^3.4.6","@types/node":"^24.3.0","@types/node-cron":"^3.0.11","@types/nodemailer":"^7.0.11","@types/ws":"^8.18.1","@typescript-eslint/eslint-plugin":"^8.42.0","@typescript-eslint/parser":"^8.42.0","@vercel/ncc":"^0.38.4","eslint":"^9.34.0","eslint-config-prettier":"^10.1.8","eslint-plugin-prettier":"^5.5.4","husky":"^9.1.7","jest":"^30.1.3","lint-staged":"^16.1.6","prettier":"^3.6.2","reveal-md":"^6.1.2","ts-json-schema-generator":"^1.5.1","ts-node":"^10.9.2","tsup":"^8.5.0","typescript":"^5.9.2","wrangler":"^3.0.0"},"peerDependenciesMeta":{"@anthropic/claude-code-sdk":{"optional":true}},"directories":{"test":"tests"},"lint-staged":{"src/**/*.{ts,js}":["eslint --fix","prettier --write"],"tests/**/*.{ts,js}":["eslint --fix","prettier --write"],"*.{json,md,yml,yaml}":["prettier --write"]}}');
|
|
608867
610998
|
|
|
608868
610999
|
/***/ })
|
|
608869
611000
|
|