@probelabs/visor 0.1.170 → 0.1.171-ee
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/defaults/code-talk.yaml +39 -238
- package/defaults/intent-router.yaml +1 -0
- package/dist/agent-protocol/task-store.d.ts +5 -0
- package/dist/agent-protocol/task-store.d.ts.map +1 -1
- package/dist/agent-protocol/tasks-cli-handler.d.ts.map +1 -1
- package/dist/agent-protocol/track-execution.d.ts +34 -0
- package/dist/agent-protocol/track-execution.d.ts.map +1 -0
- package/dist/cli-main.d.ts.map +1 -1
- package/dist/cli.d.ts.map +1 -1
- package/dist/defaults/code-talk.yaml +39 -238
- package/dist/defaults/intent-router.yaml +1 -0
- package/dist/frontends/host.d.ts +2 -0
- package/dist/frontends/host.d.ts.map +1 -1
- package/dist/generated/config-schema.d.ts +10 -6
- package/dist/generated/config-schema.d.ts.map +1 -1
- package/dist/generated/config-schema.json +10 -6
- package/dist/index.js +2534 -116
- package/dist/scheduler/scheduler.d.ts +4 -0
- package/dist/scheduler/scheduler.d.ts.map +1 -1
- package/dist/sdk/{a2a-frontend-YTXQGUDH.mjs → a2a-frontend-GUEGI5SX.mjs} +20 -3
- package/dist/sdk/a2a-frontend-GUEGI5SX.mjs.map +1 -0
- package/dist/sdk/{check-provider-registry-5CMLUEFG.mjs → check-provider-registry-7P2QIKJR.mjs} +8 -8
- package/dist/sdk/{check-provider-registry-STRAOYRJ.mjs → check-provider-registry-ZUU7KSKR.mjs} +7 -7
- package/dist/sdk/{chunk-WNLCRRQO.mjs → chunk-5SBX4KLG.mjs} +2 -2
- package/dist/sdk/{chunk-2CNT2EB3.mjs → chunk-6FXVWL6M.mjs} +3 -3
- package/dist/sdk/{chunk-KFKHU6CM.mjs → chunk-6VVXKXTI.mjs} +19 -2
- package/dist/sdk/chunk-6VVXKXTI.mjs.map +1 -0
- package/dist/sdk/{chunk-SVBF7Y2R.mjs → chunk-A2YVTICA.mjs} +11 -7
- package/dist/sdk/chunk-A2YVTICA.mjs.map +1 -0
- package/dist/sdk/{chunk-DLO46M5M.mjs → chunk-CXA3WUOB.mjs} +62 -23
- package/dist/sdk/chunk-CXA3WUOB.mjs.map +1 -0
- package/dist/sdk/{chunk-62PXPI6Q.mjs → chunk-GGNR347O.mjs} +8 -2
- package/dist/sdk/chunk-GGNR347O.mjs.map +1 -0
- package/dist/sdk/{chunk-NYQTQYGU.mjs → chunk-YCPJBOJB.mjs} +68 -29
- package/dist/sdk/chunk-YCPJBOJB.mjs.map +1 -0
- package/dist/sdk/{config-IHECYTNT.mjs → config-6GWD673K.mjs} +2 -2
- package/dist/sdk/{failure-condition-evaluator-NJO6DSL4.mjs → failure-condition-evaluator-5HRNHZCC.mjs} +4 -3
- package/dist/sdk/{github-frontend-BAPXDLBB.mjs → github-frontend-ZZRU6P43.mjs} +7 -7
- package/dist/sdk/{host-6HV5FMD7.mjs → host-A7UNRBQU.mjs} +3 -3
- package/dist/sdk/host-A7UNRBQU.mjs.map +1 -0
- package/dist/sdk/{host-K6IZWJG3.mjs → host-ECXTIDWG.mjs} +3 -3
- package/dist/sdk/host-ECXTIDWG.mjs.map +1 -0
- package/dist/sdk/knex-store-CRORFJE6.mjs +527 -0
- package/dist/sdk/knex-store-CRORFJE6.mjs.map +1 -0
- package/dist/sdk/loader-QMJFFST6.mjs +89 -0
- package/dist/sdk/loader-QMJFFST6.mjs.map +1 -0
- package/dist/sdk/opa-policy-engine-S2S2ULEI.mjs +655 -0
- package/dist/sdk/opa-policy-engine-S2S2ULEI.mjs.map +1 -0
- package/dist/sdk/{routing-RWZEXSRZ.mjs → routing-AMRQYI7J.mjs} +5 -4
- package/dist/sdk/{schedule-tool-JCKV47FU.mjs → schedule-tool-DGVJDHJM.mjs} +7 -7
- package/dist/sdk/{schedule-tool-TGWPINHO.mjs → schedule-tool-I6VG3ZVA.mjs} +8 -8
- package/dist/sdk/{schedule-tool-handler-OEBLE5AB.mjs → schedule-tool-handler-DFUC5S55.mjs} +8 -8
- package/dist/sdk/{schedule-tool-handler-UQWDPFP6.mjs → schedule-tool-handler-XLCSBU3E.mjs} +7 -7
- package/dist/sdk/sdk.d.mts +4 -0
- package/dist/sdk/sdk.d.ts +4 -0
- package/dist/sdk/sdk.js +1926 -408
- package/dist/sdk/sdk.js.map +1 -1
- package/dist/sdk/sdk.mjs +7 -7
- package/dist/sdk/{trace-helpers-ZYN23GBG.mjs → trace-helpers-4ZBZWH5W.mjs} +3 -2
- package/dist/sdk/track-execution-VWLQIGY7.mjs +82 -0
- package/dist/sdk/track-execution-VWLQIGY7.mjs.map +1 -0
- package/dist/sdk/validator-XTZJZZJH.mjs +134 -0
- package/dist/sdk/validator-XTZJZZJH.mjs.map +1 -0
- package/dist/sdk/{workflow-check-provider-OAOD3A5U.mjs → workflow-check-provider-AKXDIL2Y.mjs} +8 -8
- package/dist/sdk/{workflow-check-provider-VDSZR7Y5.mjs → workflow-check-provider-KQNLEQEY.mjs} +7 -7
- package/dist/slack/socket-runner.d.ts +4 -0
- package/dist/slack/socket-runner.d.ts.map +1 -1
- package/dist/telemetry/trace-helpers.d.ts.map +1 -1
- package/dist/tui/chat-runner.d.ts +4 -0
- package/dist/tui/chat-runner.d.ts.map +1 -1
- package/dist/types/cli.d.ts +2 -0
- package/dist/types/cli.d.ts.map +1 -1
- package/dist/types/config.d.ts +4 -0
- package/dist/types/config.d.ts.map +1 -1
- package/dist/utils/instance-id.d.ts +9 -0
- package/dist/utils/instance-id.d.ts.map +1 -0
- package/package.json +2 -2
- package/dist/output/traces/run-2026-03-07T15-43-18-430Z.ndjson +0 -138
- package/dist/output/traces/run-2026-03-07T15-43-56-196Z.ndjson +0 -2266
- package/dist/sdk/a2a-frontend-IPLHACI6.mjs +0 -1605
- package/dist/sdk/a2a-frontend-IPLHACI6.mjs.map +0 -1
- package/dist/sdk/a2a-frontend-YTXQGUDH.mjs.map +0 -1
- package/dist/sdk/check-provider-registry-T5FWS4SW.mjs +0 -30
- package/dist/sdk/chunk-47WAHGHK.mjs +0 -1502
- package/dist/sdk/chunk-47WAHGHK.mjs.map +0 -1
- package/dist/sdk/chunk-62PXPI6Q.mjs.map +0 -1
- package/dist/sdk/chunk-DLO46M5M.mjs.map +0 -1
- package/dist/sdk/chunk-FTUGQP5L.mjs +0 -739
- package/dist/sdk/chunk-KFKHU6CM.mjs.map +0 -1
- package/dist/sdk/chunk-LB77GR4Q.mjs +0 -44771
- package/dist/sdk/chunk-LB77GR4Q.mjs.map +0 -1
- package/dist/sdk/chunk-NYQTQYGU.mjs.map +0 -1
- package/dist/sdk/chunk-SVBF7Y2R.mjs.map +0 -1
- package/dist/sdk/chunk-WNLCRRQO.mjs.map +0 -1
- package/dist/sdk/chunk-ZM7ALGTE.mjs +0 -443
- package/dist/sdk/chunk-ZM7ALGTE.mjs.map +0 -1
- package/dist/sdk/failure-condition-evaluator-T67YFO2Z.mjs +0 -17
- package/dist/sdk/github-frontend-WPTKI4AY.mjs +0 -1386
- package/dist/sdk/github-frontend-WPTKI4AY.mjs.map +0 -1
- package/dist/sdk/host-6HV5FMD7.mjs.map +0 -1
- package/dist/sdk/host-K6IZWJG3.mjs.map +0 -1
- package/dist/sdk/routing-SAGHEUOA.mjs +0 -25
- package/dist/sdk/schedule-tool-H4G5ITNL.mjs +0 -36
- package/dist/sdk/schedule-tool-handler-UQWDPFP6.mjs.map +0 -1
- package/dist/sdk/schedule-tool-handler-ZDAD6SWM.mjs +0 -40
- package/dist/sdk/schedule-tool-handler-ZDAD6SWM.mjs.map +0 -1
- package/dist/sdk/trace-helpers-M7RVAZQ2.mjs +0 -25
- package/dist/sdk/trace-helpers-M7RVAZQ2.mjs.map +0 -1
- package/dist/sdk/trace-helpers-ZYN23GBG.mjs.map +0 -1
- package/dist/sdk/workflow-check-provider-FAO4AUGB.mjs +0 -30
- package/dist/sdk/workflow-check-provider-FAO4AUGB.mjs.map +0 -1
- package/dist/sdk/workflow-check-provider-OAOD3A5U.mjs.map +0 -1
- package/dist/sdk/workflow-check-provider-VDSZR7Y5.mjs.map +0 -1
- package/dist/traces/run-2026-03-07T15-43-18-430Z.ndjson +0 -138
- package/dist/traces/run-2026-03-07T15-43-56-196Z.ndjson +0 -2266
- /package/dist/sdk/{check-provider-registry-5CMLUEFG.mjs.map → check-provider-registry-7P2QIKJR.mjs.map} +0 -0
- /package/dist/sdk/{check-provider-registry-STRAOYRJ.mjs.map → check-provider-registry-ZUU7KSKR.mjs.map} +0 -0
- /package/dist/sdk/{chunk-FTUGQP5L.mjs.map → chunk-5SBX4KLG.mjs.map} +0 -0
- /package/dist/sdk/{chunk-2CNT2EB3.mjs.map → chunk-6FXVWL6M.mjs.map} +0 -0
- /package/dist/sdk/{check-provider-registry-T5FWS4SW.mjs.map → config-6GWD673K.mjs.map} +0 -0
- /package/dist/sdk/{config-IHECYTNT.mjs.map → failure-condition-evaluator-5HRNHZCC.mjs.map} +0 -0
- /package/dist/sdk/{github-frontend-BAPXDLBB.mjs.map → github-frontend-ZZRU6P43.mjs.map} +0 -0
- /package/dist/sdk/{failure-condition-evaluator-NJO6DSL4.mjs.map → routing-AMRQYI7J.mjs.map} +0 -0
- /package/dist/sdk/{failure-condition-evaluator-T67YFO2Z.mjs.map → schedule-tool-DGVJDHJM.mjs.map} +0 -0
- /package/dist/sdk/{routing-RWZEXSRZ.mjs.map → schedule-tool-I6VG3ZVA.mjs.map} +0 -0
- /package/dist/sdk/{routing-SAGHEUOA.mjs.map → schedule-tool-handler-DFUC5S55.mjs.map} +0 -0
- /package/dist/sdk/{schedule-tool-H4G5ITNL.mjs.map → schedule-tool-handler-XLCSBU3E.mjs.map} +0 -0
- /package/dist/sdk/{schedule-tool-JCKV47FU.mjs.map → trace-helpers-4ZBZWH5W.mjs.map} +0 -0
- /package/dist/sdk/{schedule-tool-TGWPINHO.mjs.map → workflow-check-provider-AKXDIL2Y.mjs.map} +0 -0
- /package/dist/sdk/{schedule-tool-handler-OEBLE5AB.mjs.map → workflow-check-provider-KQNLEQEY.mjs.map} +0 -0
|
@@ -2,7 +2,7 @@ import {
|
|
|
2
2
|
ConfigManager,
|
|
3
3
|
VALID_EVENT_TRIGGERS,
|
|
4
4
|
init_config
|
|
5
|
-
} from "./chunk-
|
|
5
|
+
} from "./chunk-A2YVTICA.mjs";
|
|
6
6
|
import "./chunk-NCWIZVOT.mjs";
|
|
7
7
|
import "./chunk-LW3INISN.mjs";
|
|
8
8
|
import "./chunk-SZXICFQ3.mjs";
|
|
@@ -13,4 +13,4 @@ export {
|
|
|
13
13
|
ConfigManager,
|
|
14
14
|
VALID_EVENT_TRIGGERS
|
|
15
15
|
};
|
|
16
|
-
//# sourceMappingURL=config-
|
|
16
|
+
//# sourceMappingURL=config-6GWD673K.mjs.map
|
|
@@ -1,8 +1,9 @@
|
|
|
1
1
|
import {
|
|
2
2
|
FailureConditionEvaluator,
|
|
3
3
|
init_failure_condition_evaluator
|
|
4
|
-
} from "./chunk-
|
|
5
|
-
import "./chunk-
|
|
4
|
+
} from "./chunk-5SBX4KLG.mjs";
|
|
5
|
+
import "./chunk-GGNR347O.mjs";
|
|
6
|
+
import "./chunk-6VVXKXTI.mjs";
|
|
6
7
|
import "./chunk-JL7JXCET.mjs";
|
|
7
8
|
import "./chunk-25IC7KXZ.mjs";
|
|
8
9
|
import "./chunk-LW3INISN.mjs";
|
|
@@ -14,4 +15,4 @@ init_failure_condition_evaluator();
|
|
|
14
15
|
export {
|
|
15
16
|
FailureConditionEvaluator
|
|
16
17
|
};
|
|
17
|
-
//# sourceMappingURL=failure-condition-evaluator-
|
|
18
|
+
//# sourceMappingURL=failure-condition-evaluator-5HRNHZCC.mjs.map
|
|
@@ -2,15 +2,15 @@ import {
|
|
|
2
2
|
extractTextFromJson,
|
|
3
3
|
init_json_text_extractor
|
|
4
4
|
} from "./chunk-H5BOW5CR.mjs";
|
|
5
|
-
import {
|
|
6
|
-
generateShortHumanId,
|
|
7
|
-
init_human_id
|
|
8
|
-
} from "./chunk-KFKHU6CM.mjs";
|
|
9
5
|
import {
|
|
10
6
|
failure_condition_evaluator_exports,
|
|
11
7
|
init_failure_condition_evaluator
|
|
12
|
-
} from "./chunk-
|
|
13
|
-
import "./chunk-
|
|
8
|
+
} from "./chunk-5SBX4KLG.mjs";
|
|
9
|
+
import "./chunk-GGNR347O.mjs";
|
|
10
|
+
import {
|
|
11
|
+
generateShortHumanId,
|
|
12
|
+
init_human_id
|
|
13
|
+
} from "./chunk-6VVXKXTI.mjs";
|
|
14
14
|
import "./chunk-JL7JXCET.mjs";
|
|
15
15
|
import "./chunk-25IC7KXZ.mjs";
|
|
16
16
|
import "./chunk-LW3INISN.mjs";
|
|
@@ -1383,4 +1383,4 @@ init_github_frontend();
|
|
|
1383
1383
|
export {
|
|
1384
1384
|
GitHubFrontend
|
|
1385
1385
|
};
|
|
1386
|
-
//# sourceMappingURL=github-frontend-
|
|
1386
|
+
//# sourceMappingURL=github-frontend-ZZRU6P43.mjs.map
|
|
@@ -24,7 +24,7 @@ var init_host = __esm({
|
|
|
24
24
|
const { NdjsonSink } = await import("./ndjson-sink-FD2PSXGD.mjs");
|
|
25
25
|
this.frontends.push(new NdjsonSink(spec.config));
|
|
26
26
|
} else if (spec.name === "github") {
|
|
27
|
-
const { GitHubFrontend } = await import("./github-frontend-
|
|
27
|
+
const { GitHubFrontend } = await import("./github-frontend-ZZRU6P43.mjs");
|
|
28
28
|
this.frontends.push(new GitHubFrontend());
|
|
29
29
|
} else if (spec.name === "slack") {
|
|
30
30
|
const { SlackFrontend } = await import("./slack-frontend-QO7LW5BH.mjs");
|
|
@@ -33,7 +33,7 @@ var init_host = __esm({
|
|
|
33
33
|
const { TuiFrontend } = await import("./tui-frontend-T56PZB67.mjs");
|
|
34
34
|
this.frontends.push(new TuiFrontend(spec.config));
|
|
35
35
|
} else if (spec.name === "a2a") {
|
|
36
|
-
const { A2AFrontend } = await import("./a2a-frontend-
|
|
36
|
+
const { A2AFrontend } = await import("./a2a-frontend-GUEGI5SX.mjs");
|
|
37
37
|
this.frontends.push(new A2AFrontend(spec.config));
|
|
38
38
|
} else {
|
|
39
39
|
this.log.warn(`[FrontendsHost] Unknown frontend '${spec.name}', skipping`);
|
|
@@ -72,4 +72,4 @@ export {
|
|
|
72
72
|
FrontendsHost,
|
|
73
73
|
isActiveFrontend
|
|
74
74
|
};
|
|
75
|
-
//# sourceMappingURL=host-
|
|
75
|
+
//# sourceMappingURL=host-A7UNRBQU.mjs.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"sources":["../../src/frontends/host.ts"],"sourcesContent":["import { EventBus } from '../event-bus/event-bus';\nimport type { TaskStore } from '../agent-protocol/task-store';\n\nexport interface FrontendContext {\n eventBus: EventBus;\n logger: {\n info: (...a: any[]) => void;\n warn: (...a: any[]) => void;\n error: (...a: any[]) => void;\n };\n config: unknown;\n run: {\n runId: string;\n workflowId?: string;\n repo?: { owner: string; name: string };\n pr?: number;\n headSha?: string;\n event?: string;\n actor?: string;\n };\n octokit?: any;\n // Optional webhook context (e.g., Slack Events API payload injected by socket runner)\n webhookContext?: { webhookData?: Map<string, unknown>; eventType?: string };\n // Optional engine reference for frontends that spawn their own execution runs (e.g., A2A)\n engine?: any;\n // Optional full VisorConfig for frontends that need access to checks/workflows\n visorConfig?: any;\n // Optional shared task store for cross-frontend execution tracking (when task_tracking is enabled)\n taskStore?: TaskStore;\n}\n\nexport interface Frontend {\n readonly name: string;\n start(ctx: FrontendContext): Promise<void> | void;\n stop(): Promise<void> | void;\n}\n\n/** Frontends that can trigger their own engine executions (e.g., A2A). */\nexport interface ActiveFrontend extends Frontend {\n setEngine(engine: any): void;\n setVisorConfig(config: any): void;\n}\n\n/** Type guard for ActiveFrontend (duck-typed). 
*/\nexport function isActiveFrontend(f: Frontend): f is ActiveFrontend {\n return (\n typeof (f as any).setEngine === 'function' && typeof (f as any).setVisorConfig === 'function'\n );\n}\n\nexport interface FrontendSpec {\n name: string; // e.g., 'ndjson-sink', 'github', 'slack'\n package?: string; // external package name (future)\n config?: unknown;\n}\n\nexport class FrontendsHost {\n private bus: EventBus;\n private log: FrontendContext['logger'];\n private frontends: Frontend[] = [];\n\n constructor(bus: EventBus, log: FrontendContext['logger']) {\n this.bus = bus;\n this.log = log;\n }\n\n async load(specs: FrontendSpec[]): Promise<void> {\n this.frontends = [];\n for (const spec of specs) {\n if (spec.name === 'ndjson-sink') {\n const { NdjsonSink } = await import('./ndjson-sink');\n this.frontends.push(new NdjsonSink(spec.config));\n } else if (spec.name === 'github') {\n const { GitHubFrontend } = await import('./github-frontend');\n this.frontends.push(new GitHubFrontend());\n } else if (spec.name === 'slack') {\n const { SlackFrontend } = await import('./slack-frontend');\n this.frontends.push(new SlackFrontend(spec.config as any));\n } else if (spec.name === 'tui') {\n const { TuiFrontend } = await import('../tui/tui-frontend');\n this.frontends.push(new TuiFrontend(spec.config as any));\n } else if (spec.name === 'a2a') {\n const { A2AFrontend } = await import('../agent-protocol/a2a-frontend');\n this.frontends.push(new A2AFrontend(spec.config as any));\n } else {\n this.log.warn(`[FrontendsHost] Unknown frontend '${spec.name}', skipping`);\n }\n }\n }\n\n async startAll(ctxFactory: () => FrontendContext): Promise<void> {\n for (const f of this.frontends) {\n try {\n const ctx = ctxFactory();\n // Auto-inject engine/config into active frontends\n if (isActiveFrontend(f)) {\n if (ctx.engine) f.setEngine(ctx.engine);\n if (ctx.visorConfig) f.setVisorConfig(ctx.visorConfig);\n }\n await f.start(ctx);\n this.log.info(`[FrontendsHost] Started frontend 
'${f.name}'`);\n } catch (err) {\n this.log.error(`[FrontendsHost] Failed to start '${f.name}':`, err);\n }\n }\n }\n\n async stopAll(): Promise<void> {\n for (const f of this.frontends) {\n try {\n await f.stop();\n } catch (err) {\n this.log.error(`[FrontendsHost] Failed to stop '${f.name}':`, err);\n }\n }\n }\n}\n"],"mappings":";;;;;AA4CO,SAAS,iBAAiB,GAAkC;AACjE,SACE,OAAQ,EAAU,cAAc,cAAc,OAAQ,EAAU,mBAAmB;AAEvF;AAhDA,IAwDa;AAxDb;AAAA;AAwDO,IAAM,gBAAN,MAAoB;AAAA,MACjB;AAAA,MACA;AAAA,MACA,YAAwB,CAAC;AAAA,MAEjC,YAAY,KAAe,KAAgC;AACzD,aAAK,MAAM;AACX,aAAK,MAAM;AAAA,MACb;AAAA,MAEA,MAAM,KAAK,OAAsC;AAC/C,aAAK,YAAY,CAAC;AAClB,mBAAW,QAAQ,OAAO;AACxB,cAAI,KAAK,SAAS,eAAe;AAC/B,kBAAM,EAAE,WAAW,IAAI,MAAM,OAAO,4BAAe;AACnD,iBAAK,UAAU,KAAK,IAAI,WAAW,KAAK,MAAM,CAAC;AAAA,UACjD,WAAW,KAAK,SAAS,UAAU;AACjC,kBAAM,EAAE,eAAe,IAAI,MAAM,OAAO,gCAAmB;AAC3D,iBAAK,UAAU,KAAK,IAAI,eAAe,CAAC;AAAA,UAC1C,WAAW,KAAK,SAAS,SAAS;AAChC,kBAAM,EAAE,cAAc,IAAI,MAAM,OAAO,+BAAkB;AACzD,iBAAK,UAAU,KAAK,IAAI,cAAc,KAAK,MAAa,CAAC;AAAA,UAC3D,WAAW,KAAK,SAAS,OAAO;AAC9B,kBAAM,EAAE,YAAY,IAAI,MAAM,OAAO,6BAAqB;AAC1D,iBAAK,UAAU,KAAK,IAAI,YAAY,KAAK,MAAa,CAAC;AAAA,UACzD,WAAW,KAAK,SAAS,OAAO;AAC9B,kBAAM,EAAE,YAAY,IAAI,MAAM,OAAO,6BAAgC;AACrE,iBAAK,UAAU,KAAK,IAAI,YAAY,KAAK,MAAa,CAAC;AAAA,UACzD,OAAO;AACL,iBAAK,IAAI,KAAK,qCAAqC,KAAK,IAAI,aAAa;AAAA,UAC3E;AAAA,QACF;AAAA,MACF;AAAA,MAEA,MAAM,SAAS,YAAkD;AAC/D,mBAAW,KAAK,KAAK,WAAW;AAC9B,cAAI;AACF,kBAAM,MAAM,WAAW;AAEvB,gBAAI,iBAAiB,CAAC,GAAG;AACvB,kBAAI,IAAI,OAAQ,GAAE,UAAU,IAAI,MAAM;AACtC,kBAAI,IAAI,YAAa,GAAE,eAAe,IAAI,WAAW;AAAA,YACvD;AACA,kBAAM,EAAE,MAAM,GAAG;AACjB,iBAAK,IAAI,KAAK,qCAAqC,EAAE,IAAI,GAAG;AAAA,UAC9D,SAAS,KAAK;AACZ,iBAAK,IAAI,MAAM,oCAAoC,EAAE,IAAI,MAAM,GAAG;AAAA,UACpE;AAAA,QACF;AAAA,MACF;AAAA,MAEA,MAAM,UAAyB;AAC7B,mBAAW,KAAK,KAAK,WAAW;AAC9B,cAAI;AACF,kBAAM,EAAE,KAAK;AAAA,UACf,SAAS,KAAK;AACZ,iBAAK,IAAI,MAAM,mCAAmC,EAAE,IAAI,MAAM,GAAG;AAAA,UACnE;AAAA,QACF;AAAA,MACF;AAAA,IACF;AAAA;AAAA;","names":[]}
|
|
@@ -24,7 +24,7 @@ var init_host = __esm({
|
|
|
24
24
|
const { NdjsonSink } = await import("./ndjson-sink-FD2PSXGD.mjs");
|
|
25
25
|
this.frontends.push(new NdjsonSink(spec.config));
|
|
26
26
|
} else if (spec.name === "github") {
|
|
27
|
-
const { GitHubFrontend } = await import("./github-frontend-
|
|
27
|
+
const { GitHubFrontend } = await import("./github-frontend-ZZRU6P43.mjs");
|
|
28
28
|
this.frontends.push(new GitHubFrontend());
|
|
29
29
|
} else if (spec.name === "slack") {
|
|
30
30
|
const { SlackFrontend } = await import("./slack-frontend-QO7LW5BH.mjs");
|
|
@@ -33,7 +33,7 @@ var init_host = __esm({
|
|
|
33
33
|
const { TuiFrontend } = await import("./tui-frontend-T56PZB67.mjs");
|
|
34
34
|
this.frontends.push(new TuiFrontend(spec.config));
|
|
35
35
|
} else if (spec.name === "a2a") {
|
|
36
|
-
const { A2AFrontend } = await import("./a2a-frontend-
|
|
36
|
+
const { A2AFrontend } = await import("./a2a-frontend-GUEGI5SX.mjs");
|
|
37
37
|
this.frontends.push(new A2AFrontend(spec.config));
|
|
38
38
|
} else {
|
|
39
39
|
this.log.warn(`[FrontendsHost] Unknown frontend '${spec.name}', skipping`);
|
|
@@ -72,4 +72,4 @@ export {
|
|
|
72
72
|
FrontendsHost,
|
|
73
73
|
isActiveFrontend
|
|
74
74
|
};
|
|
75
|
-
//# sourceMappingURL=host-
|
|
75
|
+
//# sourceMappingURL=host-ECXTIDWG.mjs.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"sources":["../../src/frontends/host.ts"],"sourcesContent":["import { EventBus } from '../event-bus/event-bus';\nimport type { TaskStore } from '../agent-protocol/task-store';\n\nexport interface FrontendContext {\n eventBus: EventBus;\n logger: {\n info: (...a: any[]) => void;\n warn: (...a: any[]) => void;\n error: (...a: any[]) => void;\n };\n config: unknown;\n run: {\n runId: string;\n workflowId?: string;\n repo?: { owner: string; name: string };\n pr?: number;\n headSha?: string;\n event?: string;\n actor?: string;\n };\n octokit?: any;\n // Optional webhook context (e.g., Slack Events API payload injected by socket runner)\n webhookContext?: { webhookData?: Map<string, unknown>; eventType?: string };\n // Optional engine reference for frontends that spawn their own execution runs (e.g., A2A)\n engine?: any;\n // Optional full VisorConfig for frontends that need access to checks/workflows\n visorConfig?: any;\n // Optional shared task store for cross-frontend execution tracking (when task_tracking is enabled)\n taskStore?: TaskStore;\n}\n\nexport interface Frontend {\n readonly name: string;\n start(ctx: FrontendContext): Promise<void> | void;\n stop(): Promise<void> | void;\n}\n\n/** Frontends that can trigger their own engine executions (e.g., A2A). */\nexport interface ActiveFrontend extends Frontend {\n setEngine(engine: any): void;\n setVisorConfig(config: any): void;\n}\n\n/** Type guard for ActiveFrontend (duck-typed). 
*/\nexport function isActiveFrontend(f: Frontend): f is ActiveFrontend {\n return (\n typeof (f as any).setEngine === 'function' && typeof (f as any).setVisorConfig === 'function'\n );\n}\n\nexport interface FrontendSpec {\n name: string; // e.g., 'ndjson-sink', 'github', 'slack'\n package?: string; // external package name (future)\n config?: unknown;\n}\n\nexport class FrontendsHost {\n private bus: EventBus;\n private log: FrontendContext['logger'];\n private frontends: Frontend[] = [];\n\n constructor(bus: EventBus, log: FrontendContext['logger']) {\n this.bus = bus;\n this.log = log;\n }\n\n async load(specs: FrontendSpec[]): Promise<void> {\n this.frontends = [];\n for (const spec of specs) {\n if (spec.name === 'ndjson-sink') {\n const { NdjsonSink } = await import('./ndjson-sink');\n this.frontends.push(new NdjsonSink(spec.config));\n } else if (spec.name === 'github') {\n const { GitHubFrontend } = await import('./github-frontend');\n this.frontends.push(new GitHubFrontend());\n } else if (spec.name === 'slack') {\n const { SlackFrontend } = await import('./slack-frontend');\n this.frontends.push(new SlackFrontend(spec.config as any));\n } else if (spec.name === 'tui') {\n const { TuiFrontend } = await import('../tui/tui-frontend');\n this.frontends.push(new TuiFrontend(spec.config as any));\n } else if (spec.name === 'a2a') {\n const { A2AFrontend } = await import('../agent-protocol/a2a-frontend');\n this.frontends.push(new A2AFrontend(spec.config as any));\n } else {\n this.log.warn(`[FrontendsHost] Unknown frontend '${spec.name}', skipping`);\n }\n }\n }\n\n async startAll(ctxFactory: () => FrontendContext): Promise<void> {\n for (const f of this.frontends) {\n try {\n const ctx = ctxFactory();\n // Auto-inject engine/config into active frontends\n if (isActiveFrontend(f)) {\n if (ctx.engine) f.setEngine(ctx.engine);\n if (ctx.visorConfig) f.setVisorConfig(ctx.visorConfig);\n }\n await f.start(ctx);\n this.log.info(`[FrontendsHost] Started frontend 
'${f.name}'`);\n } catch (err) {\n this.log.error(`[FrontendsHost] Failed to start '${f.name}':`, err);\n }\n }\n }\n\n async stopAll(): Promise<void> {\n for (const f of this.frontends) {\n try {\n await f.stop();\n } catch (err) {\n this.log.error(`[FrontendsHost] Failed to stop '${f.name}':`, err);\n }\n }\n }\n}\n"],"mappings":";;;;;AA4CO,SAAS,iBAAiB,GAAkC;AACjE,SACE,OAAQ,EAAU,cAAc,cAAc,OAAQ,EAAU,mBAAmB;AAEvF;AAhDA,IAwDa;AAxDb;AAAA;AAwDO,IAAM,gBAAN,MAAoB;AAAA,MACjB;AAAA,MACA;AAAA,MACA,YAAwB,CAAC;AAAA,MAEjC,YAAY,KAAe,KAAgC;AACzD,aAAK,MAAM;AACX,aAAK,MAAM;AAAA,MACb;AAAA,MAEA,MAAM,KAAK,OAAsC;AAC/C,aAAK,YAAY,CAAC;AAClB,mBAAW,QAAQ,OAAO;AACxB,cAAI,KAAK,SAAS,eAAe;AAC/B,kBAAM,EAAE,WAAW,IAAI,MAAM,OAAO,4BAAe;AACnD,iBAAK,UAAU,KAAK,IAAI,WAAW,KAAK,MAAM,CAAC;AAAA,UACjD,WAAW,KAAK,SAAS,UAAU;AACjC,kBAAM,EAAE,eAAe,IAAI,MAAM,OAAO,gCAAmB;AAC3D,iBAAK,UAAU,KAAK,IAAI,eAAe,CAAC;AAAA,UAC1C,WAAW,KAAK,SAAS,SAAS;AAChC,kBAAM,EAAE,cAAc,IAAI,MAAM,OAAO,+BAAkB;AACzD,iBAAK,UAAU,KAAK,IAAI,cAAc,KAAK,MAAa,CAAC;AAAA,UAC3D,WAAW,KAAK,SAAS,OAAO;AAC9B,kBAAM,EAAE,YAAY,IAAI,MAAM,OAAO,6BAAqB;AAC1D,iBAAK,UAAU,KAAK,IAAI,YAAY,KAAK,MAAa,CAAC;AAAA,UACzD,WAAW,KAAK,SAAS,OAAO;AAC9B,kBAAM,EAAE,YAAY,IAAI,MAAM,OAAO,6BAAgC;AACrE,iBAAK,UAAU,KAAK,IAAI,YAAY,KAAK,MAAa,CAAC;AAAA,UACzD,OAAO;AACL,iBAAK,IAAI,KAAK,qCAAqC,KAAK,IAAI,aAAa;AAAA,UAC3E;AAAA,QACF;AAAA,MACF;AAAA,MAEA,MAAM,SAAS,YAAkD;AAC/D,mBAAW,KAAK,KAAK,WAAW;AAC9B,cAAI;AACF,kBAAM,MAAM,WAAW;AAEvB,gBAAI,iBAAiB,CAAC,GAAG;AACvB,kBAAI,IAAI,OAAQ,GAAE,UAAU,IAAI,MAAM;AACtC,kBAAI,IAAI,YAAa,GAAE,eAAe,IAAI,WAAW;AAAA,YACvD;AACA,kBAAM,EAAE,MAAM,GAAG;AACjB,iBAAK,IAAI,KAAK,qCAAqC,EAAE,IAAI,GAAG;AAAA,UAC9D,SAAS,KAAK;AACZ,iBAAK,IAAI,MAAM,oCAAoC,EAAE,IAAI,MAAM,GAAG;AAAA,UACpE;AAAA,QACF;AAAA,MACF;AAAA,MAEA,MAAM,UAAyB;AAC7B,mBAAW,KAAK,KAAK,WAAW;AAC9B,cAAI;AACF,kBAAM,EAAE,KAAK;AAAA,UACf,SAAS,KAAK;AACZ,iBAAK,IAAI,MAAM,mCAAmC,EAAE,IAAI,MAAM,GAAG;AAAA,UACnE;AAAA,QACF;AAAA,MACF;AAAA,IACF;AAAA;AAAA;","names":[]}
|
|
@@ -0,0 +1,527 @@
|
|
|
1
|
+
import {
|
|
2
|
+
init_logger,
|
|
3
|
+
logger
|
|
4
|
+
} from "./chunk-SZXICFQ3.mjs";
|
|
5
|
+
import "./chunk-UCMJJ3IM.mjs";
|
|
6
|
+
import {
|
|
7
|
+
__esm,
|
|
8
|
+
__require
|
|
9
|
+
} from "./chunk-J7LXIPZS.mjs";
|
|
10
|
+
|
|
11
|
+
// src/enterprise/scheduler/knex-store.ts
|
|
12
|
+
import * as fs from "fs";
|
|
13
|
+
import * as path from "path";
|
|
14
|
+
import { v4 as uuidv4 } from "uuid";
|
|
15
|
+
function toNum(val) {
|
|
16
|
+
if (val === null || val === void 0) return void 0;
|
|
17
|
+
return typeof val === "string" ? parseInt(val, 10) : val;
|
|
18
|
+
}
|
|
19
|
+
function safeJsonParse(value) {
|
|
20
|
+
if (!value) return void 0;
|
|
21
|
+
try {
|
|
22
|
+
return JSON.parse(value);
|
|
23
|
+
} catch {
|
|
24
|
+
return void 0;
|
|
25
|
+
}
|
|
26
|
+
}
|
|
27
|
+
function fromTriggerRow(row) {
|
|
28
|
+
return {
|
|
29
|
+
id: row.id,
|
|
30
|
+
creatorId: row.creator_id,
|
|
31
|
+
creatorContext: row.creator_context ?? void 0,
|
|
32
|
+
creatorName: row.creator_name ?? void 0,
|
|
33
|
+
description: row.description ?? void 0,
|
|
34
|
+
channels: safeJsonParse(row.channels),
|
|
35
|
+
fromUsers: safeJsonParse(row.from_users),
|
|
36
|
+
fromBots: row.from_bots === true || row.from_bots === 1,
|
|
37
|
+
contains: safeJsonParse(row.contains),
|
|
38
|
+
matchPattern: row.match_pattern ?? void 0,
|
|
39
|
+
threads: row.threads,
|
|
40
|
+
workflow: row.workflow,
|
|
41
|
+
inputs: safeJsonParse(row.inputs),
|
|
42
|
+
outputContext: safeJsonParse(row.output_context),
|
|
43
|
+
status: row.status,
|
|
44
|
+
enabled: row.enabled === true || row.enabled === 1,
|
|
45
|
+
createdAt: toNum(row.created_at)
|
|
46
|
+
};
|
|
47
|
+
}
|
|
48
|
+
function toTriggerInsertRow(trigger) {
|
|
49
|
+
return {
|
|
50
|
+
id: trigger.id,
|
|
51
|
+
creator_id: trigger.creatorId,
|
|
52
|
+
creator_context: trigger.creatorContext ?? null,
|
|
53
|
+
creator_name: trigger.creatorName ?? null,
|
|
54
|
+
description: trigger.description ?? null,
|
|
55
|
+
channels: trigger.channels ? JSON.stringify(trigger.channels) : null,
|
|
56
|
+
from_users: trigger.fromUsers ? JSON.stringify(trigger.fromUsers) : null,
|
|
57
|
+
from_bots: trigger.fromBots,
|
|
58
|
+
contains: trigger.contains ? JSON.stringify(trigger.contains) : null,
|
|
59
|
+
match_pattern: trigger.matchPattern ?? null,
|
|
60
|
+
threads: trigger.threads,
|
|
61
|
+
workflow: trigger.workflow,
|
|
62
|
+
inputs: trigger.inputs ? JSON.stringify(trigger.inputs) : null,
|
|
63
|
+
output_context: trigger.outputContext ? JSON.stringify(trigger.outputContext) : null,
|
|
64
|
+
status: trigger.status,
|
|
65
|
+
enabled: trigger.enabled,
|
|
66
|
+
created_at: trigger.createdAt
|
|
67
|
+
};
|
|
68
|
+
}
|
|
69
|
+
function fromDbRow(row) {
|
|
70
|
+
return {
|
|
71
|
+
id: row.id,
|
|
72
|
+
creatorId: row.creator_id,
|
|
73
|
+
creatorContext: row.creator_context ?? void 0,
|
|
74
|
+
creatorName: row.creator_name ?? void 0,
|
|
75
|
+
timezone: row.timezone,
|
|
76
|
+
schedule: row.schedule_expr,
|
|
77
|
+
runAt: toNum(row.run_at),
|
|
78
|
+
isRecurring: row.is_recurring === true || row.is_recurring === 1,
|
|
79
|
+
originalExpression: row.original_expression,
|
|
80
|
+
workflow: row.workflow ?? void 0,
|
|
81
|
+
workflowInputs: safeJsonParse(row.workflow_inputs),
|
|
82
|
+
outputContext: safeJsonParse(row.output_context),
|
|
83
|
+
status: row.status,
|
|
84
|
+
createdAt: toNum(row.created_at),
|
|
85
|
+
lastRunAt: toNum(row.last_run_at),
|
|
86
|
+
nextRunAt: toNum(row.next_run_at),
|
|
87
|
+
runCount: row.run_count,
|
|
88
|
+
failureCount: row.failure_count,
|
|
89
|
+
lastError: row.last_error ?? void 0,
|
|
90
|
+
previousResponse: row.previous_response ?? void 0
|
|
91
|
+
};
|
|
92
|
+
}
|
|
93
|
+
function toInsertRow(schedule) {
|
|
94
|
+
return {
|
|
95
|
+
id: schedule.id,
|
|
96
|
+
creator_id: schedule.creatorId,
|
|
97
|
+
creator_context: schedule.creatorContext ?? null,
|
|
98
|
+
creator_name: schedule.creatorName ?? null,
|
|
99
|
+
timezone: schedule.timezone,
|
|
100
|
+
schedule_expr: schedule.schedule,
|
|
101
|
+
run_at: schedule.runAt ?? null,
|
|
102
|
+
is_recurring: schedule.isRecurring,
|
|
103
|
+
original_expression: schedule.originalExpression,
|
|
104
|
+
workflow: schedule.workflow ?? null,
|
|
105
|
+
workflow_inputs: schedule.workflowInputs ? JSON.stringify(schedule.workflowInputs) : null,
|
|
106
|
+
output_context: schedule.outputContext ? JSON.stringify(schedule.outputContext) : null,
|
|
107
|
+
status: schedule.status,
|
|
108
|
+
created_at: schedule.createdAt,
|
|
109
|
+
last_run_at: schedule.lastRunAt ?? null,
|
|
110
|
+
next_run_at: schedule.nextRunAt ?? null,
|
|
111
|
+
run_count: schedule.runCount,
|
|
112
|
+
failure_count: schedule.failureCount,
|
|
113
|
+
last_error: schedule.lastError ?? null,
|
|
114
|
+
previous_response: schedule.previousResponse ?? null
|
|
115
|
+
};
|
|
116
|
+
}
|
|
117
|
+
var KnexStoreBackend;
|
|
118
|
+
var init_knex_store = __esm({
|
|
119
|
+
"src/enterprise/scheduler/knex-store.ts"() {
|
|
120
|
+
init_logger();
|
|
121
|
+
KnexStoreBackend = class {
|
|
122
|
+
knex = null;
|
|
123
|
+
driver;
|
|
124
|
+
connection;
|
|
125
|
+
constructor(driver, storageConfig, _haConfig) {
|
|
126
|
+
this.driver = driver;
|
|
127
|
+
this.connection = storageConfig.connection || {};
|
|
128
|
+
}
|
|
129
|
+
async initialize() {
|
|
130
|
+
const { createRequire } = __require("module");
|
|
131
|
+
const runtimeRequire = createRequire(__filename);
|
|
132
|
+
let knexFactory;
|
|
133
|
+
try {
|
|
134
|
+
knexFactory = runtimeRequire("knex");
|
|
135
|
+
} catch (err) {
|
|
136
|
+
const code = err?.code;
|
|
137
|
+
if (code === "MODULE_NOT_FOUND" || code === "ERR_MODULE_NOT_FOUND") {
|
|
138
|
+
throw new Error(
|
|
139
|
+
"knex is required for PostgreSQL/MySQL/MSSQL schedule storage. Install it with: npm install knex"
|
|
140
|
+
);
|
|
141
|
+
}
|
|
142
|
+
throw err;
|
|
143
|
+
}
|
|
144
|
+
const clientMap = {
|
|
145
|
+
postgresql: "pg",
|
|
146
|
+
mysql: "mysql2",
|
|
147
|
+
mssql: "tedious"
|
|
148
|
+
};
|
|
149
|
+
const client = clientMap[this.driver];
|
|
150
|
+
let connection;
|
|
151
|
+
if (this.connection.connection_string) {
|
|
152
|
+
connection = this.connection.connection_string;
|
|
153
|
+
} else if (this.driver === "mssql") {
|
|
154
|
+
connection = this.buildMssqlConnection();
|
|
155
|
+
} else {
|
|
156
|
+
connection = this.buildStandardConnection();
|
|
157
|
+
}
|
|
158
|
+
this.knex = knexFactory({
|
|
159
|
+
client,
|
|
160
|
+
connection,
|
|
161
|
+
pool: {
|
|
162
|
+
min: this.connection.pool?.min ?? 0,
|
|
163
|
+
max: this.connection.pool?.max ?? 10
|
|
164
|
+
}
|
|
165
|
+
});
|
|
166
|
+
await this.migrateSchema();
|
|
167
|
+
logger.info(`[KnexStore] Initialized (${this.driver})`);
|
|
168
|
+
}
|
|
169
|
+
buildStandardConnection() {
|
|
170
|
+
return {
|
|
171
|
+
host: this.connection.host || "localhost",
|
|
172
|
+
port: this.connection.port,
|
|
173
|
+
database: this.connection.database || "visor",
|
|
174
|
+
user: this.connection.user,
|
|
175
|
+
password: this.connection.password,
|
|
176
|
+
ssl: this.resolveSslConfig()
|
|
177
|
+
};
|
|
178
|
+
}
|
|
179
|
+
buildMssqlConnection() {
|
|
180
|
+
const ssl = this.connection.ssl;
|
|
181
|
+
const sslEnabled = ssl === true || typeof ssl === "object" && ssl.enabled !== false;
|
|
182
|
+
return {
|
|
183
|
+
server: this.connection.host || "localhost",
|
|
184
|
+
port: this.connection.port,
|
|
185
|
+
database: this.connection.database || "visor",
|
|
186
|
+
user: this.connection.user,
|
|
187
|
+
password: this.connection.password,
|
|
188
|
+
options: {
|
|
189
|
+
encrypt: sslEnabled,
|
|
190
|
+
trustServerCertificate: typeof ssl === "object" ? ssl.reject_unauthorized === false : !sslEnabled
|
|
191
|
+
}
|
|
192
|
+
};
|
|
193
|
+
}
|
|
194
|
+
resolveSslConfig() {
|
|
195
|
+
const ssl = this.connection.ssl;
|
|
196
|
+
if (ssl === false || ssl === void 0) return false;
|
|
197
|
+
if (ssl === true) return { rejectUnauthorized: true };
|
|
198
|
+
if (ssl.enabled === false) return false;
|
|
199
|
+
const result = {
|
|
200
|
+
rejectUnauthorized: ssl.reject_unauthorized !== false
|
|
201
|
+
};
|
|
202
|
+
if (ssl.ca) {
|
|
203
|
+
const caPath = this.validateSslPath(ssl.ca, "CA certificate");
|
|
204
|
+
result.ca = fs.readFileSync(caPath, "utf8");
|
|
205
|
+
}
|
|
206
|
+
if (ssl.cert) {
|
|
207
|
+
const certPath = this.validateSslPath(ssl.cert, "client certificate");
|
|
208
|
+
result.cert = fs.readFileSync(certPath, "utf8");
|
|
209
|
+
}
|
|
210
|
+
if (ssl.key) {
|
|
211
|
+
const keyPath = this.validateSslPath(ssl.key, "client key");
|
|
212
|
+
result.key = fs.readFileSync(keyPath, "utf8");
|
|
213
|
+
}
|
|
214
|
+
return result;
|
|
215
|
+
}
|
|
216
|
+
validateSslPath(filePath, label) {
|
|
217
|
+
const resolved = path.resolve(filePath);
|
|
218
|
+
if (resolved !== path.normalize(resolved)) {
|
|
219
|
+
throw new Error(`SSL ${label} path contains invalid sequences: ${filePath}`);
|
|
220
|
+
}
|
|
221
|
+
if (!fs.existsSync(resolved)) {
|
|
222
|
+
throw new Error(`SSL ${label} not found: ${filePath}`);
|
|
223
|
+
}
|
|
224
|
+
return resolved;
|
|
225
|
+
}
|
|
226
|
+
async shutdown() {
|
|
227
|
+
if (this.knex) {
|
|
228
|
+
await this.knex.destroy();
|
|
229
|
+
this.knex = null;
|
|
230
|
+
}
|
|
231
|
+
}
|
|
232
|
+
// Create the three backing tables on first run. Each table is created only
// if absent; existing tables are never altered, so this is an idempotent
// bootstrap rather than a versioned migration.
async migrateSchema() {
  const knex = this.getKnex();
  // schedules: one row per scheduled (one-shot or recurring) workflow run.
  const exists = await knex.schema.hasTable("schedules");
  if (!exists) {
    await knex.schema.createTable("schedules", (table) => {
      table.string("id", 36).primary(); // UUID
      table.string("creator_id", 255).notNullable().index();
      table.string("creator_context", 255);
      table.string("creator_name", 255);
      table.string("timezone", 64).notNullable().defaultTo("UTC");
      table.string("schedule_expr", 255); // cron-style expression for recurring runs
      table.bigInteger("run_at"); // epoch ms for one-shot schedules
      table.boolean("is_recurring").notNullable();
      table.text("original_expression");
      table.string("workflow", 255);
      table.text("workflow_inputs"); // serialized inputs — presumably JSON; see to/fromDbRow
      table.text("output_context");
      table.string("status", 20).notNullable().index();
      table.bigInteger("created_at").notNullable(); // epoch ms
      table.bigInteger("last_run_at");
      table.bigInteger("next_run_at");
      table.integer("run_count").notNullable().defaultTo(0);
      table.integer("failure_count").notNullable().defaultTo(0);
      table.text("last_error");
      table.text("previous_response");
      // Composite index backing getDueSchedules' status + next_run_at scan.
      table.index(["status", "next_run_at"]);
    });
  }
  // message_triggers: rules that fire a workflow on matching inbound messages.
  const triggersExist = await knex.schema.hasTable("message_triggers");
  if (!triggersExist) {
    await knex.schema.createTable("message_triggers", (table) => {
      table.string("id", 36).primary(); // UUID
      table.string("creator_id", 255).notNullable().index();
      table.string("creator_context", 255);
      table.string("creator_name", 255);
      table.text("description");
      table.text("channels"); // serialized list — see to/fromTriggerRow
      table.text("from_users");
      table.boolean("from_bots").notNullable().defaultTo(false);
      table.text("contains");
      table.text("match_pattern");
      table.string("threads", 20).notNullable().defaultTo("any");
      table.string("workflow", 255).notNullable();
      table.text("inputs");
      table.text("output_context");
      table.string("status", 20).notNullable().defaultTo("active").index();
      table.boolean("enabled").notNullable().defaultTo(true);
      table.bigInteger("created_at").notNullable(); // epoch ms
    });
  }
  // scheduler_locks: lease rows used by tryAcquireLock/renewLock/releaseLock
  // for HA leader election across scheduler nodes.
  const locksExist = await knex.schema.hasTable("scheduler_locks");
  if (!locksExist) {
    await knex.schema.createTable("scheduler_locks", (table) => {
      table.string("lock_id", 255).primary();
      table.string("node_id", 255).notNullable();
      table.string("lock_token", 36).notNullable(); // UUID fencing token
      table.bigInteger("acquired_at").notNullable(); // epoch ms
      table.bigInteger("expires_at").notNullable(); // epoch ms
    });
  }
}
|
|
293
|
+
getKnex() {
|
|
294
|
+
if (!this.knex) {
|
|
295
|
+
throw new Error("[KnexStore] Not initialized. Call initialize() first.");
|
|
296
|
+
}
|
|
297
|
+
return this.knex;
|
|
298
|
+
}
|
|
299
|
+
// --- CRUD ---
|
|
300
|
+
async create(schedule) {
|
|
301
|
+
const knex = this.getKnex();
|
|
302
|
+
const newSchedule = {
|
|
303
|
+
...schedule,
|
|
304
|
+
id: uuidv4(),
|
|
305
|
+
createdAt: Date.now(),
|
|
306
|
+
runCount: 0,
|
|
307
|
+
failureCount: 0,
|
|
308
|
+
status: "active"
|
|
309
|
+
};
|
|
310
|
+
await knex("schedules").insert(toInsertRow(newSchedule));
|
|
311
|
+
logger.info(`[KnexStore] Created schedule ${newSchedule.id} for user ${newSchedule.creatorId}`);
|
|
312
|
+
return newSchedule;
|
|
313
|
+
}
|
|
314
|
+
async importSchedule(schedule) {
|
|
315
|
+
const knex = this.getKnex();
|
|
316
|
+
const existing = await knex("schedules").where("id", schedule.id).first();
|
|
317
|
+
if (existing) return;
|
|
318
|
+
await knex("schedules").insert(toInsertRow(schedule));
|
|
319
|
+
}
|
|
320
|
+
async get(id) {
|
|
321
|
+
const knex = this.getKnex();
|
|
322
|
+
const row = await knex("schedules").where("id", id).first();
|
|
323
|
+
return row ? fromDbRow(row) : void 0;
|
|
324
|
+
}
|
|
325
|
+
async update(id, patch) {
|
|
326
|
+
const knex = this.getKnex();
|
|
327
|
+
const existing = await knex("schedules").where("id", id).first();
|
|
328
|
+
if (!existing) return void 0;
|
|
329
|
+
const current = fromDbRow(existing);
|
|
330
|
+
const updated = { ...current, ...patch, id: current.id };
|
|
331
|
+
const row = toInsertRow(updated);
|
|
332
|
+
delete row.id;
|
|
333
|
+
await knex("schedules").where("id", id).update(row);
|
|
334
|
+
return updated;
|
|
335
|
+
}
|
|
336
|
+
async delete(id) {
|
|
337
|
+
const knex = this.getKnex();
|
|
338
|
+
const deleted = await knex("schedules").where("id", id).del();
|
|
339
|
+
if (deleted > 0) {
|
|
340
|
+
logger.info(`[KnexStore] Deleted schedule ${id}`);
|
|
341
|
+
return true;
|
|
342
|
+
}
|
|
343
|
+
return false;
|
|
344
|
+
}
|
|
345
|
+
// --- Queries ---
|
|
346
|
+
async getByCreator(creatorId) {
|
|
347
|
+
const knex = this.getKnex();
|
|
348
|
+
const rows = await knex("schedules").where("creator_id", creatorId);
|
|
349
|
+
return rows.map((r) => fromDbRow(r));
|
|
350
|
+
}
|
|
351
|
+
async getActiveSchedules() {
|
|
352
|
+
const knex = this.getKnex();
|
|
353
|
+
const rows = await knex("schedules").where("status", "active");
|
|
354
|
+
return rows.map((r) => fromDbRow(r));
|
|
355
|
+
}
|
|
356
|
+
// Return active schedules whose trigger time has passed: one-shots by
// run_at, recurring ones by their precomputed next_run_at.
// `now` (epoch ms) is injectable for tests; defaults to the current clock.
async getDueSchedules(now) {
  const ts = now ?? Date.now();
  const knex = this.getKnex();
  // mssql has no boolean literals — the driver check mirrors the same 0/1
  // substitution used elsewhere in this store; presumably BIT columns.
  const bFalse = this.driver === "mssql" ? 0 : false;
  const bTrue = this.driver === "mssql" ? 1 : true;
  // NOTE: plain function() callbacks are deliberate — knex binds the grouped
  // query builder as `this` inside them; arrow functions would break that.
  const rows = await knex("schedules").where("status", "active").andWhere(function() {
    this.where(function() {
      // One-shot: due once run_at has passed.
      this.where("is_recurring", bFalse).whereNotNull("run_at").where("run_at", "<=", ts);
    }).orWhere(function() {
      // Recurring: due once next_run_at has passed.
      this.where("is_recurring", bTrue).whereNotNull("next_run_at").where("next_run_at", "<=", ts);
    });
  });
  return rows.map((r) => fromDbRow(r));
}
|
|
370
|
+
async findByWorkflow(creatorId, workflowName) {
|
|
371
|
+
const knex = this.getKnex();
|
|
372
|
+
const escaped = workflowName.toLowerCase().replace(/[%_\\]/g, "\\$&");
|
|
373
|
+
const pattern = `%${escaped}%`;
|
|
374
|
+
const rows = await knex("schedules").where("creator_id", creatorId).where("status", "active").whereRaw("LOWER(workflow) LIKE ? ESCAPE '\\'", [pattern]);
|
|
375
|
+
return rows.map((r) => fromDbRow(r));
|
|
376
|
+
}
|
|
377
|
+
async getAll() {
|
|
378
|
+
const knex = this.getKnex();
|
|
379
|
+
const rows = await knex("schedules");
|
|
380
|
+
return rows.map((r) => fromDbRow(r));
|
|
381
|
+
}
|
|
382
|
+
async getStats() {
|
|
383
|
+
const knex = this.getKnex();
|
|
384
|
+
const boolTrue = this.driver === "mssql" ? "1" : "true";
|
|
385
|
+
const boolFalse = this.driver === "mssql" ? "0" : "false";
|
|
386
|
+
const result = await knex("schedules").select(
|
|
387
|
+
knex.raw("COUNT(*) as total"),
|
|
388
|
+
knex.raw("SUM(CASE WHEN status = 'active' THEN 1 ELSE 0 END) as active"),
|
|
389
|
+
knex.raw("SUM(CASE WHEN status = 'paused' THEN 1 ELSE 0 END) as paused"),
|
|
390
|
+
knex.raw("SUM(CASE WHEN status = 'completed' THEN 1 ELSE 0 END) as completed"),
|
|
391
|
+
knex.raw("SUM(CASE WHEN status = 'failed' THEN 1 ELSE 0 END) as failed"),
|
|
392
|
+
knex.raw(`SUM(CASE WHEN is_recurring = ${boolTrue} THEN 1 ELSE 0 END) as recurring`),
|
|
393
|
+
knex.raw(`SUM(CASE WHEN is_recurring = ${boolFalse} THEN 1 ELSE 0 END) as one_time`)
|
|
394
|
+
).first();
|
|
395
|
+
return {
|
|
396
|
+
total: Number(result.total) || 0,
|
|
397
|
+
active: Number(result.active) || 0,
|
|
398
|
+
paused: Number(result.paused) || 0,
|
|
399
|
+
completed: Number(result.completed) || 0,
|
|
400
|
+
failed: Number(result.failed) || 0,
|
|
401
|
+
recurring: Number(result.recurring) || 0,
|
|
402
|
+
oneTime: Number(result.one_time) || 0
|
|
403
|
+
};
|
|
404
|
+
}
|
|
405
|
+
async validateLimits(creatorId, isRecurring, limits) {
|
|
406
|
+
const knex = this.getKnex();
|
|
407
|
+
if (limits.maxGlobal) {
|
|
408
|
+
const result = await knex("schedules").count("* as cnt").first();
|
|
409
|
+
if (Number(result?.cnt) >= limits.maxGlobal) {
|
|
410
|
+
throw new Error(`Global schedule limit reached (${limits.maxGlobal})`);
|
|
411
|
+
}
|
|
412
|
+
}
|
|
413
|
+
if (limits.maxPerUser) {
|
|
414
|
+
const result = await knex("schedules").where("creator_id", creatorId).count("* as cnt").first();
|
|
415
|
+
if (Number(result?.cnt) >= limits.maxPerUser) {
|
|
416
|
+
throw new Error(`You have reached the maximum number of schedules (${limits.maxPerUser})`);
|
|
417
|
+
}
|
|
418
|
+
}
|
|
419
|
+
if (isRecurring && limits.maxRecurringPerUser) {
|
|
420
|
+
const bTrue = this.driver === "mssql" ? 1 : true;
|
|
421
|
+
const result = await knex("schedules").where("creator_id", creatorId).where("is_recurring", bTrue).count("* as cnt").first();
|
|
422
|
+
if (Number(result?.cnt) >= limits.maxRecurringPerUser) {
|
|
423
|
+
throw new Error(
|
|
424
|
+
`You have reached the maximum number of recurring schedules (${limits.maxRecurringPerUser})`
|
|
425
|
+
);
|
|
426
|
+
}
|
|
427
|
+
}
|
|
428
|
+
}
|
|
429
|
+
// --- HA Distributed Locking (via scheduler_locks table) ---
|
|
430
|
+
// Attempt to acquire (or take over) a distributed lease row.
// Returns the fencing token on success, null when another node holds it.
async tryAcquireLock(lockId, nodeId, ttlSeconds) {
  const knex = this.getKnex();
  const now = Date.now();
  const expiresAt = now + ttlSeconds * 1e3;
  // Fresh fencing token per attempt; releaseLock/renewLock must present it.
  const token = uuidv4();
  // Step 1: try to take over an EXPIRED row. The expires_at predicate makes
  // the UPDATE a compare-and-swap — only one node's update can match.
  const updated = await knex("scheduler_locks").where("lock_id", lockId).where("expires_at", "<", now).update({
    node_id: nodeId,
    lock_token: token,
    acquired_at: now,
    expires_at: expiresAt
  });
  if (updated > 0) return token;
  // Step 2: no expired row matched — try to INSERT a brand-new lock row.
  try {
    await knex("scheduler_locks").insert({
      lock_id: lockId,
      node_id: nodeId,
      lock_token: token,
      acquired_at: now,
      expires_at: expiresAt
    });
    return token;
  } catch {
    // Swallowed on purpose: a primary-key conflict means another node holds
    // a live (or just-acquired) lock, which is a normal "lost the race"
    // outcome, not an error.
    return null;
  }
}
|
|
455
|
+
async releaseLock(lockId, lockToken) {
|
|
456
|
+
const knex = this.getKnex();
|
|
457
|
+
await knex("scheduler_locks").where("lock_id", lockId).where("lock_token", lockToken).del();
|
|
458
|
+
}
|
|
459
|
+
async renewLock(lockId, lockToken, ttlSeconds) {
|
|
460
|
+
const knex = this.getKnex();
|
|
461
|
+
const now = Date.now();
|
|
462
|
+
const expiresAt = now + ttlSeconds * 1e3;
|
|
463
|
+
const updated = await knex("scheduler_locks").where("lock_id", lockId).where("lock_token", lockToken).update({ acquired_at: now, expires_at: expiresAt });
|
|
464
|
+
return updated > 0;
|
|
465
|
+
}
|
|
466
|
+
// Intentional no-op: every mutation is written straight through to the
// database, so there is no in-memory buffer to flush.
async flush() {
}
|
|
468
|
+
// --- Message Trigger CRUD ---
|
|
469
|
+
async createTrigger(trigger) {
|
|
470
|
+
const knex = this.getKnex();
|
|
471
|
+
const newTrigger = {
|
|
472
|
+
...trigger,
|
|
473
|
+
id: uuidv4(),
|
|
474
|
+
createdAt: Date.now()
|
|
475
|
+
};
|
|
476
|
+
await knex("message_triggers").insert(toTriggerInsertRow(newTrigger));
|
|
477
|
+
logger.info(`[KnexStore] Created trigger ${newTrigger.id} for user ${newTrigger.creatorId}`);
|
|
478
|
+
return newTrigger;
|
|
479
|
+
}
|
|
480
|
+
async getTrigger(id) {
|
|
481
|
+
const knex = this.getKnex();
|
|
482
|
+
const row = await knex("message_triggers").where("id", id).first();
|
|
483
|
+
return row ? fromTriggerRow(row) : void 0;
|
|
484
|
+
}
|
|
485
|
+
async updateTrigger(id, patch) {
|
|
486
|
+
const knex = this.getKnex();
|
|
487
|
+
const existing = await knex("message_triggers").where("id", id).first();
|
|
488
|
+
if (!existing) return void 0;
|
|
489
|
+
const current = fromTriggerRow(existing);
|
|
490
|
+
const updated = {
|
|
491
|
+
...current,
|
|
492
|
+
...patch,
|
|
493
|
+
id: current.id,
|
|
494
|
+
createdAt: current.createdAt
|
|
495
|
+
};
|
|
496
|
+
const row = toTriggerInsertRow(updated);
|
|
497
|
+
delete row.id;
|
|
498
|
+
await knex("message_triggers").where("id", id).update(row);
|
|
499
|
+
return updated;
|
|
500
|
+
}
|
|
501
|
+
async deleteTrigger(id) {
|
|
502
|
+
const knex = this.getKnex();
|
|
503
|
+
const deleted = await knex("message_triggers").where("id", id).del();
|
|
504
|
+
if (deleted > 0) {
|
|
505
|
+
logger.info(`[KnexStore] Deleted trigger ${id}`);
|
|
506
|
+
return true;
|
|
507
|
+
}
|
|
508
|
+
return false;
|
|
509
|
+
}
|
|
510
|
+
async getTriggersByCreator(creatorId) {
|
|
511
|
+
const knex = this.getKnex();
|
|
512
|
+
const rows = await knex("message_triggers").where("creator_id", creatorId);
|
|
513
|
+
return rows.map((r) => fromTriggerRow(r));
|
|
514
|
+
}
|
|
515
|
+
async getActiveTriggers() {
|
|
516
|
+
const knex = this.getKnex();
|
|
517
|
+
const rows = await knex("message_triggers").where("status", "active").where("enabled", this.driver === "mssql" ? 1 : true);
|
|
518
|
+
return rows.map((r) => fromTriggerRow(r));
|
|
519
|
+
}
|
|
520
|
+
};
|
|
521
|
+
}
|
|
522
|
+
});
|
|
523
|
+
init_knex_store();
|
|
524
|
+
export {
|
|
525
|
+
KnexStoreBackend
|
|
526
|
+
};
|
|
527
|
+
//# sourceMappingURL=knex-store-CRORFJE6.mjs.map
|