clinkx 0.2.0 → 0.2.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/clinkx-workflows/dist/artifacts.d.ts +65 -0
- package/clinkx-workflows/dist/artifacts.js +268 -0
- package/clinkx-workflows/dist/artifacts.js.map +1 -0
- package/clinkx-workflows/dist/backend.d.ts +33 -0
- package/clinkx-workflows/dist/backend.js +9 -0
- package/clinkx-workflows/dist/backend.js.map +1 -0
- package/clinkx-workflows/dist/child-env.d.ts +23 -0
- package/clinkx-workflows/dist/child-env.js +53 -0
- package/clinkx-workflows/dist/child-env.js.map +1 -0
- package/clinkx-workflows/dist/clink-client.d.ts +51 -0
- package/clinkx-workflows/dist/clink-client.js +216 -0
- package/clinkx-workflows/dist/clink-client.js.map +1 -0
- package/clinkx-workflows/dist/config.d.ts +126 -0
- package/clinkx-workflows/dist/config.js +226 -0
- package/clinkx-workflows/dist/config.js.map +1 -0
- package/clinkx-workflows/dist/definition-normalizer.d.ts +59 -0
- package/clinkx-workflows/dist/definition-normalizer.js +75 -0
- package/clinkx-workflows/dist/definition-normalizer.js.map +1 -0
- package/clinkx-workflows/dist/engine.d.ts +235 -0
- package/clinkx-workflows/dist/engine.js +1044 -0
- package/clinkx-workflows/dist/engine.js.map +1 -0
- package/clinkx-workflows/dist/errors.d.ts +74 -0
- package/clinkx-workflows/dist/errors.js +84 -0
- package/clinkx-workflows/dist/errors.js.map +1 -0
- package/clinkx-workflows/dist/fidelity.d.ts +112 -0
- package/clinkx-workflows/dist/fidelity.js +140 -0
- package/clinkx-workflows/dist/fidelity.js.map +1 -0
- package/clinkx-workflows/dist/fingerprint.d.ts +69 -0
- package/clinkx-workflows/dist/fingerprint.js +143 -0
- package/clinkx-workflows/dist/fingerprint.js.map +1 -0
- package/clinkx-workflows/dist/index.d.ts +16 -0
- package/clinkx-workflows/dist/index.js +42 -0
- package/clinkx-workflows/dist/index.js.map +1 -0
- package/clinkx-workflows/dist/loader.d.ts +64 -0
- package/clinkx-workflows/dist/loader.js +371 -0
- package/clinkx-workflows/dist/loader.js.map +1 -0
- package/clinkx-workflows/dist/logger.d.ts +16 -0
- package/clinkx-workflows/dist/logger.js +31 -0
- package/clinkx-workflows/dist/logger.js.map +1 -0
- package/clinkx-workflows/dist/path-validation.d.ts +23 -0
- package/clinkx-workflows/dist/path-validation.js +73 -0
- package/clinkx-workflows/dist/path-validation.js.map +1 -0
- package/clinkx-workflows/dist/prompt-budget.d.ts +31 -0
- package/clinkx-workflows/dist/prompt-budget.js +78 -0
- package/clinkx-workflows/dist/prompt-budget.js.map +1 -0
- package/clinkx-workflows/dist/queue.d.ts +16 -0
- package/clinkx-workflows/dist/queue.js +46 -0
- package/clinkx-workflows/dist/queue.js.map +1 -0
- package/clinkx-workflows/dist/ranking-reducer.d.ts +11 -0
- package/clinkx-workflows/dist/ranking-reducer.js +245 -0
- package/clinkx-workflows/dist/ranking-reducer.js.map +1 -0
- package/clinkx-workflows/dist/reducers/index.d.ts +8 -0
- package/clinkx-workflows/dist/reducers/index.js +12 -0
- package/clinkx-workflows/dist/reducers/index.js.map +1 -0
- package/clinkx-workflows/dist/run-id.d.ts +17 -0
- package/clinkx-workflows/dist/run-id.js +26 -0
- package/clinkx-workflows/dist/run-id.js.map +1 -0
- package/clinkx-workflows/dist/run-summary/cards/council-answer.d.ts +8 -0
- package/clinkx-workflows/dist/run-summary/cards/council-answer.js +75 -0
- package/clinkx-workflows/dist/run-summary/cards/council-answer.js.map +1 -0
- package/clinkx-workflows/dist/run-summary/cards/council-code-review.d.ts +13 -0
- package/clinkx-workflows/dist/run-summary/cards/council-code-review.js +90 -0
- package/clinkx-workflows/dist/run-summary/cards/council-code-review.js.map +1 -0
- package/clinkx-workflows/dist/run-summary/cards/council-debug.d.ts +9 -0
- package/clinkx-workflows/dist/run-summary/cards/council-debug.js +79 -0
- package/clinkx-workflows/dist/run-summary/cards/council-debug.js.map +1 -0
- package/clinkx-workflows/dist/run-summary/cards/council-default.d.ts +11 -0
- package/clinkx-workflows/dist/run-summary/cards/council-default.js +57 -0
- package/clinkx-workflows/dist/run-summary/cards/council-default.js.map +1 -0
- package/clinkx-workflows/dist/run-summary/cards/council-discover.d.ts +10 -0
- package/clinkx-workflows/dist/run-summary/cards/council-discover.js +79 -0
- package/clinkx-workflows/dist/run-summary/cards/council-discover.js.map +1 -0
- package/clinkx-workflows/dist/run-summary/cards/generic.d.ts +2 -0
- package/clinkx-workflows/dist/run-summary/cards/generic.js +4 -0
- package/clinkx-workflows/dist/run-summary/cards/generic.js.map +1 -0
- package/clinkx-workflows/dist/run-summary/cards/index.d.ts +6 -0
- package/clinkx-workflows/dist/run-summary/cards/index.js +17 -0
- package/clinkx-workflows/dist/run-summary/cards/index.js.map +1 -0
- package/clinkx-workflows/dist/run-summary/utils.d.ts +6 -0
- package/clinkx-workflows/dist/run-summary/utils.js +30 -0
- package/clinkx-workflows/dist/run-summary/utils.js.map +1 -0
- package/clinkx-workflows/dist/run-summary-derived.d.ts +19 -0
- package/clinkx-workflows/dist/run-summary-derived.js +100 -0
- package/clinkx-workflows/dist/run-summary-derived.js.map +1 -0
- package/clinkx-workflows/dist/run-summary.d.ts +70 -0
- package/clinkx-workflows/dist/run-summary.js +125 -0
- package/clinkx-workflows/dist/run-summary.js.map +1 -0
- package/clinkx-workflows/dist/schema.d.ts +609 -0
- package/clinkx-workflows/dist/schema.js +123 -0
- package/clinkx-workflows/dist/schema.js.map +1 -0
- package/clinkx-workflows/dist/server.d.ts +16 -0
- package/clinkx-workflows/dist/server.js +33 -0
- package/clinkx-workflows/dist/server.js.map +1 -0
- package/clinkx-workflows/dist/shutdown.d.ts +54 -0
- package/clinkx-workflows/dist/shutdown.js +120 -0
- package/clinkx-workflows/dist/shutdown.js.map +1 -0
- package/clinkx-workflows/dist/state-schema.d.ts +141 -0
- package/clinkx-workflows/dist/state-schema.js +21 -0
- package/clinkx-workflows/dist/state-schema.js.map +1 -0
- package/clinkx-workflows/dist/state.d.ts +37 -0
- package/clinkx-workflows/dist/state.js +838 -0
- package/clinkx-workflows/dist/state.js.map +1 -0
- package/clinkx-workflows/dist/template-loader.d.ts +30 -0
- package/clinkx-workflows/dist/template-loader.js +77 -0
- package/clinkx-workflows/dist/template-loader.js.map +1 -0
- package/clinkx-workflows/dist/template.d.ts +54 -0
- package/clinkx-workflows/dist/template.js +128 -0
- package/clinkx-workflows/dist/template.js.map +1 -0
- package/clinkx-workflows/dist/transport.d.ts +91 -0
- package/clinkx-workflows/dist/transport.js +249 -0
- package/clinkx-workflows/dist/transport.js.map +1 -0
- package/clinkx-workflows/dist/types.d.ts +137 -0
- package/clinkx-workflows/dist/types.js +11 -0
- package/clinkx-workflows/dist/types.js.map +1 -0
- package/clinkx-workflows/dist/validators/council.d.ts +1488 -0
- package/clinkx-workflows/dist/validators/council.js +509 -0
- package/clinkx-workflows/dist/validators/council.js.map +1 -0
- package/clinkx-workflows/dist/validators/index.d.ts +40 -0
- package/clinkx-workflows/dist/validators/index.js +43 -0
- package/clinkx-workflows/dist/validators/index.js.map +1 -0
- package/clinkx-workflows/dist/workflow-receipt.d.ts +4 -0
- package/clinkx-workflows/dist/workflow-receipt.js +177 -0
- package/clinkx-workflows/dist/workflow-receipt.js.map +1 -0
- package/clinkx-workflows/dist/workflow-tools.d.ts +77 -0
- package/clinkx-workflows/dist/workflow-tools.js +1131 -0
- package/clinkx-workflows/dist/workflow-tools.js.map +1 -0
- package/clinkx-workflows/dist/workflows/council-default.d.ts +123 -0
- package/clinkx-workflows/dist/workflows/council-default.js +141 -0
- package/clinkx-workflows/dist/workflows/council-default.js.map +1 -0
- package/clinkx-workflows/dist/workflows/index.d.ts +12 -0
- package/clinkx-workflows/dist/workflows/index.js +15 -0
- package/clinkx-workflows/dist/workflows/index.js.map +1 -0
- package/conf/adapters/codex.json +2 -2
- package/conf/adapters/hapi/codex.json +2 -2
- package/dist/config.d.ts +5 -0
- package/dist/config.js +17 -0
- package/dist/config.js.map +1 -1
- package/dist/parsers/extract.d.ts +2 -0
- package/dist/parsers/extract.js +29 -20
- package/dist/parsers/extract.js.map +1 -1
- package/dist/pipeline.d.ts +2 -4
- package/dist/pipeline.js +93 -8
- package/dist/pipeline.js.map +1 -1
- package/dist/result-contract.d.ts +6 -1
- package/dist/result-contract.js +10 -22
- package/dist/result-contract.js.map +1 -1
- package/dist/runner.js +43 -1
- package/dist/runner.js.map +1 -1
- package/package.json +11 -5
|
@@ -0,0 +1,1131 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Reusable workflow tool registration for any MCP Server instance.
|
|
3
|
+
*
|
|
4
|
+
* Extracted from server.ts to allow both:
|
|
5
|
+
* - Standalone STDIO server (ClinkSession child process backend)
|
|
6
|
+
* - In-process integration (LocalClinkBackend via root ClinkX)
|
|
7
|
+
*
|
|
8
|
+
* The `registerWorkflowTools()` function registers three tools on a Server:
|
|
9
|
+
* - run_workflow: start a new workflow execution
|
|
10
|
+
* - resume_workflow: resume a failed/cancelled run
|
|
11
|
+
* - list_workflows: list available workflow definitions
|
|
12
|
+
*
|
|
13
|
+
* The caller provides a `createBackend` factory that determines how clink
|
|
14
|
+
* calls are dispatched — either via child process or in-process.
|
|
15
|
+
*/
|
|
16
|
+
import { CallToolRequestSchema, ListToolsRequestSchema, ErrorCode, McpError, } from "@modelcontextprotocol/sdk/types.js";
|
|
17
|
+
import { loadWorkflow, listWorkflowCatalog } from "./loader.js";
|
|
18
|
+
import { WorkflowEngine } from "./engine.js";
|
|
19
|
+
import { WorkflowStateStore } from "./state.js";
|
|
20
|
+
import { validateRunId } from "./run-id.js";
|
|
21
|
+
import { getTemplatesPath } from "./config.js";
|
|
22
|
+
import { WorkflowCancellationError, } from "./errors.js";
|
|
23
|
+
import { shutdownCoordinator } from "./shutdown.js";
|
|
24
|
+
import { basename } from "node:path";
|
|
25
|
+
import { logger } from "./logger.js";
|
|
26
|
+
import { createValidatorRegistry } from "./validators/index.js";
|
|
27
|
+
import { deriveDecisiveFailure } from "./run-summary-derived.js";
|
|
28
|
+
import { buildRunSummaryOrUndefined, buildFailureRunSummary } from "./run-summary.js";
|
|
29
|
+
import { extractCard } from "./run-summary/cards/index.js";
|
|
30
|
+
import { renderWorkflowReceipt } from "./workflow-receipt.js";
|
|
31
|
+
// ---------------------------------------------------------------------------
|
|
32
|
+
// Admission control
|
|
33
|
+
// ---------------------------------------------------------------------------
|
|
34
|
+
/** Single-flight admission state: the run currently holding the server, or null. */
let activeMutex = null;
/**
 * Claim the single-workflow slot for `runId`.
 * Throws an InvalidRequest McpError when another run already holds it.
 */
function acquireMutex(runId) {
    if (activeMutex == null) {
        activeMutex = { runId, startedAt: new Date().toISOString() };
        return;
    }
    const reason = `A workflow is already running (run_id: "${activeMutex.runId}", started: ${activeMutex.startedAt}). ` +
        `Wait for it to complete or cancel it before starting another. ` +
        `Only one workflow can run at a time per server instance.`;
    throw new McpError(ErrorCode.InvalidRequest, reason);
}
/** Free the single-workflow slot. Safe to call when nothing is held. */
function releaseMutex() {
    activeMutex = null;
}
|
|
46
|
+
// ---------------------------------------------------------------------------
|
|
47
|
+
// Variable validation (6-6)
|
|
48
|
+
// ---------------------------------------------------------------------------
|
|
49
|
+
/**
 * Render a one-line summary of a workflow's variable schema for error messages.
 * A variable is tagged ", required" only when it is required AND has no default.
 * Returns "" for an empty schema.
 */
function formatExpectedVariables(schema) {
    const described = Object.entries(schema).map(([name, def]) => {
        const requiredTag = def.required && def.default === undefined ? ", required" : "";
        return `${name} (${def.type}${requiredTag})`;
    });
    if (described.length === 0) {
        return "";
    }
    return `Expected variables: ${described.join(", ")}`;
}
|
|
57
|
+
/**
 * Validate user-supplied variables against a workflow's variable schema.
 *
 * Resolution rules, in order per schema entry:
 * - supplied value present → type-checked, then accepted;
 * - absent with a default → default used (defaults are not type-checked);
 * - absent, required, no default → error;
 * - absent, optional, no default → omitted from the result.
 * Keys not declared in the schema are rejected.
 *
 * Throws an InvalidParams McpError aggregating every problem found,
 * suffixed with the expected-variables summary when non-empty.
 * Returns the resolved variable map on success.
 */
function validateVariables(supplied, schema) {
    const resolved = {};
    const problems = [];
    for (const [name, def] of Object.entries(schema)) {
        const value = supplied?.[name];
        if (value !== undefined) {
            const typeError = validateVariableType(name, value, def.type);
            if (typeError != null) {
                problems.push(typeError);
            }
            else {
                resolved[name] = value;
            }
            continue;
        }
        if (def.default !== undefined) {
            resolved[name] = def.default;
        }
        else if (def.required) {
            problems.push(`Missing required variable "${name}"`);
        }
        // Optional with no default: simply omitted.
    }
    // Reject unknown variables (reported after schema-declared problems).
    const knownNames = new Set(Object.keys(schema));
    for (const key of Object.keys(supplied ?? {})) {
        if (!knownNames.has(key)) {
            problems.push(`Unknown variable "${key}"`);
        }
    }
    if (problems.length > 0) {
        const expected = formatExpectedVariables(schema);
        const suffix = expected !== "" ? `. ${expected}` : "";
        throw new McpError(ErrorCode.InvalidParams, `Variable validation failed: ${problems.join("; ")}${suffix}`);
    }
    return resolved;
}
/**
 * Type-check a single variable value against its declared schema type.
 * Returns null when the value conforms, otherwise a human-readable error.
 * "number" additionally requires finiteness (rejects NaN/Infinity).
 */
function validateVariableType(name, value, expectedType) {
    if (expectedType === "string") {
        return typeof value === "string"
            ? null
            : `Variable "${name}" must be a string, got ${typeof value}`;
    }
    if (expectedType === "number") {
        return typeof value === "number" && Number.isFinite(value)
            ? null
            : `Variable "${name}" must be a finite number`;
    }
    if (expectedType === "boolean") {
        return typeof value === "boolean"
            ? null
            : `Variable "${name}" must be a boolean, got ${typeof value}`;
    }
    if (expectedType === "string[]") {
        return Array.isArray(value) && value.every((v) => typeof v === "string")
            ? null
            : `Variable "${name}" must be a string array`;
    }
    return `Variable "${name}" has unknown type "${expectedType}"`;
}
|
|
121
|
+
// ---------------------------------------------------------------------------
|
|
122
|
+
// Progress (6-7) — WorkflowProgressReporter
|
|
123
|
+
// ---------------------------------------------------------------------------
|
|
124
|
+
/** Heartbeat interval matching root progress.ts pattern (liveness ping cadence). */
const HEARTBEAT_INTERVAL_MS = 5_000;
/** Percentage reserved for startup notification. */
const STARTUP_RESERVE = 5;
/** Percentage reserved at the top for finalization; the execution band spans [STARTUP_RESERVE, 100 - FINALIZATION_RESERVE]. */
const FINALIZATION_RESERVE = 5;
/** Width of the execution band available to per-call progress slices. */
const EXECUTION_BAND = 100 - STARTUP_RESERVE - FINALIZATION_RESERVE; // 90
/** Decay constant for asymptotic heartbeat ramp. */
const HEARTBEAT_DECAY_K = 0.05;
|
|
134
|
+
/**
|
|
135
|
+
* Humanize a stage ID: replace underscores with spaces, capitalize first letter only.
|
|
136
|
+
* e.g. "cross_review" → "Cross review", "ranking_analysis" → "Ranking analysis"
|
|
137
|
+
*/
|
|
138
|
+
/**
 * Humanize a stage ID: underscores become spaces, first letter capitalized,
 * remainder left untouched. e.g. "cross_review" → "Cross review".
 * Returns "" unchanged for an empty ID.
 */
export function humanizeStageId(id) {
    const words = id.replace(/_/g, " ");
    if (words === "") {
        return words;
    }
    return words[0].toUpperCase() + words.substring(1);
}
|
|
142
|
+
/**
 * Rich percentage-based progress reporter for workflow executions.
 *
 * Replaces the flat monotonic counter with real 0-100 percentages
 * derived from the workflow definition's call count. Includes a
 * heartbeat timer for liveness signals during long-running calls.
 */
class WorkflowProgressReporter {
    /** MCP Server instance used to send `notifications/progress`. */
    server;
    /** Progress token from the originating request; echoed back in every notification. */
    progressToken;
    /** Run identifier, included in the startup message. */
    runId;
    /** Workflow display name, from the definition. */
    workflowName;
    /** Total number of calls across all stages (computed in the constructor). */
    totalCalls;
    /** Total number of stages in the definition. */
    totalStages;
    /** Percentage width of each call's slice of the execution band. */
    bandWidth;
    /** Per-call bands keyed by "stageId.callId": index, labels, floor/ceiling percentages. */
    callBands = new Map();
    /** Monotonic guard — highest progress value emitted. */
    lastProgress = 0;
    /** Heartbeat timer handle. */
    heartbeatTimer;
    /** Currently running calls, keyed by "stageId.callId". */
    runningCalls = new Map();
    constructor(server, progressToken, runId, definition) {
        this.server = server;
        this.progressToken = progressToken;
        this.runId = runId;
        this.workflowName = definition.name;
        this.totalStages = definition.stages.length;
        // Build per-call bands: each call gets a slice of [5..95]
        let globalIndex = 0;
        for (let si = 0; si < definition.stages.length; si++) {
            const stage = definition.stages[si];
            // Prefer the author-provided description; fall back to a humanized ID.
            const stageLabel = stage.description ?? humanizeStageId(stage.id);
            for (const call of stage.calls) {
                const key = `${stage.id}.${call.id}`;
                this.callBands.set(key, {
                    globalIndex,
                    stageIndex: si + 1,
                    stageId: stage.id,
                    stageLabel,
                    floor: 0, // filled below
                    ceiling: 0,
                });
                globalIndex++;
            }
        }
        this.totalCalls = globalIndex;
        // Guard against zero-call definitions: keep bandWidth finite.
        this.bandWidth = this.totalCalls > 0 ? EXECUTION_BAND / this.totalCalls : EXECUTION_BAND;
        // Compute floor/ceiling for each call band
        for (const [key, band] of this.callBands) {
            this.callBands.set(key, {
                ...band,
                floor: STARTUP_RESERVE + band.globalIndex * this.bandWidth,
                ceiling: STARTUP_RESERVE + (band.globalIndex + 1) * this.bandWidth,
            });
        }
    }
    /**
     * Seed progress offset for resumed workflows.
     * Call before engine.execute() with the count of already-completed calls.
     */
    seedCompletedCalls(count) {
        if (count > 0 && this.totalCalls > 0) {
            this.lastProgress = Math.floor(STARTUP_RESERVE + Math.min(count, this.totalCalls) * this.bandWidth);
        }
    }
    /** Engine onProgress hook handler. Dispatches by event scope. */
    async onProgress(event) {
        if (event.scope === "run") {
            await this.handleRunProgress(event);
        }
        else if (event.scope === "call" && event.stageId != null && event.callId != null) {
            await this.handleCallProgress(event);
        }
        // Stage-level events are covered by call-level granularity — no separate MCP emission.
    }
    /** Engine onRetryScheduled hook handler. Emits an informational message only. */
    async onRetryScheduled(event) {
        const band = this.callBands.get(`${event.stageId}.${event.callId}`);
        if (band == null)
            return;
        const delaySec = Math.round(event.delayMs / 1000);
        const message = `[${String(band.stageIndex)}/${String(this.totalStages)}] ${band.stageLabel} | ` +
            `${event.callId} retrying in ${String(delaySec)}s (attempt ${String(event.nextRetryCount + 1)}, ${event.failureClass})`;
        // Informational only — do not advance progress percentage
        await this.emit(this.lastProgress, message);
    }
    /** Stop heartbeat timer. Must be called in finally blocks. */
    stop() {
        this.stopHeartbeat();
    }
    // ── Internal ──────────────────────────────────────────────
    /** Handle run-scoped progress: startup (2%) and terminal (100%) notifications. */
    async handleRunProgress(event) {
        switch (event.state) {
            case "running": {
                const message = `[run_id: ${this.runId}] ${this.workflowName} starting ` +
                    `(${String(this.totalStages)} stages, ${String(this.totalCalls)} calls)`;
                await this.emit(2, message);
                break;
            }
            case "succeeded":
            case "failed":
            case "cancelled": {
                this.stopHeartbeat();
                const suffix = event.state === "succeeded" ? "complete" : event.state;
                await this.emit(100, `${this.workflowName} ${suffix}`);
                break;
            }
        }
    }
    /** Handle call-scoped progress: maps call lifecycle into the call's band. */
    async handleCallProgress(event) {
        const callKey = `${event.stageId}.${event.callId}`;
        const band = this.callBands.get(callKey);
        if (band == null)
            return;
        const prefix = `[${String(band.stageIndex)}/${String(this.totalStages)}] ${band.stageLabel}`;
        switch (event.state) {
            case "running": {
                this.runningCalls.set(callKey, {
                    stageId: event.stageId,
                    callId: event.callId,
                    startedAt: Date.now(),
                    band,
                });
                this.startHeartbeatIfNeeded();
                // Enter the band at floor + 10% of its width.
                const progress = Math.floor(band.floor + 0.1 * this.bandWidth);
                const retryInfo = (event.retryCount ?? 0) > 0
                    ? ` (attempt ${String((event.retryCount ?? 0) + 1)})`
                    : "";
                await this.emit(progress, `${prefix} | ${event.callId} running${retryInfo}`);
                break;
            }
            case "succeeded": {
                this.runningCalls.delete(callKey);
                this.stopHeartbeatIfIdle();
                await this.emit(Math.floor(band.ceiling), `${prefix} | ${event.callId} succeeded`);
                break;
            }
            case "failed": {
                this.runningCalls.delete(callKey);
                this.stopHeartbeatIfIdle();
                // Failed calls still consume their band so later calls line up.
                await this.emit(Math.floor(band.ceiling), `${prefix} | ${event.callId} failed`);
                break;
            }
            case "cancelled": {
                this.runningCalls.delete(callKey);
                this.stopHeartbeatIfIdle();
                // Do not advance progress on cancellation
                await this.emit(this.lastProgress, `${prefix} | ${event.callId} cancelled`);
                break;
            }
        }
    }
    /** Start the self-scheduling heartbeat loop if it is not already running. */
    startHeartbeatIfNeeded() {
        if (this.heartbeatTimer != null)
            return;
        const tick = () => {
            const earliest = this.findEarliestRunningCall();
            if (earliest == null) {
                this.stopHeartbeat();
                return;
            }
            const elapsedSec = Math.round((Date.now() - earliest.startedAt) / 1000);
            const progress = this.computeHeartbeatProgress(earliest.band, earliest.startedAt);
            const prefix = `[${String(earliest.band.stageIndex)}/${String(this.totalStages)}] ${earliest.band.stageLabel}`;
            const message = `${prefix} | ${earliest.callId} running (${String(elapsedSec)}s)`;
            // Self-scheduling: next tick fires after current send completes
            void this.emit(progress, message).finally(() => {
                // Only reschedule if stopHeartbeat() was not called during the send.
                if (this.heartbeatTimer != null) {
                    this.heartbeatTimer = setTimeout(tick, HEARTBEAT_INTERVAL_MS);
                    this.heartbeatTimer.unref();
                }
            });
        };
        this.heartbeatTimer = setTimeout(tick, HEARTBEAT_INTERVAL_MS);
        this.heartbeatTimer.unref();
    }
    /** Cancel and clear the heartbeat timer if present. */
    stopHeartbeat() {
        if (this.heartbeatTimer != null) {
            clearTimeout(this.heartbeatTimer);
            this.heartbeatTimer = undefined;
        }
    }
    /** Stop the heartbeat once no calls remain in flight. */
    stopHeartbeatIfIdle() {
        if (this.runningCalls.size === 0) {
            this.stopHeartbeat();
        }
    }
    /** Return the in-flight call with the oldest start time, or undefined. */
    findEarliestRunningCall() {
        let earliest;
        for (const entry of this.runningCalls.values()) {
            if (earliest == null || entry.startedAt < earliest.startedAt) {
                earliest = entry;
            }
        }
        return earliest;
    }
    /**
     * Asymptotic ramp from floor+10% toward ceiling-10% within a call's band.
     * Uses exponential decay so each heartbeat tick adds diminishing increments.
     */
    computeHeartbeatProgress(band, startedAt) {
        const base = band.floor + 0.1 * this.bandWidth;
        const target = band.ceiling - 0.1 * this.bandWidth;
        const range = target - base;
        if (range <= 0)
            return Math.floor(base);
        const elapsedTicks = (Date.now() - startedAt) / HEARTBEAT_INTERVAL_MS;
        const raw = target - range * Math.exp(-HEARTBEAT_DECAY_K * elapsedTicks);
        return Math.floor(Math.min(raw, target));
    }
    /**
     * Send an MCP progress notification.
     * Enforces monotonically increasing progress; best-effort delivery.
     */
    async emit(progress, message) {
        const clamped = Math.min(Math.max(progress, this.lastProgress), 100);
        try {
            await this.server.notification({
                method: "notifications/progress",
                params: {
                    progressToken: this.progressToken,
                    progress: clamped,
                    total: 100,
                    message,
                },
            });
            // Only advance the monotonic guard after a successful send.
            this.lastProgress = clamped;
        }
        catch {
            // Best-effort — host may not support progress
        }
    }
}
|
|
376
|
+
// ---------------------------------------------------------------------------
|
|
377
|
+
// Debug metadata (6-8)
|
|
378
|
+
// ---------------------------------------------------------------------------
|
|
379
|
+
/**
 * Build the pretty-printed JSON debug block for a completed workflow result.
 * Enriches each call result with timing/retry info from the state store (when
 * available) and appends a truncated fingerprint. Key insertion order is
 * deliberate — it determines the JSON.stringify output order.
 */
function buildDebugBlock(runId, result, store) {
    const state = store?.runState;
    const stages = result.stages.map((stageResult) => {
        // Matching state entry for this stage, if the store tracked it.
        const stageState = state?.stages.find((s) => s.stageId === stageResult.stageId);
        const calls = stageResult.calls.map((callResult) => {
            const callState = stageState?.calls.find((c) => c.callId === callResult.callId);
            const entry = {
                call_id: callResult.callId,
                ok: callResult.ok,
                duration_ms: callState?.durationMs,
                retry_count: callState?.retryCount ?? 0,
            };
            if (callResult.error != null) {
                entry.error = callResult.error;
            }
            if (callResult.fidelity != null) {
                entry.truncation = {
                    response: callResult.fidelity.responseTruncated,
                    capture: callResult.fidelity.captureTruncated,
                };
            }
            return entry;
        });
        const stageEntry = {
            stage_id: stageResult.stageId,
            ok: stageResult.ok,
            calls,
        };
        if (stageResult.aggregateArtifactPath != null) {
            stageEntry.aggregate_artifact = stageResult.aggregateArtifactPath;
        }
        return stageEntry;
    });
    const debug = {
        run_id: runId,
        stages,
        artifacts_dir: result.artifactsDir,
    };
    if (state?.fingerprint != null) {
        debug.fingerprint = {
            algorithm: state.fingerprint.algorithm,
            // Truncate the hash — full value lives in the state file.
            hash: state.fingerprint.hash.substring(0, 16) + "...",
        };
    }
    return JSON.stringify(debug, null, 2);
}
|
|
422
|
+
/**
 * Build the pretty-printed JSON debug block for a failed/errored workflow,
 * derived entirely from the state store. Falls back to a minimal
 * `{ run_id }` object when no run state is available.
 */
function buildErrorDebugBlock(runId, store) {
    const state = store?.runState;
    if (state == null) {
        return JSON.stringify({ run_id: runId }, null, 2);
    }
    const stages = state.stages.map((stage) => {
        const calls = stage.calls.map((call) => {
            const entry = {
                call_id: call.callId,
                state: call.state,
                retry_count: call.retryCount,
                duration_ms: call.durationMs,
            };
            if (call.errorMessage != null) {
                entry.error = call.errorMessage;
            }
            if (call.failureClass != null) {
                entry.failure_class = call.failureClass;
            }
            if (call.childDebugMetadata != null) {
                entry.child_debug = call.childDebugMetadata;
            }
            return entry;
        });
        return {
            stage_id: stage.stageId,
            state: stage.state,
            calls,
        };
    });
    const debug = {
        run_id: runId,
        status: state.status,
        stages,
    };
    if (state.fingerprint != null) {
        debug.fingerprint = {
            algorithm: state.fingerprint.algorithm,
            // Truncate the hash — full value lives in the state file.
            hash: state.fingerprint.hash.substring(0, 16) + "...",
        };
    }
    return JSON.stringify(debug, null, 2);
}
|
|
458
|
+
// ---------------------------------------------------------------------------
|
|
459
|
+
// Card extractor adapter
|
|
460
|
+
// ---------------------------------------------------------------------------
|
|
461
|
+
/**
|
|
462
|
+
* Adapt `extractCard()` from `./run-summary/cards/index.js` into the
|
|
463
|
+
* `RunSummaryCardExtractor` signature expected by `buildSuccessRunSummary()`.
|
|
464
|
+
*/
|
|
465
|
+
/**
 * Adapt `extractCard()` from `./run-summary/cards/index.js` into the
 * `RunSummaryCardExtractor` signature expected by `buildSuccessRunSummary()`.
 * The returned function groups parsed call artifacts by stage then call ID
 * (skipping calls without a parsed artifact) before delegating.
 */
function createCardExtractor(definition) {
    return (calls) => {
        const stageCallArtifacts = new Map();
        for (const call of calls) {
            const parsed = call.artifact?.parsed;
            if (parsed === undefined) {
                continue;
            }
            const existing = stageCallArtifacts.get(call.stageId);
            if (existing != null) {
                existing.set(call.callId, parsed);
            }
            else {
                stageCallArtifacts.set(call.stageId, new Map([[call.callId, parsed]]));
            }
        }
        return extractCard({
            definitionName: definition.name,
            stageCallArtifacts,
        });
    };
}
|
|
485
|
+
// ---------------------------------------------------------------------------
|
|
486
|
+
// Receipt-aware content block assembly (2-7)
|
|
487
|
+
// ---------------------------------------------------------------------------
|
|
488
|
+
/**
|
|
489
|
+
* Shared content block assembly for both `executeRunWorkflow()` and
|
|
490
|
+
* `executeResumeWorkflow()`. Ordering contract:
|
|
491
|
+
*
|
|
492
|
+
* content[0] — raw result (always present, byte-for-byte unchanged)
|
|
493
|
+
* content[1] — receipt (when receipt=true and summary exists)
|
|
494
|
+
* content[N] — debug (when debug=true)
|
|
495
|
+
*
|
|
496
|
+
* Receipt rendering is fail-open: errors are logged and silently omitted.
|
|
497
|
+
*/
|
|
498
|
+
/**
 * Shared content block assembly for both `executeRunWorkflow()` and
 * `executeResumeWorkflow()`. Ordering contract:
 *
 *   content[0] — raw result (always present, byte-for-byte unchanged)
 *   content[1] — receipt (when receipt=true and summary exists)
 *   content[N] — debug (when debug=true)
 *
 * Receipt rendering is fail-open: errors are logged and silently omitted.
 */
function finalizeWorkflowPresentation(options) {
    const { outputText, receipt, summary, debug, debugBlock } = options;
    const blocks = [];
    // content[0] — the raw result, always present.
    blocks.push({ type: "text", text: outputText });
    // content[1] — optional receipt; a rendering failure never breaks the response.
    if (receipt && summary !== undefined) {
        try {
            const receiptText = renderWorkflowReceipt(summary);
            blocks.push({ type: "text", text: receiptText });
        }
        catch (err) {
            logger.warn({ err }, "receipt rendering failed, omitting receipt");
        }
    }
    // content[N] — optional debug metadata block.
    if (debug && debugBlock !== undefined) {
        blocks.push({ type: "text", text: debugBlock });
    }
    return blocks;
}
|
|
518
|
+
// ---------------------------------------------------------------------------
// Actionable error formatting (5-6)
// ---------------------------------------------------------------------------
/**
 * Build a human-actionable failure message for a workflow run: run id,
 * workflow source, failing call location, retry count, diagnostic details
 * from the decisive call, and a suggested next action (resume vs. re-run).
 */
function formatWorkflowError(runId, error, store, workflowSource) {
    const topMessage = error instanceof Error ? error.message : String(error);
    const runState = store?.runState;
    // Identify the decisive failed call — prefer the call in the stage whose
    // on_failure=abort terminated the workflow, not merely the first failed
    // call recorded in state.
    let locationSuffix = "";
    let retries = 0;
    let childMeta;
    let decisiveCallError;
    if (runState != null) {
        const failure = deriveDecisiveFailure(runState.stages, store?.definition);
        if (failure != null) {
            locationSuffix = ` at ${failure.location}`;
            const call = getCallEntryByLocation(runState.stages, failure.location);
            retries = call?.retryCount ?? 0;
            childMeta = call?.childDebugMetadata;
            decisiveCallError = call?.errorMessage;
        }
    }
    // Diagnostic details block. The decisive call's own error is surfaced
    // whenever it adds information beyond the top-level message — it carries
    // validator failure details, child rejection reasons, etc. that the
    // stage-abort message hides.
    const details = [];
    if (decisiveCallError != null && decisiveCallError !== topMessage) {
        details.push(` call_error: ${decisiveCallError}`);
    }
    if (childMeta != null) {
        if (childMeta.rejection_reason != null) {
            details.push(` rejection_reason: ${childMeta.rejection_reason}`);
        }
        if (childMeta.parse_status != null) {
            details.push(` parse_status: ${childMeta.parse_status}`);
        }
        if (childMeta.parse_source != null) {
            details.push(` parse_source: ${childMeta.parse_source}`);
        }
        if (childMeta.exit_code !== undefined) {
            details.push(` exit_code: ${String(childMeta.exit_code)}`);
        }
        if (childMeta.codex_panic_class != null) {
            details.push(` codex_panic_class: ${childMeta.codex_panic_class}`);
        }
    }
    const detailsBlock = details.length > 0
        ? `\n\nFailure details:\n${details.join("\n")}`
        : "";
    // Workflow source annotation, e.g. " (source: yaml [my-flow.yaml])".
    let sourceLabel = "";
    if (workflowSource != null) {
        const fileHint = workflowSource.sourcePath != null
            ? ` [${basename(workflowSource.sourcePath)}]`
            : "";
        sourceLabel = ` (source: ${workflowSource.source}${fileHint})`;
    }
    // Suggest the next action only when we actually have a store to resume.
    let nextAction = "";
    if (store != null) {
        nextAction = runState?.resumable === true
            ? `Use resume_workflow with run_id "${runId}" to retry from the last checkpoint.`
            : `Start a new run with run_workflow (this run is not resumable — set CLINKX_WORKFLOWS_CHILD_CONFIG_PATH for resumable runs).`;
    }
    const retrySuffix = retries > 0 ? ` (after ${String(retries)} retries)` : "";
    const actionSuffix = nextAction !== "" ? `\n\n${nextAction}` : "";
    return `Workflow failed [run_id: ${runId}]${sourceLabel}${locationSuffix}${retrySuffix}` +
        `: ${topMessage}${detailsBlock}${actionSuffix}`;
}
|
|
585
|
+
/**
 * Resolve a "stageId.callId" location string to its call entry in `stages`.
 * Only the first "." separates stage from call, so call ids may themselves
 * contain dots. Returns undefined for malformed locations or misses.
 */
function getCallEntryByLocation(stages, location) {
    const sep = location.indexOf(".");
    if (sep < 0) {
        // No separator — not a valid "stage.call" location.
        return undefined;
    }
    const stageId = location.substring(0, sep);
    const callId = location.substring(sep + 1);
    for (const stage of stages) {
        if (stage.stageId === stageId) {
            return stage.calls.find((call) => call.callId === callId);
        }
    }
    return undefined;
}
|
|
595
|
+
// ---------------------------------------------------------------------------
// Tool execution helpers
// ---------------------------------------------------------------------------
/**
 * Execute the run_workflow tool: validate args, load the workflow, create a
 * state store and backend, run the engine, and package the result as MCP
 * content blocks via finalizeWorkflowPresentation().
 *
 * Ordering is load-bearing here: the run mutex is acquired only after
 * argument/workflow validation (so bad input never holds the lock), and the
 * finally block tears down reporter → backend → store → mutex regardless of
 * outcome. Engine failures (other than cancellation) are converted into an
 * isError tool result rather than thrown, so callers always get content.
 */
async function executeRunWorkflow(server, options, args, signal, progressToken) {
    const workflow = args["workflow"];
    const runId = args["run_id"];
    const variables = args["variables"];
    const debug = args["debug"] === true;
    const receipt = args["receipt"] === true;
    if (typeof workflow !== "string" || workflow === "") {
        throw new McpError(ErrorCode.InvalidParams, 'Missing or invalid "workflow" parameter');
    }
    if (typeof runId !== "string" || runId === "") {
        throw new McpError(ErrorCode.InvalidParams, 'Missing or invalid "run_id" parameter');
    }
    // Re-wrap validateRunId failures as protocol-level InvalidParams.
    try {
        validateRunId(runId);
    }
    catch {
        throw new McpError(ErrorCode.InvalidParams, `Invalid run_id "${runId}". Must match [A-Za-z0-9_-]{1,64}.`);
    }
    const validatorRegistry = createValidatorRegistry();
    const loaded = loadWorkflow(workflow, {
        templatesRoot: options.templatesRoot ?? getTemplatesPath(),
        validatorRegistry,
    });
    const resolvedVariables = validateVariables(variables, loaded.definition.variables);
    // Mutex is taken only after all validation passed; released in finally.
    acquireMutex(runId);
    let store = null;
    let backend = null;
    let reporter;
    try {
        // Create state store (resumable if child config path is set)
        store = await WorkflowStateStore.create({
            runId,
            definition: loaded.normalized,
            variables: resolvedVariables,
            debugKeepArtifacts: debug,
        });
        // Create backend via factory
        backend = options.createBackend(runId);
        await backend.connect();
        // Wire up shutdown coordinator
        const controller = new AbortController();
        // Propagate an already-aborted or later-aborted MCP signal into the
        // engine's own controller as a WorkflowCancellationError.
        if (signal.aborted) {
            controller.abort(new WorkflowCancellationError());
        }
        else {
            signal.addEventListener("abort", () => {
                controller.abort(new WorkflowCancellationError());
            }, { once: true });
        }
        shutdownCoordinator.register(backend, controller, async () => {
            if (store != null) {
                await store.markCancelled();
            }
        });
        // Build engine hooks — capture store in a const for closure narrowing
        const storeRef = store;
        reporter = progressToken != null
            ? new WorkflowProgressReporter(server, progressToken, runId, loaded.normalized)
            : undefined;
        const rpt = reporter; // local const for closure narrowing
        // When a reporter exists, fan progress/retry events out to BOTH the
        // store's hooks and the reporter; otherwise use the store hooks alone.
        const hooks = {
            ...storeRef.hooks,
            ...(rpt != null
                ? {
                    onProgress: async (event) => {
                        await storeRef.hooks.onProgress?.(event);
                        await rpt.onProgress(event);
                    },
                    onRetryScheduled: async (event) => {
                        await storeRef.hooks.onRetryScheduled?.(event);
                        await rpt.onRetryScheduled(event);
                    },
                }
                : {}),
        };
        const engine = new WorkflowEngine({
            definition: loaded.normalized,
            session: backend,
            variables: resolvedVariables,
            validatorRegistry,
            runId,
            runDir: storeRef.runDir,
            signal: controller.signal,
            hooks,
        });
        const result = await engine.execute();
        shutdownCoordinator.unregister();
        // Build summary (fail-open)
        let summary;
        if (receipt) {
            try {
                const cardExtractor = createCardExtractor(loaded.normalized);
                summary = buildRunSummaryOrUndefined(result, store, cardExtractor);
            }
            catch (err) {
                logger.warn({ err }, "run summary construction failed, omitting receipt");
            }
        }
        const content = finalizeWorkflowPresentation({
            outputText: result.outputText,
            receipt,
            debug,
            summary,
            debugBlock: debug ? buildDebugBlock(runId, result, store) : undefined,
        });
        return { content };
    }
    catch (error) {
        shutdownCoordinator.unregister();
        // Cancellation is rethrown (the MCP layer reports it); all other
        // failures become an isError tool result with an actionable message.
        if (error instanceof WorkflowCancellationError) {
            if (store != null) {
                await store.markCancelled();
            }
            throw error;
        }
        const errorText = formatWorkflowError(runId, error, store, {
            source: loaded.source,
            sourcePath: loaded.sourcePath,
        });
        // Build failure summary (fail-open)
        let failureSummary;
        if (receipt && store != null) {
            try {
                failureSummary = buildFailureRunSummary(store);
            }
            catch (err) {
                logger.warn({ err }, "failure summary construction failed, omitting receipt");
            }
        }
        const content = finalizeWorkflowPresentation({
            outputText: errorText,
            receipt,
            debug,
            summary: failureSummary,
            debugBlock: debug && store != null ? buildErrorDebugBlock(runId, store) : undefined,
        });
        return { content, isError: true };
    }
    finally {
        // Cleanup never throws: each step is individually guarded so a failed
        // terminate/close cannot leak the mutex or mask the real result.
        reporter?.stop();
        try {
            if (backend != null && !backend.isTerminated) {
                await backend.terminate();
            }
        }
        catch (cleanupErr) {
            logger.warn({ err: cleanupErr }, "backend.terminate() failed during cleanup");
        }
        try {
            if (store != null) {
                await store.close();
            }
        }
        catch (cleanupErr) {
            logger.warn({ err: cleanupErr }, "store.close() failed during cleanup");
        }
        releaseMutex();
    }
}
|
|
757
|
+
/**
 * Execute the resume_workflow tool: reopen the persisted state store for a
 * prior run, rebuild backend + hooks, and re-run the engine with
 * `resume: store.resume` so completed calls are skipped.
 *
 * Mirrors executeRunWorkflow()'s control flow (mutex, abort wiring, fail-open
 * receipt, guarded cleanup); the differences are that the definition and
 * variables come from the store rather than loadWorkflow(), progress is
 * seeded from already-completed calls, and formatWorkflowError() is called
 * without a workflowSource (the original source isn't known here).
 */
async function executeResumeWorkflow(server, options, args, signal, progressToken) {
    const runId = args["run_id"];
    const debug = args["debug"] === true;
    const receipt = args["receipt"] === true;
    if (typeof runId !== "string" || runId === "") {
        throw new McpError(ErrorCode.InvalidParams, 'Missing or invalid "run_id" parameter');
    }
    // Re-wrap validateRunId failures as protocol-level InvalidParams.
    try {
        validateRunId(runId);
    }
    catch {
        throw new McpError(ErrorCode.InvalidParams, `Invalid run_id "${runId}". Must match [A-Za-z0-9_-]{1,64}.`);
    }
    // Mutex is taken before store resume; released in finally.
    acquireMutex(runId);
    let store = null;
    let backend = null;
    let reporter;
    try {
        const resumeOptions = {
            runId,
            debugKeepArtifacts: debug,
        };
        store = await WorkflowStateStore.resume(resumeOptions);
        const validatorRegistry = createValidatorRegistry();
        // Create backend via factory
        backend = options.createBackend(runId);
        await backend.connect();
        // Wire up shutdown coordinator
        const controller = new AbortController();
        // Propagate an already-aborted or later-aborted MCP signal into the
        // engine's own controller as a WorkflowCancellationError.
        if (signal.aborted) {
            controller.abort(new WorkflowCancellationError());
        }
        else {
            signal.addEventListener("abort", () => {
                controller.abort(new WorkflowCancellationError());
            }, { once: true });
        }
        shutdownCoordinator.register(backend, controller, async () => {
            await store.markCancelled();
        });
        // Build engine hooks with progress reporter
        reporter = progressToken != null
            ? new WorkflowProgressReporter(server, progressToken, runId, store.definition)
            : undefined;
        if (reporter != null) {
            // Seed progress from already-completed calls so resumed runs don't start at 0%
            const completedCount = store.resume.completedCalls?.size ?? 0;
            reporter.seedCompletedCalls(completedCount);
        }
        const rpt = reporter; // local const for closure narrowing
        // When a reporter exists, fan progress/retry events out to BOTH the
        // store's hooks and the reporter; otherwise use the store hooks alone.
        const hooks = {
            ...store.hooks,
            ...(rpt != null
                ? {
                    onProgress: async (event) => {
                        await store.hooks.onProgress?.(event);
                        await rpt.onProgress(event);
                    },
                    onRetryScheduled: async (event) => {
                        await store.hooks.onRetryScheduled?.(event);
                        await rpt.onRetryScheduled(event);
                    },
                }
                : {}),
        };
        const engine = new WorkflowEngine({
            definition: store.definition,
            session: backend,
            variables: store.variables,
            validatorRegistry,
            runId,
            runDir: store.runDir,
            signal: controller.signal,
            // NOTE(review): when rpt is null, `hooks` already equals
            // store.hooks (the conditional spread added nothing), so this
            // ternary appears redundant — harmless either way.
            hooks: rpt != null ? hooks : store.hooks,
            resume: store.resume,
        });
        const result = await engine.execute();
        shutdownCoordinator.unregister();
        // Build summary (fail-open)
        let summary;
        if (receipt) {
            try {
                const cardExtractor = createCardExtractor(store.definition);
                summary = buildRunSummaryOrUndefined(result, store, cardExtractor);
            }
            catch (err) {
                logger.warn({ err }, "run summary construction failed, omitting receipt");
            }
        }
        const content = finalizeWorkflowPresentation({
            outputText: result.outputText,
            receipt,
            debug,
            summary,
            debugBlock: debug ? buildDebugBlock(runId, result, store) : undefined,
        });
        return { content };
    }
    catch (error) {
        shutdownCoordinator.unregister();
        // Cancellation is rethrown (the MCP layer reports it); all other
        // failures become an isError tool result with an actionable message.
        if (error instanceof WorkflowCancellationError) {
            if (store != null) {
                await store.markCancelled();
            }
            throw error;
        }
        const errorText = formatWorkflowError(runId, error, store);
        // Build failure summary (fail-open)
        let failureSummary;
        if (receipt && store != null) {
            try {
                failureSummary = buildFailureRunSummary(store);
            }
            catch (err) {
                logger.warn({ err }, "failure summary construction failed, omitting receipt");
            }
        }
        const content = finalizeWorkflowPresentation({
            outputText: errorText,
            receipt,
            debug,
            summary: failureSummary,
            debugBlock: debug && store != null ? buildErrorDebugBlock(runId, store) : undefined,
        });
        return { content, isError: true };
    }
    finally {
        // Cleanup never throws: each step is individually guarded so a failed
        // terminate/close cannot leak the mutex or mask the real result.
        reporter?.stop();
        try {
            if (backend != null && !backend.isTerminated) {
                await backend.terminate();
            }
        }
        catch (cleanupErr) {
            logger.warn({ err: cleanupErr }, "backend.terminate() failed during cleanup");
        }
        try {
            if (store != null) {
                await store.close();
            }
        }
        catch (cleanupErr) {
            logger.warn({ err: cleanupErr }, "store.close() failed during cleanup");
        }
        releaseMutex();
    }
}
|
|
904
|
+
/**
 * Execute the list_workflows tool: serialize the workflow catalog (name,
 * description, version, source, shadowing flag, optional file basename, and
 * per-variable schema) as pretty-printed JSON in a single text content block.
 */
function executeListWorkflows() {
    const catalog = listWorkflowCatalog();
    const entries = [...catalog].map(([name, entry]) => {
        const def = entry.definition;
        // Per-variable schema: type/required always, default/description
        // only when present (key order is preserved in the JSON output).
        const variables = {};
        for (const [varName, varDef] of Object.entries(def.variables)) {
            const described = { type: varDef.type, required: varDef.required };
            if (varDef.default !== undefined) {
                described.default = varDef.default;
            }
            if (varDef.description != null) {
                described.description = varDef.description;
            }
            variables[varName] = described;
        }
        const item = {
            name,
            description: def.description,
            version: def.version,
            source: entry.source,
            shadowed_yaml: entry.shadowedYaml,
        };
        // Only the basename is exposed, never the full filesystem path.
        if (entry.path != null) {
            item.path = basename(entry.path);
        }
        item.variables = variables;
        return item;
    });
    return {
        content: [
            {
                type: "text",
                text: JSON.stringify({ workflows: entries }, null, 2),
            },
        ],
    };
}
|
|
936
|
+
// ---------------------------------------------------------------------------
// Tool input schemas (JSON Schema)
// ---------------------------------------------------------------------------
// Catalogs with more workflows than this get a names-only listing in the
// run_workflow description (see getWorkflowToolDefinitions) instead of
// per-workflow variable details.
const MAX_INLINE_WORKFLOW_DETAILS = 8;
// Input schema for run_workflow. getWorkflowToolDefinitions() clones this
// and may inject an `enum` of discovered workflow names into `workflow`.
const RUN_WORKFLOW_SCHEMA = {
    type: "object",
    properties: {
        workflow: {
            type: "string",
            description: "Name of the workflow to run",
        },
        run_id: {
            type: "string",
            description: "Unique run identifier. Must match [A-Za-z0-9_-]{1,64}",
            pattern: "^[A-Za-z0-9_-]{1,64}$",
        },
        variables: {
            type: "object",
            description: "Workflow input variables",
            additionalProperties: true,
        },
        debug: {
            type: "boolean",
            description: "When true, append execution metadata as a second content block",
            default: false,
        },
        receipt: {
            type: "boolean",
            description: "When true, append a structured receipt summarizing the workflow run as a content block",
            default: false,
        },
    },
    required: ["workflow", "run_id"],
    additionalProperties: false,
};
// Input schema for resume_workflow — identical to run_workflow minus
// `workflow`/`variables` (both are recovered from the persisted run state).
const RESUME_WORKFLOW_SCHEMA = {
    type: "object",
    properties: {
        run_id: {
            type: "string",
            description: "Run ID of a previously failed/cancelled workflow to resume",
            pattern: "^[A-Za-z0-9_-]{1,64}$",
        },
        debug: {
            type: "boolean",
            description: "When true, append execution metadata as a second content block",
            default: false,
        },
        receipt: {
            type: "boolean",
            description: "When true, append a structured receipt summarizing the workflow run as a content block",
            default: false,
        },
    },
    required: ["run_id"],
    additionalProperties: false,
};
// Input schema for list_workflows — takes no arguments.
const LIST_WORKFLOWS_SCHEMA = {
    type: "object",
    properties: {},
    additionalProperties: false,
};
|
|
998
|
+
/**
 * Return the three workflow tool definitions for tools/list.
 *
 * Dynamically discovers available workflows to include in the
 * run_workflow description. Safe to call repeatedly — no side effects.
 */
export function getWorkflowToolDefinitions() {
    // Catalog discovery is best-effort here; if it throws, list_workflows
    // will report the error at actual call time instead.
    let catalog;
    try {
        catalog = listWorkflowCatalog();
    }
    catch {
        // Non-fatal — list_workflows will report the error at call time
    }
    const names = catalog != null && catalog.size > 0
        ? [...catalog.keys()].sort()
        : [];
    // C1: Clone the static schema and constrain "workflow" to known names.
    const runSchema = structuredClone(RUN_WORKFLOW_SCHEMA);
    if (names.length > 0) {
        const props = runSchema["properties"];
        props["workflow"] = { ...props["workflow"], enum: names };
    }
    // C2 + C3: Enrich the run_workflow description with per-workflow
    // variable hints, capped to a names-only list for large catalogs.
    let workflowSuffix = "";
    if (names.length > MAX_INLINE_WORKFLOW_DETAILS) {
        workflowSuffix = `\nAvailable workflows: ${names.join(", ")}. Use list_workflows for variable schemas.`;
    }
    else if (names.length > 0) {
        const lines = names.map((name) => {
            const vars = catalog.get(name).definition.variables;
            const required = [];
            const optional = [];
            // A variable counts as "required" only when it is flagged
            // required AND has no default to fall back on.
            for (const [varName, varDef] of Object.entries(vars)) {
                if (varDef.required && varDef.default === undefined) {
                    required.push(varName);
                }
                else {
                    optional.push(varName);
                }
            }
            const parts = [];
            if (required.length > 0) {
                parts.push(`required: ${required.join(", ")}`);
            }
            if (optional.length > 0) {
                parts.push(`optional: ${optional.join(", ")}`);
            }
            return `- ${name}${parts.length > 0 ? ` (${parts.join("; ")})` : ""}`;
        });
        workflowSuffix = `\nAvailable workflows:\n${lines.join("\n")}`;
    }
    return [
        {
            name: "run_workflow",
            description: `Start a new multi-stage workflow execution.${workflowSuffix}`,
            inputSchema: runSchema,
        },
        {
            name: "resume_workflow",
            description: "Resume a previously failed or cancelled workflow from its last checkpoint. " +
                "Requires the original run_id and matching child config.",
            inputSchema: RESUME_WORKFLOW_SCHEMA,
        },
        {
            name: "list_workflows",
            description: "List available workflow definitions with their name, description, version, and variable schema.",
            inputSchema: LIST_WORKFLOWS_SCHEMA,
            annotations: { readOnlyHint: true },
        },
    ];
}
|
|
1071
|
+
/** Names of the workflow tools (for dispatch matching). */
export const WORKFLOW_TOOL_NAMES = new Set(["run_workflow", "resume_workflow", "list_workflows"]);
|
|
1077
|
+
/**
 * Dispatch a workflow tool call.
 *
 * Returns the tool result, or null if the tool name is not recognized
 * as a workflow tool (caller should handle it as an unknown tool).
 */
export async function handleWorkflowToolCall(server, options, name, args, signal, progressToken) {
    if (name === "run_workflow") {
        return await executeRunWorkflow(server, options, args, signal, progressToken);
    }
    if (name === "resume_workflow") {
        return await executeResumeWorkflow(server, options, args, signal, progressToken);
    }
    if (name === "list_workflows") {
        return executeListWorkflows();
    }
    // Not a workflow tool — let the caller handle it.
    return null;
}
|
|
1095
|
+
// ---------------------------------------------------------------------------
// Public registration function
// ---------------------------------------------------------------------------
/**
 * Register workflow tools on an MCP Server instance.
 *
 * This is the reusable core extracted from createWorkflowServer().
 * It registers three tools: run_workflow, resume_workflow, list_workflows.
 *
 * The `options.createBackend` factory determines how clink calls are
 * dispatched — callers provide either a child-process or in-process backend.
 *
 * NOTE: This calls setRequestHandler which REPLACES any existing handlers.
 * For combined servers (root + workflows), use getWorkflowToolDefinitions()
 * and handleWorkflowToolCall() instead and build a unified handler.
 */
export function registerWorkflowTools(server, options) {
    // tools/list — advertise the three workflow tools.
    server.setRequestHandler(ListToolsRequestSchema, async () => ({
        tools: getWorkflowToolDefinitions(),
    }));
    // tools/call — route to the matching workflow tool, or report unknown.
    server.setRequestHandler(CallToolRequestSchema, async (request, extra) => {
        const { name, arguments: args } = request.params;
        const progressToken = request.params._meta?.progressToken;
        const handled = await handleWorkflowToolCall(server, options, name, args ?? {}, extra.signal, progressToken);
        if (handled != null) {
            return handled;
        }
        logger.warn({ tool: name }, "unknown tool requested");
        return {
            content: [{ type: "text", text: `Unknown tool: ${String(name)}` }],
            isError: true,
        };
    });
}
|
|
1131
|
+
//# sourceMappingURL=workflow-tools.js.map
|