@exaudeus/workrail 3.24.4 → 3.26.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/cli/commands/index.d.ts +6 -0
- package/dist/cli/commands/index.js +14 -1
- package/dist/cli/commands/version.d.ts +6 -0
- package/dist/cli/commands/version.js +14 -0
- package/dist/cli/commands/worktrain-await.d.ts +35 -0
- package/dist/cli/commands/worktrain-await.js +207 -0
- package/dist/cli/commands/worktrain-inbox.d.ts +23 -0
- package/dist/cli/commands/worktrain-inbox.js +82 -0
- package/dist/cli/commands/worktrain-init.d.ts +23 -0
- package/dist/cli/commands/worktrain-init.js +338 -0
- package/dist/cli/commands/worktrain-spawn.d.ts +28 -0
- package/dist/cli/commands/worktrain-spawn.js +106 -0
- package/dist/cli/commands/worktrain-tell.d.ts +25 -0
- package/dist/cli/commands/worktrain-tell.js +32 -0
- package/dist/cli-worktrain.d.ts +2 -0
- package/dist/cli-worktrain.js +169 -0
- package/dist/cli.js +100 -0
- package/dist/config/config-file.d.ts +2 -0
- package/dist/config/config-file.js +55 -0
- package/dist/console/assets/index-8dh0Psu-.css +1 -0
- package/dist/console/assets/{index-TMfptYpQ.js → index-HhtarvD5.js} +10 -10
- package/dist/console/index.html +2 -2
- package/dist/daemon/agent-loop.d.ts +90 -0
- package/dist/daemon/agent-loop.js +214 -0
- package/dist/daemon/pi-mono-loader.d.ts +0 -0
- package/dist/daemon/pi-mono-loader.js +1 -0
- package/dist/daemon/soul-template.d.ts +2 -0
- package/dist/daemon/soul-template.js +22 -0
- package/dist/daemon/workflow-runner.d.ts +63 -0
- package/dist/daemon/workflow-runner.js +689 -0
- package/dist/infrastructure/session/HttpServer.js +2 -2
- package/dist/manifest.json +226 -50
- package/dist/mcp/handlers/v2-execution/start.d.ts +2 -1
- package/dist/mcp/handlers/v2-execution/start.js +4 -3
- package/dist/mcp/output-schemas.d.ts +154 -154
- package/dist/mcp/server.js +1 -1
- package/dist/mcp/transports/bridge-entry.js +20 -2
- package/dist/mcp/transports/bridge-events.d.ts +34 -0
- package/dist/mcp/transports/bridge-events.js +24 -0
- package/dist/mcp/transports/fatal-exit.d.ts +5 -0
- package/dist/mcp/transports/fatal-exit.js +82 -0
- package/dist/mcp/transports/http-entry.js +3 -0
- package/dist/mcp/transports/stdio-entry.js +3 -7
- package/dist/mcp/v2/tools.d.ts +7 -7
- package/dist/trigger/delivery-action.d.ts +37 -0
- package/dist/trigger/delivery-action.js +204 -0
- package/dist/trigger/delivery-client.d.ts +11 -0
- package/dist/trigger/delivery-client.js +27 -0
- package/dist/trigger/index.d.ts +5 -0
- package/dist/trigger/index.js +8 -0
- package/dist/trigger/trigger-listener.d.ts +32 -0
- package/dist/trigger/trigger-listener.js +176 -0
- package/dist/trigger/trigger-router.d.ts +38 -0
- package/dist/trigger/trigger-router.js +343 -0
- package/dist/trigger/trigger-store.d.ts +39 -0
- package/dist/trigger/trigger-store.js +698 -0
- package/dist/trigger/types.d.ts +70 -0
- package/dist/trigger/types.js +10 -0
- package/dist/v2/durable-core/schemas/execution-snapshot/blocked-snapshot.d.ts +22 -22
- package/dist/v2/durable-core/schemas/execution-snapshot/execution-snapshot.v1.d.ts +114 -114
- package/dist/v2/durable-core/schemas/export-bundle/index.d.ts +454 -454
- package/dist/v2/durable-core/schemas/session/blockers.d.ts +14 -14
- package/dist/v2/durable-core/schemas/session/events.d.ts +93 -93
- package/dist/v2/durable-core/schemas/session/gaps.d.ts +2 -2
- package/dist/v2/durable-core/schemas/session/validation-event.d.ts +4 -4
- package/dist/v2/infra/in-memory/daemon-registry/index.d.ts +14 -0
- package/dist/v2/infra/in-memory/daemon-registry/index.js +32 -0
- package/dist/v2/infra/in-memory/keyed-async-queue/index.d.ts +5 -0
- package/dist/v2/infra/in-memory/keyed-async-queue/index.js +32 -0
- package/dist/v2/usecases/console-routes.d.ts +3 -1
- package/dist/v2/usecases/console-routes.js +132 -1
- package/dist/v2/usecases/console-service.d.ts +2 -0
- package/dist/v2/usecases/console-service.js +18 -2
- package/dist/v2/usecases/console-types.d.ts +2 -0
- package/package.json +6 -2
- package/spec/workflow-tags.json +1 -0
- package/workflows/classify-task-workflow.json +68 -0
- package/workflows/coding-task-workflow-agentic.lean.v2.json +43 -13
- package/workflows/workflow-for-workflows.json +4 -2
- package/workflows/workflow-for-workflows.v2.json +4 -2
- package/dist/console/assets/index-BXRk3te_.css +0 -1
- package/workflows/rich-object-contribution.json +0 -258
package/dist/mcp/server.js
CHANGED
|
@@ -188,7 +188,7 @@ async function composeServer() {
|
|
|
188
188
|
snapshotStore: ctx.v2.snapshotStore,
|
|
189
189
|
pinnedWorkflowStore: ctx.v2.pinnedStore,
|
|
190
190
|
});
|
|
191
|
-
ctx.httpServer.mountRoutes((app) => mountConsoleRoutes(app, consoleService, ctx.workflowService, timingRingBuffer, toolCallsPerfFile ?? undefined, serverVersion));
|
|
191
|
+
ctx.httpServer.mountRoutes((app) => mountConsoleRoutes(app, consoleService, ctx.workflowService, timingRingBuffer, toolCallsPerfFile ?? undefined, serverVersion, ctx.v2 ? ctx : undefined));
|
|
192
192
|
console.error('[Console] v2 Console API routes mounted at /api/v2/');
|
|
193
193
|
}
|
|
194
194
|
ctx.httpServer?.finalize();
|
|
@@ -39,6 +39,8 @@ exports.spawnPrimary = spawnPrimary;
|
|
|
39
39
|
exports.reconnectWithBackoff = reconnectWithBackoff;
|
|
40
40
|
exports.handleReconnectOutcome = handleReconnectOutcome;
|
|
41
41
|
exports.startBridgeServer = startBridgeServer;
|
|
42
|
+
const fatal_exit_js_1 = require("./fatal-exit.js");
|
|
43
|
+
const bridge_events_js_1 = require("./bridge-events.js");
|
|
42
44
|
exports.DEFAULT_BRIDGE_CONFIG = {
|
|
43
45
|
reconnectBaseDelayMs: 250,
|
|
44
46
|
reconnectMaxAttempts: 8,
|
|
@@ -73,6 +75,7 @@ async function spawnPrimary(port, deps) {
|
|
|
73
75
|
await sleep(Math.random() * 300);
|
|
74
76
|
const alreadyUp = await detectHealthyPrimary(port, { retries: 1, fetch: deps.fetch });
|
|
75
77
|
if (alreadyUp != null) {
|
|
78
|
+
(0, bridge_events_js_1.logBridgeEvent)({ kind: 'spawn_skipped', reason: 'primary already up after jitter' });
|
|
76
79
|
console.error('[Bridge] Primary already available after jitter — skipping spawn');
|
|
77
80
|
return;
|
|
78
81
|
}
|
|
@@ -81,6 +84,7 @@ async function spawnPrimary(port, deps) {
|
|
|
81
84
|
console.error('[Bridge] Cannot spawn primary: process.argv[1] is undefined');
|
|
82
85
|
return;
|
|
83
86
|
}
|
|
87
|
+
(0, bridge_events_js_1.logBridgeEvent)({ kind: 'spawn_primary', port });
|
|
84
88
|
console.error('[Bridge] Spawning new WorkRail primary process');
|
|
85
89
|
try {
|
|
86
90
|
const child = deps.spawn(process.execPath, [scriptPath], {
|
|
@@ -119,6 +123,7 @@ async function reconnectWithBackoff(deps) {
|
|
|
119
123
|
async function handleReconnectOutcome(outcome, reconnectingState, deps) {
|
|
120
124
|
switch (outcome.kind) {
|
|
121
125
|
case 'reconnected':
|
|
126
|
+
(0, bridge_events_js_1.logBridgeEvent)({ kind: 'reconnected', attempt: reconnectingState.attempt });
|
|
122
127
|
console.error('[Bridge] Reconnected to primary');
|
|
123
128
|
return;
|
|
124
129
|
case 'aborted':
|
|
@@ -135,6 +140,7 @@ async function handleReconnectOutcome(outcome, reconnectingState, deps) {
|
|
|
135
140
|
deps.startReconnectLoop();
|
|
136
141
|
}
|
|
137
142
|
else {
|
|
143
|
+
(0, bridge_events_js_1.logBridgeEvent)({ kind: 'budget_exhausted', budgetUsed: reconnectingState.maxAttempts });
|
|
138
144
|
deps.setConnectionState({ kind: 'closed' });
|
|
139
145
|
deps.performShutdown('respawn budget exhausted — primary repeatedly unavailable');
|
|
140
146
|
}
|
|
@@ -142,7 +148,9 @@ async function handleReconnectOutcome(outcome, reconnectingState, deps) {
|
|
|
142
148
|
}
|
|
143
149
|
}
|
|
144
150
|
async function startBridgeServer(primaryPort, config = exports.DEFAULT_BRIDGE_CONFIG, deps = {}) {
|
|
145
|
-
|
|
151
|
+
(0, fatal_exit_js_1.registerFatalHandlers)('bridge');
|
|
152
|
+
(0, fatal_exit_js_1.logStartup)('bridge', { primaryPort });
|
|
153
|
+
(0, bridge_events_js_1.logBridgeEvent)({ kind: 'started', primaryPort, ppid: process.ppid });
|
|
146
154
|
const { StdioServerTransport } = await Promise.resolve().then(() => __importStar(require('@modelcontextprotocol/sdk/server/stdio.js')));
|
|
147
155
|
const { StreamableHTTPClientTransport } = await Promise.resolve().then(() => __importStar(require('@modelcontextprotocol/sdk/client/streamableHttp.js')));
|
|
148
156
|
const { spawn: nodeSpawn } = await Promise.resolve().then(() => __importStar(require('child_process')));
|
|
@@ -158,7 +166,8 @@ async function startBridgeServer(primaryPort, config = exports.DEFAULT_BRIDGE_CO
|
|
|
158
166
|
if (shutdownSignal.aborted)
|
|
159
167
|
return;
|
|
160
168
|
shutdownController.abort();
|
|
161
|
-
|
|
169
|
+
(0, bridge_events_js_1.logBridgeEvent)({ kind: 'shutdown', reason });
|
|
170
|
+
process.stderr.write(`[Bridge] Shutdown pid=${process.pid} reason="${reason}" ts=${new Date().toISOString()}\n`);
|
|
162
171
|
const state = connectionState;
|
|
163
172
|
void (state.kind === 'connected' ? state.transport.close() : Promise.resolve()).finally(() => process.exit(0));
|
|
164
173
|
};
|
|
@@ -176,6 +185,7 @@ async function startBridgeServer(primaryPort, config = exports.DEFAULT_BRIDGE_CO
|
|
|
176
185
|
const current = connectionState;
|
|
177
186
|
if (current.kind === 'connecting' || current.kind === 'reconnecting')
|
|
178
187
|
return;
|
|
188
|
+
(0, bridge_events_js_1.logBridgeEvent)({ kind: 'disconnected' });
|
|
179
189
|
console.error('[Bridge] Primary connection lost — reconnecting');
|
|
180
190
|
setConnectionState({
|
|
181
191
|
kind: 'reconnecting',
|
|
@@ -203,6 +213,7 @@ async function startBridgeServer(primaryPort, config = exports.DEFAULT_BRIDGE_CO
|
|
|
203
213
|
signal: shutdownSignal,
|
|
204
214
|
config,
|
|
205
215
|
detect: async (attempt) => {
|
|
216
|
+
(0, bridge_events_js_1.logBridgeEvent)({ kind: 'reconnect_attempt', attempt: attempt + 1, maxAttempts: config.reconnectMaxAttempts });
|
|
206
217
|
console.error(`[Bridge] Reconnect attempt ${attempt + 1}/${config.reconnectMaxAttempts}`);
|
|
207
218
|
const detected = await detectHealthyPrimary(primaryPort, { retries: 1, fetch: deps.fetch });
|
|
208
219
|
if (detected == null)
|
|
@@ -224,6 +235,12 @@ async function startBridgeServer(primaryPort, config = exports.DEFAULT_BRIDGE_CO
|
|
|
224
235
|
});
|
|
225
236
|
})
|
|
226
237
|
.catch((err) => {
|
|
238
|
+
const errObj = err instanceof Error ? err : new Error(String(err));
|
|
239
|
+
(0, bridge_events_js_1.logBridgeEvent)({
|
|
240
|
+
kind: 'reconnect_loop_error',
|
|
241
|
+
message: errObj.message,
|
|
242
|
+
stack: errObj.stack ?? null,
|
|
243
|
+
});
|
|
227
244
|
console.error('[Bridge] Unexpected error in reconnect loop:', err);
|
|
228
245
|
});
|
|
229
246
|
};
|
|
@@ -253,6 +270,7 @@ async function startBridgeServer(primaryPort, config = exports.DEFAULT_BRIDGE_CO
|
|
|
253
270
|
if (initialTransport == null) {
|
|
254
271
|
throw new Error(`[Bridge] Failed to connect to primary on port ${primaryPort}`);
|
|
255
272
|
}
|
|
273
|
+
(0, bridge_events_js_1.logBridgeEvent)({ kind: 'connected', primaryPort });
|
|
256
274
|
console.error('[Bridge] Connected to primary');
|
|
257
275
|
process.stdout.on('error', (err) => {
|
|
258
276
|
const code = err.code;
|
|
@@ -0,0 +1,34 @@
|
|
|
1
|
+
export type BridgeEvent = {
|
|
2
|
+
readonly kind: 'started';
|
|
3
|
+
readonly primaryPort: number;
|
|
4
|
+
readonly ppid: number;
|
|
5
|
+
} | {
|
|
6
|
+
readonly kind: 'connected';
|
|
7
|
+
readonly primaryPort: number;
|
|
8
|
+
} | {
|
|
9
|
+
readonly kind: 'disconnected';
|
|
10
|
+
} | {
|
|
11
|
+
readonly kind: 'reconnect_attempt';
|
|
12
|
+
readonly attempt: number;
|
|
13
|
+
readonly maxAttempts: number;
|
|
14
|
+
} | {
|
|
15
|
+
readonly kind: 'reconnected';
|
|
16
|
+
readonly attempt: number;
|
|
17
|
+
} | {
|
|
18
|
+
readonly kind: 'spawn_primary';
|
|
19
|
+
readonly port: number;
|
|
20
|
+
} | {
|
|
21
|
+
readonly kind: 'spawn_skipped';
|
|
22
|
+
readonly reason: string;
|
|
23
|
+
} | {
|
|
24
|
+
readonly kind: 'budget_exhausted';
|
|
25
|
+
readonly budgetUsed: number;
|
|
26
|
+
} | {
|
|
27
|
+
readonly kind: 'reconnect_loop_error';
|
|
28
|
+
readonly message: string;
|
|
29
|
+
readonly stack: string | null;
|
|
30
|
+
} | {
|
|
31
|
+
readonly kind: 'shutdown';
|
|
32
|
+
readonly reason: string;
|
|
33
|
+
};
|
|
34
|
+
export declare function logBridgeEvent(event: BridgeEvent): void;
|
|
@@ -0,0 +1,24 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
+
exports.logBridgeEvent = logBridgeEvent;
|
|
4
|
+
const fs_1 = require("fs");
|
|
5
|
+
const os_1 = require("os");
|
|
6
|
+
const path_1 = require("path");
|
|
7
|
+
const BRIDGE_LOG_PATH = (0, path_1.join)((0, os_1.homedir)(), '.workrail', 'bridge.log');
|
|
8
|
+
const BRIDGE_LOG_MAX_BYTES = 512 * 1024;
|
|
9
|
+
function logBridgeEvent(event) {
|
|
10
|
+
try {
|
|
11
|
+
(0, fs_1.mkdirSync)((0, path_1.join)((0, os_1.homedir)(), '.workrail'), { recursive: true });
|
|
12
|
+
try {
|
|
13
|
+
const { statSync } = require('fs');
|
|
14
|
+
if (statSync(BRIDGE_LOG_PATH).size > BRIDGE_LOG_MAX_BYTES) {
|
|
15
|
+
const { writeFileSync } = require('fs');
|
|
16
|
+
writeFileSync(BRIDGE_LOG_PATH, '');
|
|
17
|
+
}
|
|
18
|
+
}
|
|
19
|
+
catch { }
|
|
20
|
+
const entry = { ts: new Date().toISOString(), pid: process.pid, ...event };
|
|
21
|
+
(0, fs_1.appendFileSync)(BRIDGE_LOG_PATH, JSON.stringify(entry) + '\n');
|
|
22
|
+
}
|
|
23
|
+
catch { }
|
|
24
|
+
}
|
|
@@ -0,0 +1,5 @@
|
|
|
1
|
+
export type TransportKind = 'stdio' | 'http' | 'bridge';
|
|
2
|
+
export declare function formatFatal(reason: unknown): string;
|
|
3
|
+
export declare function fatalExit(label: string, reason: unknown): void;
|
|
4
|
+
export declare function registerFatalHandlers(transport: TransportKind): void;
|
|
5
|
+
export declare function logStartup(transport: TransportKind, extra?: Record<string, string | number>): void;
|
|
@@ -0,0 +1,82 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
+
exports.formatFatal = formatFatal;
|
|
4
|
+
exports.fatalExit = fatalExit;
|
|
5
|
+
exports.registerFatalHandlers = registerFatalHandlers;
|
|
6
|
+
exports.logStartup = logStartup;
|
|
7
|
+
const fs_1 = require("fs");
|
|
8
|
+
const os_1 = require("os");
|
|
9
|
+
const path_1 = require("path");
|
|
10
|
+
const fatalHandlerActive = { value: false };
|
|
11
|
+
let registeredTransport = null;
|
|
12
|
+
const startedAtMs = Date.now();
|
|
13
|
+
const CRASH_LOG_PATH = (0, path_1.join)((0, os_1.homedir)(), '.workrail', 'crash.log');
|
|
14
|
+
const CRASH_LOG_MAX_BYTES = 512 * 1024;
|
|
15
|
+
function writeCrashLog(label, reason) {
|
|
16
|
+
try {
|
|
17
|
+
(0, fs_1.mkdirSync)((0, path_1.join)((0, os_1.homedir)(), '.workrail'), { recursive: true });
|
|
18
|
+
try {
|
|
19
|
+
const { statSync } = require('fs');
|
|
20
|
+
const stat = statSync(CRASH_LOG_PATH);
|
|
21
|
+
if (stat.size > CRASH_LOG_MAX_BYTES) {
|
|
22
|
+
(0, fs_1.writeFileSync)(CRASH_LOG_PATH, '');
|
|
23
|
+
}
|
|
24
|
+
}
|
|
25
|
+
catch {
|
|
26
|
+
}
|
|
27
|
+
const entry = {
|
|
28
|
+
ts: new Date().toISOString(),
|
|
29
|
+
pid: process.pid,
|
|
30
|
+
ppid: process.ppid,
|
|
31
|
+
cwd: process.cwd(),
|
|
32
|
+
transport: registeredTransport ?? 'unknown',
|
|
33
|
+
uptimeMs: Date.now() - startedAtMs,
|
|
34
|
+
label,
|
|
35
|
+
message: reason instanceof Error ? reason.message : String(reason),
|
|
36
|
+
stack: reason instanceof Error ? (reason.stack ?? null) : null,
|
|
37
|
+
};
|
|
38
|
+
(0, fs_1.writeFileSync)(CRASH_LOG_PATH, JSON.stringify(entry) + '\n', { flag: 'a' });
|
|
39
|
+
}
|
|
40
|
+
catch {
|
|
41
|
+
}
|
|
42
|
+
}
|
|
43
|
+
function formatFatal(reason) {
|
|
44
|
+
if (reason instanceof Error) {
|
|
45
|
+
return reason.stack ?? `${reason.name}: ${reason.message}`;
|
|
46
|
+
}
|
|
47
|
+
return String(reason);
|
|
48
|
+
}
|
|
49
|
+
function fatalExit(label, reason) {
|
|
50
|
+
if (fatalHandlerActive.value)
|
|
51
|
+
return;
|
|
52
|
+
fatalHandlerActive.value = true;
|
|
53
|
+
writeCrashLog(label, reason);
|
|
54
|
+
try {
|
|
55
|
+
process.stderr.write(`[MCP] ${label}: ${formatFatal(reason)}\n`);
|
|
56
|
+
}
|
|
57
|
+
catch {
|
|
58
|
+
}
|
|
59
|
+
process.exit(1);
|
|
60
|
+
}
|
|
61
|
+
function registerFatalHandlers(transport) {
|
|
62
|
+
registeredTransport = transport;
|
|
63
|
+
process.on('uncaughtException', (err) => fatalExit('Uncaught exception', err));
|
|
64
|
+
process.on('unhandledRejection', (reason) => fatalExit('Unhandled promise rejection', reason));
|
|
65
|
+
}
|
|
66
|
+
function logStartup(transport, extra) {
|
|
67
|
+
const version = (() => {
|
|
68
|
+
try {
|
|
69
|
+
return require('../../package.json').version;
|
|
70
|
+
}
|
|
71
|
+
catch {
|
|
72
|
+
return 'unknown';
|
|
73
|
+
}
|
|
74
|
+
})();
|
|
75
|
+
const parts = [
|
|
76
|
+
`[Startup] transport=${transport}`,
|
|
77
|
+
`pid=${process.pid}`,
|
|
78
|
+
`version=${version}`,
|
|
79
|
+
...(extra ? Object.entries(extra).map(([k, v]) => `${k}=${v}`) : []),
|
|
80
|
+
];
|
|
81
|
+
process.stderr.write(parts.join(' ') + '\n');
|
|
82
|
+
}
|
|
@@ -40,10 +40,13 @@ exports.startHttpServer = startHttpServer;
|
|
|
40
40
|
const server_js_1 = require("../server.js");
|
|
41
41
|
const http_listener_js_1 = require("./http-listener.js");
|
|
42
42
|
const shutdown_hooks_js_1 = require("./shutdown-hooks.js");
|
|
43
|
+
const fatal_exit_js_1 = require("./fatal-exit.js");
|
|
43
44
|
const crypto = __importStar(require("crypto"));
|
|
44
45
|
const express_1 = __importDefault(require("express"));
|
|
45
46
|
const HTTP_PORT_SCAN_END = 3199;
|
|
46
47
|
async function startHttpServer(port) {
|
|
48
|
+
(0, fatal_exit_js_1.registerFatalHandlers)('http');
|
|
49
|
+
(0, fatal_exit_js_1.logStartup)('http', { port });
|
|
47
50
|
const { server, ctx } = await (0, server_js_1.composeServer)();
|
|
48
51
|
const scanEnd = Math.max(port, HTTP_PORT_SCAN_END);
|
|
49
52
|
const listener = await (0, http_listener_js_1.bindWithPortFallback)(port, scanEnd);
|
|
@@ -36,6 +36,7 @@ Object.defineProperty(exports, "__esModule", { value: true });
|
|
|
36
36
|
exports.startStdioServer = startStdioServer;
|
|
37
37
|
const server_js_1 = require("../server.js");
|
|
38
38
|
const shutdown_hooks_js_1 = require("./shutdown-hooks.js");
|
|
39
|
+
const fatal_exit_js_1 = require("./fatal-exit.js");
|
|
39
40
|
const INITIAL_ROOTS_TIMEOUT_MS = 1000;
|
|
40
41
|
async function fetchInitialRootsWithTimeout(server) {
|
|
41
42
|
return Promise.race([
|
|
@@ -46,13 +47,8 @@ async function fetchInitialRootsWithTimeout(server) {
|
|
|
46
47
|
]);
|
|
47
48
|
}
|
|
48
49
|
async function startStdioServer() {
|
|
49
|
-
|
|
50
|
-
|
|
51
|
-
});
|
|
52
|
-
process.on('unhandledRejection', (reason) => {
|
|
53
|
-
console.error('[MCP] Unhandled promise rejection:', reason);
|
|
54
|
-
process.exit(1);
|
|
55
|
-
});
|
|
50
|
+
(0, fatal_exit_js_1.registerFatalHandlers)('stdio');
|
|
51
|
+
(0, fatal_exit_js_1.logStartup)('stdio');
|
|
56
52
|
const { server, ctx, rootsManager } = await (0, server_js_1.composeServer)();
|
|
57
53
|
const { StdioServerTransport } = await Promise.resolve().then(() => __importStar(require('@modelcontextprotocol/sdk/server/stdio.js')));
|
|
58
54
|
const { RootsListChangedNotificationSchema, } = await Promise.resolve().then(() => __importStar(require('@modelcontextprotocol/sdk/types.js')));
|
package/dist/mcp/v2/tools.d.ts
CHANGED
|
@@ -61,20 +61,20 @@ export declare const V2ContinueWorkflowInputShape: z.ZodObject<{
|
|
|
61
61
|
continueToken: string;
|
|
62
62
|
context?: Record<string, unknown> | undefined;
|
|
63
63
|
workspacePath?: string | undefined;
|
|
64
|
-
intent?: "advance" | "rehydrate" | undefined;
|
|
65
64
|
output?: {
|
|
66
65
|
notesMarkdown?: string | undefined;
|
|
67
66
|
artifacts?: unknown[] | undefined;
|
|
68
67
|
} | undefined;
|
|
68
|
+
intent?: "advance" | "rehydrate" | undefined;
|
|
69
69
|
}, {
|
|
70
70
|
continueToken: string;
|
|
71
71
|
context?: Record<string, unknown> | undefined;
|
|
72
72
|
workspacePath?: string | undefined;
|
|
73
|
-
intent?: "advance" | "rehydrate" | undefined;
|
|
74
73
|
output?: {
|
|
75
74
|
notesMarkdown?: string | undefined;
|
|
76
75
|
artifacts?: unknown[] | undefined;
|
|
77
76
|
} | undefined;
|
|
77
|
+
intent?: "advance" | "rehydrate" | undefined;
|
|
78
78
|
}>;
|
|
79
79
|
export declare const V2ContinueWorkflowInput: z.ZodEffects<z.ZodEffects<z.ZodObject<{
|
|
80
80
|
workspacePath: z.ZodOptional<z.ZodEffects<z.ZodString, string, string>>;
|
|
@@ -97,41 +97,41 @@ export declare const V2ContinueWorkflowInput: z.ZodEffects<z.ZodEffects<z.ZodObj
|
|
|
97
97
|
continueToken: string;
|
|
98
98
|
context?: Record<string, unknown> | undefined;
|
|
99
99
|
workspacePath?: string | undefined;
|
|
100
|
-
intent?: "advance" | "rehydrate" | undefined;
|
|
101
100
|
output?: {
|
|
102
101
|
notesMarkdown?: string | undefined;
|
|
103
102
|
artifacts?: unknown[] | undefined;
|
|
104
103
|
} | undefined;
|
|
104
|
+
intent?: "advance" | "rehydrate" | undefined;
|
|
105
105
|
contextVariables?: Record<string, unknown> | undefined;
|
|
106
106
|
}, {
|
|
107
107
|
continueToken: string;
|
|
108
108
|
context?: Record<string, unknown> | undefined;
|
|
109
109
|
workspacePath?: string | undefined;
|
|
110
|
-
intent?: "advance" | "rehydrate" | undefined;
|
|
111
110
|
output?: {
|
|
112
111
|
notesMarkdown?: string | undefined;
|
|
113
112
|
artifacts?: unknown[] | undefined;
|
|
114
113
|
} | undefined;
|
|
114
|
+
intent?: "advance" | "rehydrate" | undefined;
|
|
115
115
|
contextVariables?: Record<string, unknown> | undefined;
|
|
116
116
|
}>, {
|
|
117
117
|
continueToken: string;
|
|
118
118
|
context?: Record<string, unknown> | undefined;
|
|
119
119
|
workspacePath?: string | undefined;
|
|
120
|
-
intent?: "advance" | "rehydrate" | undefined;
|
|
121
120
|
output?: {
|
|
122
121
|
notesMarkdown?: string | undefined;
|
|
123
122
|
artifacts?: unknown[] | undefined;
|
|
124
123
|
} | undefined;
|
|
124
|
+
intent?: "advance" | "rehydrate" | undefined;
|
|
125
125
|
contextVariables?: Record<string, unknown> | undefined;
|
|
126
126
|
}, {
|
|
127
127
|
continueToken: string;
|
|
128
128
|
context?: Record<string, unknown> | undefined;
|
|
129
129
|
workspacePath?: string | undefined;
|
|
130
|
-
intent?: "advance" | "rehydrate" | undefined;
|
|
131
130
|
output?: {
|
|
132
131
|
notesMarkdown?: string | undefined;
|
|
133
132
|
artifacts?: unknown[] | undefined;
|
|
134
133
|
} | undefined;
|
|
134
|
+
intent?: "advance" | "rehydrate" | undefined;
|
|
135
135
|
contextVariables?: Record<string, unknown> | undefined;
|
|
136
136
|
}>, {
|
|
137
137
|
workspacePath?: string | undefined;
|
|
@@ -146,11 +146,11 @@ export declare const V2ContinueWorkflowInput: z.ZodEffects<z.ZodEffects<z.ZodObj
|
|
|
146
146
|
continueToken: string;
|
|
147
147
|
context?: Record<string, unknown> | undefined;
|
|
148
148
|
workspacePath?: string | undefined;
|
|
149
|
-
intent?: "advance" | "rehydrate" | undefined;
|
|
150
149
|
output?: {
|
|
151
150
|
notesMarkdown?: string | undefined;
|
|
152
151
|
artifacts?: unknown[] | undefined;
|
|
153
152
|
} | undefined;
|
|
153
|
+
intent?: "advance" | "rehydrate" | undefined;
|
|
154
154
|
contextVariables?: Record<string, unknown> | undefined;
|
|
155
155
|
}>;
|
|
156
156
|
export type V2ContinueWorkflowInput = z.infer<typeof V2ContinueWorkflowInput>;
|
|
@@ -0,0 +1,37 @@
|
|
|
1
|
+
import type { Result } from '../runtime/result.js';
|
|
2
|
+
export interface HandoffArtifact {
|
|
3
|
+
readonly commitType: string;
|
|
4
|
+
readonly commitScope: string;
|
|
5
|
+
readonly commitSubject: string;
|
|
6
|
+
readonly prTitle: string;
|
|
7
|
+
readonly prBody: string;
|
|
8
|
+
readonly filesChanged: readonly string[];
|
|
9
|
+
readonly followUpTickets: readonly string[];
|
|
10
|
+
}
|
|
11
|
+
export interface DeliveryFlags {
|
|
12
|
+
readonly autoCommit?: boolean;
|
|
13
|
+
readonly autoOpenPR?: boolean;
|
|
14
|
+
}
|
|
15
|
+
export type DeliveryResult = {
|
|
16
|
+
readonly _tag: 'committed';
|
|
17
|
+
readonly sha: string;
|
|
18
|
+
} | {
|
|
19
|
+
readonly _tag: 'pr_opened';
|
|
20
|
+
readonly url: string;
|
|
21
|
+
} | {
|
|
22
|
+
readonly _tag: 'skipped';
|
|
23
|
+
readonly reason: string;
|
|
24
|
+
} | {
|
|
25
|
+
readonly _tag: 'error';
|
|
26
|
+
readonly phase: 'parse' | 'commit' | 'pr';
|
|
27
|
+
readonly details: string;
|
|
28
|
+
};
|
|
29
|
+
export type ExecFn = (file: string, args: string[], options: {
|
|
30
|
+
cwd: string;
|
|
31
|
+
timeout: number;
|
|
32
|
+
}) => Promise<{
|
|
33
|
+
stdout: string;
|
|
34
|
+
stderr: string;
|
|
35
|
+
}>;
|
|
36
|
+
export declare function parseHandoffArtifact(notes: string): Result<HandoffArtifact, string>;
|
|
37
|
+
export declare function runDelivery(artifact: HandoffArtifact, workspacePath: string, flags: DeliveryFlags, execFn: ExecFn): Promise<DeliveryResult>;
|
|
@@ -0,0 +1,204 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
|
3
|
+
if (k2 === undefined) k2 = k;
|
|
4
|
+
var desc = Object.getOwnPropertyDescriptor(m, k);
|
|
5
|
+
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
|
|
6
|
+
desc = { enumerable: true, get: function() { return m[k]; } };
|
|
7
|
+
}
|
|
8
|
+
Object.defineProperty(o, k2, desc);
|
|
9
|
+
}) : (function(o, m, k, k2) {
|
|
10
|
+
if (k2 === undefined) k2 = k;
|
|
11
|
+
o[k2] = m[k];
|
|
12
|
+
}));
|
|
13
|
+
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
|
14
|
+
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
|
15
|
+
}) : function(o, v) {
|
|
16
|
+
o["default"] = v;
|
|
17
|
+
});
|
|
18
|
+
var __importStar = (this && this.__importStar) || (function () {
|
|
19
|
+
var ownKeys = function(o) {
|
|
20
|
+
ownKeys = Object.getOwnPropertyNames || function (o) {
|
|
21
|
+
var ar = [];
|
|
22
|
+
for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
|
|
23
|
+
return ar;
|
|
24
|
+
};
|
|
25
|
+
return ownKeys(o);
|
|
26
|
+
};
|
|
27
|
+
return function (mod) {
|
|
28
|
+
if (mod && mod.__esModule) return mod;
|
|
29
|
+
var result = {};
|
|
30
|
+
if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
|
|
31
|
+
__setModuleDefault(result, mod);
|
|
32
|
+
return result;
|
|
33
|
+
};
|
|
34
|
+
})();
|
|
35
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
36
|
+
exports.parseHandoffArtifact = parseHandoffArtifact;
|
|
37
|
+
exports.runDelivery = runDelivery;
|
|
38
|
+
const crypto = __importStar(require("node:crypto"));
|
|
39
|
+
const fs = __importStar(require("node:fs/promises"));
|
|
40
|
+
const os = __importStar(require("node:os"));
|
|
41
|
+
const path = __importStar(require("node:path"));
|
|
42
|
+
const result_js_1 = require("../runtime/result.js");
|
|
43
|
+
const DELIVERY_TIMEOUT_MS = 60 * 1000;
|
|
44
|
+
function parseHandoffArtifact(notes) {
|
|
45
|
+
if (!notes || notes.trim() === '') {
|
|
46
|
+
return (0, result_js_1.err)('notes is empty');
|
|
47
|
+
}
|
|
48
|
+
const jsonBlockRe = /```json\s*\n([\s\S]*?)\n```/g;
|
|
49
|
+
for (const blockMatch of notes.matchAll(jsonBlockRe)) {
|
|
50
|
+
const blockContent = blockMatch[1];
|
|
51
|
+
if (!blockContent)
|
|
52
|
+
continue;
|
|
53
|
+
try {
|
|
54
|
+
const parsed = JSON.parse(blockContent);
|
|
55
|
+
const artifact = assembleArtifact(parsed);
|
|
56
|
+
if (artifact.kind === 'ok')
|
|
57
|
+
return (0, result_js_1.ok)(artifact.value);
|
|
58
|
+
}
|
|
59
|
+
catch {
|
|
60
|
+
}
|
|
61
|
+
}
|
|
62
|
+
const fields = {};
|
|
63
|
+
const lines = notes.split('\n');
|
|
64
|
+
for (const line of lines) {
|
|
65
|
+
const match = line.match(/^[-*]?\s*`?(\w+)`?\s*:\s*(.+)$/);
|
|
66
|
+
if (match && match[1] && match[2]) {
|
|
67
|
+
const key = match[1].trim();
|
|
68
|
+
const value = match[2].trim().replace(/^`|`$/g, '');
|
|
69
|
+
fields[key] = value;
|
|
70
|
+
}
|
|
71
|
+
}
|
|
72
|
+
const filesChangedIdx = notes.indexOf('filesChanged');
|
|
73
|
+
if (filesChangedIdx !== -1) {
|
|
74
|
+
const afterFilesChanged = notes.slice(filesChangedIdx);
|
|
75
|
+
const fileMatches = afterFilesChanged.matchAll(/^\s*-\s+(.+)$/mg);
|
|
76
|
+
const fileList = [];
|
|
77
|
+
for (const fm of fileMatches) {
|
|
78
|
+
if (fm[1])
|
|
79
|
+
fileList.push(fm[1].trim());
|
|
80
|
+
}
|
|
81
|
+
if (fileList.length > 0) {
|
|
82
|
+
fields['filesChanged'] = JSON.stringify(fileList);
|
|
83
|
+
}
|
|
84
|
+
}
|
|
85
|
+
if (Object.keys(fields).length === 0) {
|
|
86
|
+
return (0, result_js_1.err)('no parseable handoff fields found in notes (no JSON block and no key: value lines)');
|
|
87
|
+
}
|
|
88
|
+
let filesChanged = [];
|
|
89
|
+
if (fields['filesChanged']) {
|
|
90
|
+
try {
|
|
91
|
+
const parsed = JSON.parse(fields['filesChanged']);
|
|
92
|
+
if (Array.isArray(parsed)) {
|
|
93
|
+
filesChanged = parsed.filter((s) => typeof s === 'string');
|
|
94
|
+
}
|
|
95
|
+
}
|
|
96
|
+
catch {
|
|
97
|
+
filesChanged = fields['filesChanged'].split(',').map(s => s.trim()).filter(Boolean);
|
|
98
|
+
}
|
|
99
|
+
}
|
|
100
|
+
const assembled = assembleArtifact({
|
|
101
|
+
commitType: fields['commitType'],
|
|
102
|
+
commitScope: fields['commitScope'],
|
|
103
|
+
commitSubject: fields['commitSubject'],
|
|
104
|
+
prTitle: fields['prTitle'],
|
|
105
|
+
prBody: fields['prBody'],
|
|
106
|
+
filesChanged,
|
|
107
|
+
followUpTickets: [],
|
|
108
|
+
});
|
|
109
|
+
return assembled;
|
|
110
|
+
}
|
|
111
|
+
function assembleArtifact(raw) {
|
|
112
|
+
const requiredStrings = ['commitType', 'commitScope', 'commitSubject', 'prTitle', 'prBody'];
|
|
113
|
+
for (const field of requiredStrings) {
|
|
114
|
+
if (!raw[field] || typeof raw[field] !== 'string' || !raw[field].trim()) {
|
|
115
|
+
return (0, result_js_1.err)(`missing or empty required field: ${field}`);
|
|
116
|
+
}
|
|
117
|
+
}
|
|
118
|
+
const filesRaw = raw['filesChanged'];
|
|
119
|
+
if (!Array.isArray(filesRaw)) {
|
|
120
|
+
return (0, result_js_1.err)('filesChanged must be an array');
|
|
121
|
+
}
|
|
122
|
+
const filesChanged = filesRaw.filter((s) => typeof s === 'string');
|
|
123
|
+
if (filesChanged.length === 0) {
|
|
124
|
+
return (0, result_js_1.err)('filesChanged is empty -- cannot stage files safely');
|
|
125
|
+
}
|
|
126
|
+
const followUpTickets = Array.isArray(raw['followUpTickets'])
|
|
127
|
+
? raw['followUpTickets'].filter((s) => typeof s === 'string')
|
|
128
|
+
: [];
|
|
129
|
+
return (0, result_js_1.ok)({
|
|
130
|
+
commitType: raw['commitType'].trim(),
|
|
131
|
+
commitScope: raw['commitScope'].trim(),
|
|
132
|
+
commitSubject: raw['commitSubject'].trim(),
|
|
133
|
+
prTitle: raw['prTitle'].trim(),
|
|
134
|
+
prBody: raw['prBody'].trim(),
|
|
135
|
+
filesChanged,
|
|
136
|
+
followUpTickets,
|
|
137
|
+
});
|
|
138
|
+
}
|
|
139
|
+
async function runDelivery(artifact, workspacePath, flags, execFn) {
|
|
140
|
+
if (flags.autoCommit !== true) {
|
|
141
|
+
return { _tag: 'skipped', reason: 'autoCommit is not enabled for this trigger' };
|
|
142
|
+
}
|
|
143
|
+
if (artifact.filesChanged.length === 0) {
|
|
144
|
+
return {
|
|
145
|
+
_tag: 'skipped',
|
|
146
|
+
reason: 'filesChanged is empty -- cannot stage files safely (no git add -A fallback)',
|
|
147
|
+
};
|
|
148
|
+
}
|
|
149
|
+
const commitMessage = artifact.commitSubject.startsWith(`${artifact.commitType}(`)
|
|
150
|
+
? artifact.commitSubject
|
|
151
|
+
: `${artifact.commitType}(${artifact.commitScope}): ${artifact.commitSubject}`;
|
|
152
|
+
let commitStdout;
|
|
153
|
+
let commitStderr;
|
|
154
|
+
try {
|
|
155
|
+
await execFn('git', ['add', ...artifact.filesChanged], { cwd: workspacePath, timeout: DELIVERY_TIMEOUT_MS });
|
|
156
|
+
const commitResult = await execFn('git', ['commit', '-m', commitMessage], { cwd: workspacePath, timeout: DELIVERY_TIMEOUT_MS });
|
|
157
|
+
commitStdout = commitResult.stdout;
|
|
158
|
+
commitStderr = commitResult.stderr;
|
|
159
|
+
}
|
|
160
|
+
catch (e) {
|
|
161
|
+
const details = formatExecError(e);
|
|
162
|
+
return { _tag: 'error', phase: 'commit', details };
|
|
163
|
+
}
|
|
164
|
+
const shaMatch = (commitStdout + commitStderr).match(/\[[\w/]+ ([0-9a-f]+)\]/);
|
|
165
|
+
const sha = shaMatch?.[1] ?? 'unknown';
|
|
166
|
+
if (flags.autoOpenPR !== true) {
|
|
167
|
+
return { _tag: 'committed', sha };
|
|
168
|
+
}
|
|
169
|
+
const tmpDir = os.tmpdir();
|
|
170
|
+
const tmpFile = path.join(tmpDir, `workrail-pr-body-${crypto.randomUUID()}.md`);
|
|
171
|
+
let prStdout;
|
|
172
|
+
try {
|
|
173
|
+
await fs.writeFile(tmpFile, artifact.prBody, 'utf8');
|
|
174
|
+
try {
|
|
175
|
+
const prResult = await execFn('gh', ['pr', 'create', '--title', artifact.prTitle, '--body-file', tmpFile], { cwd: workspacePath, timeout: DELIVERY_TIMEOUT_MS });
|
|
176
|
+
prStdout = prResult.stdout;
|
|
177
|
+
}
|
|
178
|
+
catch (e) {
|
|
179
|
+
const details = `commit succeeded (sha: ${sha}) but PR creation failed: ${formatExecError(e)}`;
|
|
180
|
+
return { _tag: 'error', phase: 'pr', details };
|
|
181
|
+
}
|
|
182
|
+
}
|
|
183
|
+
catch (e) {
|
|
184
|
+
return { _tag: 'error', phase: 'pr', details: formatExecError(e) };
|
|
185
|
+
}
|
|
186
|
+
finally {
|
|
187
|
+
await fs.unlink(tmpFile).catch(() => {
|
|
188
|
+
});
|
|
189
|
+
}
|
|
190
|
+
const prUrl = prStdout.trim().split('\n').at(-1)?.trim() ?? '';
|
|
191
|
+
return { _tag: 'pr_opened', url: prUrl };
|
|
192
|
+
}
|
|
193
|
+
function formatExecError(e) {
|
|
194
|
+
if (e instanceof Error) {
|
|
195
|
+
const execErr = e;
|
|
196
|
+
const parts = [e.message];
|
|
197
|
+
if (execErr.stdout)
|
|
198
|
+
parts.push(`stdout: ${execErr.stdout}`);
|
|
199
|
+
if (execErr.stderr)
|
|
200
|
+
parts.push(`stderr: ${execErr.stderr}`);
|
|
201
|
+
return parts.join(' | ');
|
|
202
|
+
}
|
|
203
|
+
return String(e);
|
|
204
|
+
}
|
|
@@ -0,0 +1,11 @@
|
|
|
1
|
+
import type { Result } from '../runtime/result.js';
import type { WorkflowRunResult } from '../daemon/workflow-runner.js';
/**
 * Failure modes when delivering a workflow run result to a callback URL.
 *
 * - 'http_error': the endpoint responded with a non-2xx status; `body` holds
 *   the response text (best effort -- may be '' if the body was unreadable).
 * - 'network_error': the request never completed (connection failure, DNS
 *   error, or the 30s abort timeout); `message` is the stringified cause.
 */
export type DeliveryError = {
    readonly kind: 'http_error';
    readonly status: number;
    readonly body: string;
} | {
    readonly kind: 'network_error';
    readonly message: string;
};
/**
 * POSTs `result` as JSON (Content-Type: application/json) to `callbackUrl`.
 * The request is aborted after 30 seconds. Resolves to ok(undefined) on a
 * 2xx response, err(DeliveryError) otherwise; it does not throw.
 */
export declare function post(callbackUrl: string, result: WorkflowRunResult): Promise<Result<void, DeliveryError>>;
|
|
@@ -0,0 +1,27 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
+
exports.post = post;
|
|
4
|
+
const result_js_1 = require("../runtime/result.js");
|
|
5
|
+
async function post(callbackUrl, result) {
|
|
6
|
+
const controller = new AbortController();
|
|
7
|
+
const timer = setTimeout(() => controller.abort(), 30000);
|
|
8
|
+
try {
|
|
9
|
+
const res = await fetch(callbackUrl, {
|
|
10
|
+
method: 'POST',
|
|
11
|
+
headers: { 'Content-Type': 'application/json' },
|
|
12
|
+
body: JSON.stringify(result),
|
|
13
|
+
signal: controller.signal,
|
|
14
|
+
});
|
|
15
|
+
if (!res.ok) {
|
|
16
|
+
const body = await res.text().catch(() => '');
|
|
17
|
+
return (0, result_js_1.err)({ kind: 'http_error', status: res.status, body });
|
|
18
|
+
}
|
|
19
|
+
return (0, result_js_1.ok)(undefined);
|
|
20
|
+
}
|
|
21
|
+
catch (e) {
|
|
22
|
+
return (0, result_js_1.err)({ kind: 'network_error', message: String(e) });
|
|
23
|
+
}
|
|
24
|
+
finally {
|
|
25
|
+
clearTimeout(timer);
|
|
26
|
+
}
|
|
27
|
+
}
|
|
@@ -0,0 +1,5 @@
|
|
|
1
|
+
export { startTriggerListener } from './trigger-listener.js';
|
|
2
|
+
export type { TriggerListenerHandle, TriggerListenerError, StartTriggerListenerOptions } from './trigger-listener.js';
|
|
3
|
+
export { loadTriggerConfig, loadTriggerConfigFromFile } from './trigger-store.js';
|
|
4
|
+
export type { TriggerStoreError } from './trigger-store.js';
|
|
5
|
+
export type { TriggerId, TriggerDefinition, TriggerConfig, TriggerSource, WebhookEvent, ContextMapping, ContextMappingEntry, } from './types.js';
|